2017-07-03 16:05:27 +02:00
|
|
|
|
package info
|
|
|
|
|
|
|
|
|
|
// FIXME once translations are implemented will need a no-escape
|
|
|
|
|
// option for Put so we can make these tests work again
|
|
|
|
|
|
|
|
|
|
import (
|
|
|
|
|
"bytes"
|
2019-06-17 10:34:30 +02:00
|
|
|
|
"context"
|
2017-07-03 16:05:27 +02:00
|
|
|
|
"fmt"
|
2017-08-22 08:00:10 +02:00
|
|
|
|
"io"
|
2017-07-03 16:05:27 +02:00
|
|
|
|
"sort"
|
|
|
|
|
"strings"
|
|
|
|
|
"sync"
|
|
|
|
|
"time"
|
|
|
|
|
|
|
|
|
|
"github.com/pkg/errors"
|
2019-07-28 19:47:38 +02:00
|
|
|
|
"github.com/rclone/rclone/cmd"
|
|
|
|
|
"github.com/rclone/rclone/fs"
|
|
|
|
|
"github.com/rclone/rclone/fs/hash"
|
|
|
|
|
"github.com/rclone/rclone/fs/object"
|
|
|
|
|
"github.com/rclone/rclone/fstest"
|
2017-07-03 16:05:27 +02:00
|
|
|
|
"github.com/spf13/cobra"
|
|
|
|
|
)
|
|
|
|
|
|
2018-11-02 13:12:09 +01:00
|
|
|
|
// position is a bitmask recording at which places in a file name
// (middle, left and/or right) a test string failed.
type position int

const (
	positionMiddle position = 1 << iota // string failed in the middle of the name
	positionLeft                        // string failed at the start of the name
	positionRight                       // string failed at the end of the name
	positionNone   position = 0         // string worked at every position
	positionAll    position = positionRight<<1 - 1 // all position bits set
)
|
|
|
|
|
|
2017-07-03 16:05:27 +02:00
|
|
|
|
// Command line flags selecting which checks readInfo runs.
var (
	checkNormalization bool // run the UTF-8 normalization check
	checkControl       bool // run the control character file name checks
	checkLength        bool // run the maximum file name length check
	checkStreaming     bool // run the indeterminate-size upload check
	// positionList is every single position, in the order the checks try them.
	positionList = []position{positionMiddle, positionLeft, positionRight}
)
|
|
|
|
|
|
|
|
|
|
// init registers the info command with the rclone command root and
// defines its flags. All checks default to enabled.
func init() {
	cmd.Root.AddCommand(commandDefintion)
	commandDefintion.Flags().BoolVarP(&checkNormalization, "check-normalization", "", true, "Check UTF-8 Normalization.")
	commandDefintion.Flags().BoolVarP(&checkControl, "check-control", "", true, "Check control characters.")
	commandDefintion.Flags().BoolVarP(&checkLength, "check-length", "", true, "Check max filename length.")
	commandDefintion.Flags().BoolVarP(&checkStreaming, "check-streaming", "", true, "Check uploads with indeterminate file size.")
}
|
|
|
|
|
|
|
|
|
|
// commandDefintion implements the "rclone info" command. It is
// Hidden because it is a developer tool which writes test files to
// the remotes passed in, so it should not be run against remotes
// holding precious data.
var commandDefintion = &cobra.Command{
	Use: "info [remote:path]+",
	Short: `Discovers file name or other limitations for paths.`,
	Long: `rclone info discovers what filenames and upload methods are possible
to write to the paths passed in and how long they can be. It can take some
time. It will write test files into the remote:path passed in. It outputs
a bit of go code for each one.
`,
	Hidden: true,
	Run: func(command *cobra.Command, args []string) {
		cmd.CheckArgs(1, 1E6, command, args)
		// Each argument is an independent remote:path to probe.
		for i := range args {
			f := cmd.NewFsDir(args[i : i+1])
			cmd.Run(false, false, command, func() error {
				return readInfo(context.Background(), f)
			})
		}
	},
}
|
|
|
|
|
|
|
|
|
|
// results holds the outcome of the checks run against one remote.
type results struct {
	ctx context.Context // context used for all operations on f
	f   fs.Fs           // the remote under test
	mu  sync.Mutex      // protects stringNeedsEscaping
	// stringNeedsEscaping maps each probed string to the bitmask of
	// name positions (middle/left/right) at which it failed.
	stringNeedsEscaping map[string]position
	maxFileLength        int  // longest file name that could be written
	canWriteUnnormalized bool // a file with an unnormalized name could be written
	canReadUnnormalized  bool // ... and read back under the unnormalized name
	canReadRenormalized  bool // ... and read back under the normalized name
	canStream            bool // uploads with indeterminate size work
}
|
|
|
|
|
|
2019-06-17 10:34:30 +02:00
|
|
|
|
func newResults(ctx context.Context, f fs.Fs) *results {
|
2017-07-03 16:05:27 +02:00
|
|
|
|
return &results{
|
2019-06-17 10:34:30 +02:00
|
|
|
|
ctx: ctx,
|
2018-11-02 13:12:09 +01:00
|
|
|
|
f: f,
|
|
|
|
|
stringNeedsEscaping: make(map[string]position),
|
2017-07-03 16:05:27 +02:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Print the results to stdout
|
|
|
|
|
func (r *results) Print() {
|
|
|
|
|
fmt.Printf("// %s\n", r.f.Name())
|
|
|
|
|
if checkControl {
|
|
|
|
|
escape := []string{}
|
2018-11-02 13:12:09 +01:00
|
|
|
|
for c, needsEscape := range r.stringNeedsEscaping {
|
|
|
|
|
if needsEscape != positionNone {
|
2017-07-03 16:05:27 +02:00
|
|
|
|
escape = append(escape, fmt.Sprintf("0x%02X", c))
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
sort.Strings(escape)
|
2018-11-02 13:12:09 +01:00
|
|
|
|
fmt.Printf("stringNeedsEscaping = []byte{\n")
|
2017-07-03 16:05:27 +02:00
|
|
|
|
fmt.Printf("\t%s\n", strings.Join(escape, ", "))
|
|
|
|
|
fmt.Printf("}\n")
|
|
|
|
|
}
|
|
|
|
|
if checkLength {
|
|
|
|
|
fmt.Printf("maxFileLength = %d\n", r.maxFileLength)
|
|
|
|
|
}
|
|
|
|
|
if checkNormalization {
|
|
|
|
|
fmt.Printf("canWriteUnnormalized = %v\n", r.canWriteUnnormalized)
|
|
|
|
|
fmt.Printf("canReadUnnormalized = %v\n", r.canReadUnnormalized)
|
|
|
|
|
fmt.Printf("canReadRenormalized = %v\n", r.canReadRenormalized)
|
|
|
|
|
}
|
2017-08-22 08:00:10 +02:00
|
|
|
|
if checkStreaming {
|
|
|
|
|
fmt.Printf("canStream = %v\n", r.canStream)
|
|
|
|
|
}
|
2017-07-03 16:05:27 +02:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// writeFile writes a file with some random contents
|
|
|
|
|
func (r *results) writeFile(path string) (fs.Object, error) {
|
|
|
|
|
contents := fstest.RandomString(50)
|
2018-01-12 17:30:54 +01:00
|
|
|
|
src := object.NewStaticObjectInfo(path, time.Now(), int64(len(contents)), true, nil, r.f)
|
2019-06-17 10:34:30 +02:00
|
|
|
|
return r.f.Put(r.ctx, bytes.NewBufferString(contents), src)
|
2017-07-03 16:05:27 +02:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// check whether normalization is enforced and check whether it is
|
|
|
|
|
// done on the files anyway
|
|
|
|
|
func (r *results) checkUTF8Normalization() {
|
|
|
|
|
unnormalized := "Héroique"
|
|
|
|
|
normalized := "Héroique"
|
|
|
|
|
_, err := r.writeFile(unnormalized)
|
|
|
|
|
if err != nil {
|
|
|
|
|
r.canWriteUnnormalized = false
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
r.canWriteUnnormalized = true
|
2019-06-17 10:34:30 +02:00
|
|
|
|
_, err = r.f.NewObject(r.ctx, unnormalized)
|
2017-07-03 16:05:27 +02:00
|
|
|
|
if err == nil {
|
|
|
|
|
r.canReadUnnormalized = true
|
|
|
|
|
}
|
2019-06-17 10:34:30 +02:00
|
|
|
|
_, err = r.f.NewObject(r.ctx, normalized)
|
2017-07-03 16:05:27 +02:00
|
|
|
|
if err == nil {
|
|
|
|
|
r.canReadRenormalized = true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-11-02 13:12:09 +01:00
|
|
|
|
func (r *results) checkStringPositions(s string) {
|
|
|
|
|
fs.Infof(r.f, "Writing position file 0x%0X", s)
|
|
|
|
|
positionError := positionNone
|
|
|
|
|
|
|
|
|
|
for _, pos := range positionList {
|
|
|
|
|
path := ""
|
|
|
|
|
switch pos {
|
|
|
|
|
case positionMiddle:
|
|
|
|
|
path = fmt.Sprintf("position-middle-%0X-%s-", s, s)
|
|
|
|
|
case positionLeft:
|
|
|
|
|
path = fmt.Sprintf("%s-position-left-%0X", s, s)
|
|
|
|
|
case positionRight:
|
|
|
|
|
path = fmt.Sprintf("position-right-%0X-%s", s, s)
|
|
|
|
|
default:
|
|
|
|
|
panic("invalid position: " + pos.String())
|
|
|
|
|
}
|
|
|
|
|
_, writeErr := r.writeFile(path)
|
|
|
|
|
if writeErr != nil {
|
|
|
|
|
fs.Infof(r.f, "Writing %s position file 0x%0X Error: %s", pos.String(), s, writeErr)
|
|
|
|
|
} else {
|
|
|
|
|
fs.Infof(r.f, "Writing %s position file 0x%0X OK", pos.String(), s)
|
|
|
|
|
}
|
2019-06-17 10:34:30 +02:00
|
|
|
|
obj, getErr := r.f.NewObject(r.ctx, path)
|
2018-11-02 13:12:09 +01:00
|
|
|
|
if getErr != nil {
|
|
|
|
|
fs.Infof(r.f, "Getting %s position file 0x%0X Error: %s", pos.String(), s, getErr)
|
|
|
|
|
} else {
|
|
|
|
|
if obj.Size() != 50 {
|
|
|
|
|
fs.Infof(r.f, "Getting %s position file 0x%0X Invalid Size: %d", pos.String(), s, obj.Size())
|
|
|
|
|
} else {
|
|
|
|
|
fs.Infof(r.f, "Getting %s position file 0x%0X OK", pos.String(), s)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if writeErr != nil || getErr != nil {
|
|
|
|
|
positionError += pos
|
|
|
|
|
}
|
2017-07-03 16:05:27 +02:00
|
|
|
|
}
|
2018-11-02 13:12:09 +01:00
|
|
|
|
|
2017-07-03 16:05:27 +02:00
|
|
|
|
r.mu.Lock()
|
2018-11-02 13:12:09 +01:00
|
|
|
|
r.stringNeedsEscaping[s] = positionError
|
2017-07-03 16:05:27 +02:00
|
|
|
|
r.mu.Unlock()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// check we can write a file with the control chars
|
|
|
|
|
func (r *results) checkControls() {
|
|
|
|
|
fs.Infof(r.f, "Trying to create control character file names")
|
|
|
|
|
// Concurrency control
|
|
|
|
|
tokens := make(chan struct{}, fs.Config.Checkers)
|
|
|
|
|
for i := 0; i < fs.Config.Checkers; i++ {
|
|
|
|
|
tokens <- struct{}{}
|
|
|
|
|
}
|
|
|
|
|
var wg sync.WaitGroup
|
|
|
|
|
for i := rune(0); i < 128; i++ {
|
2018-11-02 13:12:09 +01:00
|
|
|
|
s := string(i)
|
2017-07-03 16:05:27 +02:00
|
|
|
|
if i == 0 || i == '/' {
|
|
|
|
|
// We're not even going to check NULL or /
|
2018-11-02 13:12:09 +01:00
|
|
|
|
r.stringNeedsEscaping[s] = positionAll
|
2017-07-03 16:05:27 +02:00
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
wg.Add(1)
|
2018-11-02 13:12:09 +01:00
|
|
|
|
go func(s string) {
|
2017-07-03 16:05:27 +02:00
|
|
|
|
defer wg.Done()
|
|
|
|
|
token := <-tokens
|
2018-11-02 13:12:09 +01:00
|
|
|
|
r.checkStringPositions(s)
|
2017-07-03 16:05:27 +02:00
|
|
|
|
tokens <- token
|
2018-11-02 13:12:09 +01:00
|
|
|
|
}(s)
|
|
|
|
|
}
|
|
|
|
|
for _, s := range []string{"\", "\xBF", "\xFE"} {
|
|
|
|
|
wg.Add(1)
|
|
|
|
|
go func(s string) {
|
|
|
|
|
defer wg.Done()
|
|
|
|
|
token := <-tokens
|
|
|
|
|
r.checkStringPositions(s)
|
|
|
|
|
tokens <- token
|
|
|
|
|
}(s)
|
2017-07-03 16:05:27 +02:00
|
|
|
|
}
|
|
|
|
|
wg.Wait()
|
|
|
|
|
fs.Infof(r.f, "Done trying to create control character file names")
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// findMaxLength finds the maximum file name length the remote will
// accept, by binary searching (sort.Search) for the first length at
// which a write fails, and stores one less than that in
// r.maxFileLength. sort.Search assumes writes succeed for all lengths
// below some threshold and fail for all lengths at or above it.
func (r *results) findMaxLength() {
	const maxLen = 16 * 1024
	// A name of maxLen 'a's; prefixes of it are used as test names.
	name := make([]byte, maxLen)
	for i := range name {
		name[i] = 'a'
	}
	// Find the first size of filename we can't write
	i := sort.Search(len(name), func(i int) (fail bool) {
		// A panic during the write also counts as a failure - recover
		// converts it into fail = true via the named return value.
		defer func() {
			if err := recover(); err != nil {
				fs.Infof(r.f, "Couldn't write file with name length %d: %v", i, err)
				fail = true
			}
		}()

		path := string(name[:i])
		_, err := r.writeFile(path)
		if err != nil {
			fs.Infof(r.f, "Couldn't write file with name length %d: %v", i, err)
			return true
		}
		fs.Infof(r.f, "Wrote file with name length %d", i)
		return false
	})
	// i is the first failing length, so the max usable length is i-1.
	r.maxFileLength = i - 1
	fs.Infof(r.f, "Max file length is %d", r.maxFileLength)
}
|
|
|
|
|
|
2017-08-22 08:00:10 +02:00
|
|
|
|
// checkStreaming tests whether the remote can upload a file of
// indeterminate size (size -1), verifying the stored object's hashes
// and size afterwards, and records the outcome in r.canStream.
func (r *results) checkStreaming() {
	putter := r.f.Put
	// Prefer the remote's dedicated streaming upload method if it has one.
	if r.f.Features().PutStream != nil {
		fs.Infof(r.f, "Given remote has specialized streaming function. Using that to test streaming.")
		putter = r.f.Features().PutStream
	}

	contents := "thinking of test strings is hard"
	buf := bytes.NewBufferString(contents)
	// Hash the data as it is uploaded so it can be checked afterwards.
	hashIn := hash.NewMultiHasher()
	in := io.TeeReader(buf, hashIn)

	// Size -1 marks the upload as having indeterminate length.
	objIn := object.NewStaticObjectInfo("checkStreamingTest", time.Now(), -1, true, nil, r.f)
	objR, err := putter(r.ctx, in, objIn)
	if err != nil {
		fs.Infof(r.f, "Streamed file failed to upload (%v)", err)
		r.canStream = false
		return
	}

	// Check every hash type the remote supports against what was sent.
	hashes := hashIn.Sums()
	types := objR.Fs().Hashes().Array()
	for _, Hash := range types {
		sum, err := objR.Hash(r.ctx, Hash)
		if err != nil {
			fs.Infof(r.f, "Streamed file failed when getting hash %v (%v)", Hash, err)
			r.canStream = false
			return
		}
		if !hash.Equals(hashes[Hash], sum) {
			fs.Infof(r.f, "Streamed file has incorrect hash %v: expecting %q got %q", Hash, hashes[Hash], sum)
			r.canStream = false
			return
		}
	}
	// The stored size must match the number of bytes streamed.
	if int64(len(contents)) != objR.Size() {
		fs.Infof(r.f, "Streamed file has incorrect file size: expecting %d got %d", len(contents), objR.Size())
		r.canStream = false
		return
	}
	r.canStream = true
}
|
|
|
|
|
|
2019-06-17 10:34:30 +02:00
|
|
|
|
func readInfo(ctx context.Context, f fs.Fs) error {
|
|
|
|
|
err := f.Mkdir(ctx, "")
|
2017-07-03 16:05:27 +02:00
|
|
|
|
if err != nil {
|
|
|
|
|
return errors.Wrap(err, "couldn't mkdir")
|
|
|
|
|
}
|
2019-06-17 10:34:30 +02:00
|
|
|
|
r := newResults(ctx, f)
|
2017-07-03 16:05:27 +02:00
|
|
|
|
if checkControl {
|
|
|
|
|
r.checkControls()
|
|
|
|
|
}
|
|
|
|
|
if checkLength {
|
|
|
|
|
r.findMaxLength()
|
|
|
|
|
}
|
|
|
|
|
if checkNormalization {
|
|
|
|
|
r.checkUTF8Normalization()
|
|
|
|
|
}
|
2017-08-22 08:00:10 +02:00
|
|
|
|
if checkStreaming {
|
|
|
|
|
r.checkStreaming()
|
|
|
|
|
}
|
2017-07-03 16:05:27 +02:00
|
|
|
|
r.Print()
|
|
|
|
|
return nil
|
|
|
|
|
}
|
2018-11-02 13:12:09 +01:00
|
|
|
|
|
|
|
|
|
func (e position) String() string {
|
|
|
|
|
switch e {
|
|
|
|
|
case positionNone:
|
|
|
|
|
return "none"
|
|
|
|
|
case positionAll:
|
|
|
|
|
return "all"
|
|
|
|
|
}
|
|
|
|
|
var buf bytes.Buffer
|
|
|
|
|
if e&positionMiddle != 0 {
|
|
|
|
|
buf.WriteString("middle")
|
|
|
|
|
e &= ^positionMiddle
|
|
|
|
|
}
|
|
|
|
|
if e&positionLeft != 0 {
|
|
|
|
|
if buf.Len() != 0 {
|
|
|
|
|
buf.WriteRune(',')
|
|
|
|
|
}
|
|
|
|
|
buf.WriteString("left")
|
|
|
|
|
e &= ^positionLeft
|
|
|
|
|
}
|
|
|
|
|
if e&positionRight != 0 {
|
|
|
|
|
if buf.Len() != 0 {
|
|
|
|
|
buf.WriteRune(',')
|
|
|
|
|
}
|
|
|
|
|
buf.WriteString("right")
|
|
|
|
|
e &= ^positionRight
|
|
|
|
|
}
|
|
|
|
|
if e != positionNone {
|
|
|
|
|
panic("invalid position")
|
|
|
|
|
}
|
|
|
|
|
return buf.String()
|
|
|
|
|
}
|