// Integration tests - test rclone by doing real transactions to a
// storage provider to and from the local disk.
//
// By default it will use a local fs, however you can provide a
// -remote option to use a different remote.  The test_all.go script
// is a wrapper to call this for all the test remotes.
//
// FIXME not safe for concurrent running of tests until fs.Config is
// no longer a global
//
// NB When writing tests
//
// Make sure every series of writes to the remote has a
// fstest.CheckItems() before use.  This makes sure the directory
// listing is consistent and stops cascading errors.
//
// Call fs.Stats.ResetCounters() before every fs.Sync() as it uses the
// error count internally.
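//
// To illustrate those two conventions, a typical test body follows this
// shape (a minimal sketch only - the object name and contents are made
// up for illustration):
//
//	r := NewRun(t)
//	defer r.Finalise()
//	file1 := r.WriteBoth("hello", "hello world", t1)
//	fstest.CheckItems(t, r.fremote, file1) // settle the listing before use
//	fs.Stats.ResetCounters()               // Sync uses the error count
//	// ... now call fs.Sync or whatever operation is under test ...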
package fs_test

import (
	"bytes"
	"errors"
	"flag"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"os"
	"path"
	"path/filepath"
	"regexp"
	"sort"
	"strings"
	"testing"
	"time"

	"github.com/ncw/rclone/fs"
	_ "github.com/ncw/rclone/fs/all" // import all fs
	"github.com/ncw/rclone/fstest"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// Globals
var (
	RemoteName      = flag.String("remote", "", "Remote to test with, defaults to local filesystem")
	SubDir          = flag.Bool("subdir", false, "Set to test with a sub directory")
	Verbose         = flag.Bool("verbose", false, "Set to enable logging")
	DumpHeaders     = flag.Bool("dump-headers", false, "Set to dump headers (needs -verbose)")
	DumpBodies      = flag.Bool("dump-bodies", false, "Set to dump bodies (needs -verbose)")
	Individual      = flag.Bool("individual", false, "Make individual bucket/container/directory for each test - much slower")
	LowLevelRetries = flag.Int("low-level-retries", 10, "Number of low level retries")
	UseListR        = flag.Bool("fast-list", false, "Use recursive list if available. Uses more memory but fewer transactions.")
)
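
// These flags are passed on the "go test" command line. For example, a
// run against a non-local remote might look like the following sketch
// (the remote name "TestS3:" and the flag values are illustrative - use
// whatever is configured in your rclone config):
//
//	go test -v -remote TestS3: -subdir -low-level-retries 3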

// Some times used in the tests
var (
	t1 = fstest.Time("2001-02-03T04:05:06.499999999Z")
	t2 = fstest.Time("2011-12-25T12:59:59.123456789Z")
	t3 = fstest.Time("2011-12-30T12:59:59.000000000Z")
)

// TestMain drives the tests
func TestMain(m *testing.M) {
	flag.Parse()
	if !*Individual {
		oneRun = newRun()
	}
	rc := m.Run()
	if !*Individual {
		oneRun.Finalise()
	}
	os.Exit(rc)
}

// Run holds the remotes for a test run
type Run struct {
	localName    string
	flocal       fs.Fs
	fremote      fs.Fs
	fremoteName  string
	cleanRemote  func()
	mkdir        map[string]bool // whether the remote has been made yet for the fs name
	Logf, Fatalf func(text string, args ...interface{})
}

// oneRun holds the master run data if individual is not set
var oneRun *Run

// newRun initialises the remote and local for testing and returns a
// run object.
//
// r.flocal is an empty local Fs
// r.fremote is an empty remote Fs
//
// Finalise() will tidy them away when done.
func newRun() *Run {
	r := &Run{
		Logf:   log.Printf,
		Fatalf: log.Fatalf,
		mkdir:  make(map[string]bool),
	}

	// Never ask for passwords, fail instead.
	// If your local config is encrypted set environment variable
	// "RCLONE_CONFIG_PASS=hunter2" (or your password)
	*fs.AskPassword = false
	fs.LoadConfig()
	if *Verbose {
		fs.Config.LogLevel = fs.LogLevelDebug
	}
	fs.Config.DumpHeaders = *DumpHeaders
	fs.Config.DumpBodies = *DumpBodies
	fs.Config.LowLevelRetries = *LowLevelRetries
	fs.Config.UseListR = *UseListR
	var err error
	r.fremote, r.fremoteName, r.cleanRemote, err = fstest.RandomRemote(*RemoteName, *SubDir)
	if err != nil {
		r.Fatalf("Failed to open remote %q: %v", *RemoteName, err)
	}

	r.localName, err = ioutil.TempDir("", "rclone")
	if err != nil {
		r.Fatalf("Failed to create temp dir: %v", err)
	}
	r.localName = filepath.ToSlash(r.localName)
	r.flocal, err = fs.NewFs(r.localName)
	if err != nil {
		r.Fatalf("Failed to make %q: %v", r.localName, err)
	}
	fs.CalculateModifyWindow(r.fremote, r.flocal)
	return r
}

// dirsToRemove sorts by string length, longest first, so that
// subdirectories sort before their parents and get removed first
type dirsToRemove []string

func (d dirsToRemove) Len() int           { return len(d) }
func (d dirsToRemove) Swap(i, j int)      { d[i], d[j] = d[j], d[i] }
func (d dirsToRemove) Less(i, j int) bool { return len(d[i]) > len(d[j]) }

// NewRun initialises the remote and local for testing and returns a
// run object.  Call this from the tests.
//
// r.flocal is an empty local Fs
// r.fremote is an empty remote Fs
//
// Finalise() will tidy them away when done.
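//
// A typical caller looks like this (a sketch only - the test name is
// hypothetical):
//
//	func TestSomething(t *testing.T) {
//		r := NewRun(t)
//		defer r.Finalise()
//		// ... test body ...
//	}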
func NewRun(t *testing.T) *Run {
	var r *Run
	if *Individual {
		r = newRun()
	} else {
		// If not individual, use the global one with the clean method overridden
		r = new(Run)
		*r = *oneRun
		r.cleanRemote = func() {
			var toDelete dirsToRemove
			require.NoError(t, fs.Walk(r.fremote, "", true, -1, func(dirPath string, entries fs.DirEntries, err error) error {
				if err != nil {
					if err == fs.ErrorDirNotFound {
						return nil
					}
					t.Fatalf("Error listing: %v", err)
				}
				for _, entry := range entries {
					switch x := entry.(type) {
					case fs.Object:
						err = x.Remove()
						if err != nil {
							t.Errorf("Error removing file %q: %v", x.Remote(), err)
						}
					case *fs.Dir:
						toDelete = append(toDelete, x.Remote())
					}
				}
				return nil
			}))
			sort.Sort(toDelete)
			for _, dir := range toDelete {
				err := r.fremote.Rmdir(dir)
				if err != nil {
					t.Errorf("Error removing dir %q: %v", dir, err)
				}
			}
			// Check remote is empty
			fstest.CheckItems(t, r.fremote)
		}
	}
	r.Logf = t.Logf
	r.Fatalf = t.Fatalf
	r.Logf("Remote %q, Local %q, Modify Window %q", r.fremote, r.flocal, fs.Config.ModifyWindow)
	return r
}

// RenameFile renames a file in the local filesystem
func (r *Run) RenameFile(item fstest.Item, newpath string) fstest.Item {
	oldFilepath := path.Join(r.localName, item.Path)
	newFilepath := path.Join(r.localName, newpath)
	if err := os.Rename(oldFilepath, newFilepath); err != nil {
		r.Fatalf("Failed to rename file from %q to %q: %v", item.Path, newpath, err)
	}

	item.Path = newpath

	return item
}

// WriteFile writes a file to the local filesystem
func (r *Run) WriteFile(filePath, content string, t time.Time) fstest.Item {
	item := fstest.NewItem(filePath, content, t)
	// FIXME make directories?
	filePath = path.Join(r.localName, filePath)
	dirPath := path.Dir(filePath)
	err := os.MkdirAll(dirPath, 0770)
	if err != nil {
		r.Fatalf("Failed to make directories %q: %v", dirPath, err)
	}
	err = ioutil.WriteFile(filePath, []byte(content), 0600)
	if err != nil {
		r.Fatalf("Failed to write file %q: %v", filePath, err)
	}
	err = os.Chtimes(filePath, t, t)
	if err != nil {
		r.Fatalf("Failed to chtimes file %q: %v", filePath, err)
	}
	return item
}

// ForceMkdir creates the remote
func (r *Run) ForceMkdir(f fs.Fs) {
	err := f.Mkdir("")
	if err != nil {
		r.Fatalf("Failed to mkdir %q: %v", f, err)
	}
	r.mkdir[f.String()] = true
}

// Mkdir creates the remote if it hasn't been created already
func (r *Run) Mkdir(f fs.Fs) {
	if !r.mkdir[f.String()] {
		r.ForceMkdir(f)
	}
}

// WriteObjectTo writes an object with the given remote name, content and
// modification time to the Fs passed in
func (r *Run) WriteObjectTo(f fs.Fs, remote, content string, modTime time.Time, useUnchecked bool) fstest.Item {
	put := f.Put
	if useUnchecked {
		put = f.Features().PutUnchecked
		if put == nil {
			r.Fatalf("Fs doesn't support PutUnchecked")
		}
	}
	r.Mkdir(f)
	const maxTries = 10
	for tries := 1; ; tries++ {
		in := bytes.NewBufferString(content)
		objinfo := fs.NewStaticObjectInfo(remote, modTime, int64(len(content)), true, nil, nil)
		_, err := put(in, objinfo)
		if err == nil {
			break
		}
		// Retry if err returned a retry error
		if fs.IsRetryError(err) && tries < maxTries {
			r.Logf("Retry Put of %q to %v: %d/%d (%v)", remote, f, tries, maxTries, err)
			time.Sleep(2 * time.Second)
			continue
		}
		r.Fatalf("Failed to put %q to %q: %v", remote, f, err)
	}
	return fstest.NewItem(remote, content, modTime)
}

// WriteObject writes an object to the remote
func (r *Run) WriteObject(remote, content string, modTime time.Time) fstest.Item {
	return r.WriteObjectTo(r.fremote, remote, content, modTime, false)
}

// WriteUncheckedObject writes an object to the remote not checking for duplicates
func (r *Run) WriteUncheckedObject(remote, content string, modTime time.Time) fstest.Item {
	return r.WriteObjectTo(r.fremote, remote, content, modTime, true)
}

// WriteBoth calls WriteObject and WriteFile with the same arguments
func (r *Run) WriteBoth(remote, content string, modTime time.Time) fstest.Item {
	r.WriteFile(remote, content, modTime)
	return r.WriteObject(remote, content, modTime)
}

// cleanTempDir cleans the temporary directory
func (r *Run) cleanTempDir() {
	err := os.RemoveAll(r.localName)
	if err != nil {
		r.Logf("Failed to clean temporary directory %q: %v", r.localName, err)
	}
}

// Finalise cleans the remote and local
func (r *Run) Finalise() {
	// r.Logf("Cleaning remote %q", r.fremote)
	r.cleanRemote()
	// r.Logf("Cleaning local %q", r.localName)
	r.cleanTempDir()
}

// ------------------------------------------------------------

func TestMkdir(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	fstest.TestMkdir(t, r.fremote)
}

func TestLsd(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	file1 := r.WriteObject("sub dir/hello world", "hello world", t1)

	fstest.CheckItems(t, r.fremote, file1)

	var buf bytes.Buffer
	err := fs.ListDir(r.fremote, &buf)
	require.NoError(t, err)
	res := buf.String()
	assert.Contains(t, res, "sub dir\n")
}

func TestLs(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	file1 := r.WriteBoth("potato2", "------------------------------------------------------------", t1)
	file2 := r.WriteBoth("empty space", "", t2)

	fstest.CheckItems(t, r.fremote, file1, file2)

	var buf bytes.Buffer
	err := fs.List(r.fremote, &buf)
	require.NoError(t, err)
	res := buf.String()
	assert.Contains(t, res, " 0 empty space\n")
	assert.Contains(t, res, " 60 potato2\n")
}

func TestLsLong(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	file1 := r.WriteBoth("potato2", "------------------------------------------------------------", t1)
	file2 := r.WriteBoth("empty space", "", t2)

	fstest.CheckItems(t, r.fremote, file1, file2)

	var buf bytes.Buffer
	err := fs.ListLong(r.fremote, &buf)
	require.NoError(t, err)
	res := buf.String()
	lines := strings.Split(strings.Trim(res, "\n"), "\n")
	assert.Equal(t, 2, len(lines))

	timeFormat := "2006-01-02 15:04:05.000000000"
	precision := r.fremote.Precision()
	location := time.Now().Location()
	checkTime := func(m, filename string, expected time.Time) {
		modTime, err := time.ParseInLocation(timeFormat, m, location) // parse as localtime
		if err != nil {
			t.Errorf("Error parsing %q: %v", m, err)
		} else {
			dt, ok := fstest.CheckTimeEqualWithPrecision(expected, modTime, precision)
			if !ok {
				t.Errorf("%s: Modification time difference too big |%s| > %s (%s vs %s) (precision %s)", filename, dt, precision, modTime, expected, precision)
			}
		}
	}

	m1 := regexp.MustCompile(`(?m)^ 0 (\d{4}-\d\d-\d\d \d\d:\d\d:\d\d\.\d{9}) empty space$`)
	if ms := m1.FindStringSubmatch(res); ms == nil {
		t.Errorf("empty space missing: %q", res)
	} else {
		checkTime(ms[1], "empty space", t2.Local())
	}

	m2 := regexp.MustCompile(`(?m)^ 60 (\d{4}-\d\d-\d\d \d\d:\d\d:\d\d\.\d{9}) potato2$`)
	if ms := m2.FindStringSubmatch(res); ms == nil {
		t.Errorf("potato2 missing: %q", res)
	} else {
		checkTime(ms[1], "potato2", t1.Local())
	}
}

func TestHashSums(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	file1 := r.WriteBoth("potato2", "------------------------------------------------------------", t1)
	file2 := r.WriteBoth("empty space", "", t2)

	fstest.CheckItems(t, r.fremote, file1, file2)

	// MD5 Sum

	var buf bytes.Buffer
	err := fs.Md5sum(r.fremote, &buf)
	require.NoError(t, err)
	res := buf.String()
	if !strings.Contains(res, "d41d8cd98f00b204e9800998ecf8427e empty space\n") &&
		!strings.Contains(res, " UNSUPPORTED empty space\n") &&
		!strings.Contains(res, " empty space\n") {
		t.Errorf("empty space missing: %q", res)
	}
	if !strings.Contains(res, "d6548b156ea68a4e003e786df99eee76 potato2\n") &&
		!strings.Contains(res, " UNSUPPORTED potato2\n") &&
		!strings.Contains(res, " potato2\n") {
		t.Errorf("potato2 missing: %q", res)
	}

	// SHA1 Sum

	buf.Reset()
	err = fs.Sha1sum(r.fremote, &buf)
	require.NoError(t, err)
	res = buf.String()
	if !strings.Contains(res, "da39a3ee5e6b4b0d3255bfef95601890afd80709 empty space\n") &&
		!strings.Contains(res, " UNSUPPORTED empty space\n") &&
		!strings.Contains(res, " empty space\n") {
		t.Errorf("empty space missing: %q", res)
	}
	if !strings.Contains(res, "9dc7f7d3279715991a22853f5981df582b7f9f6d potato2\n") &&
		!strings.Contains(res, " UNSUPPORTED potato2\n") &&
		!strings.Contains(res, " potato2\n") {
		t.Errorf("potato2 missing: %q", res)
	}

	// Dropbox Hash Sum

	buf.Reset()
	err = fs.DropboxHashSum(r.fremote, &buf)
	require.NoError(t, err)
	res = buf.String()
	if !strings.Contains(res, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 empty space\n") &&
		!strings.Contains(res, " UNSUPPORTED empty space\n") &&
		!strings.Contains(res, " empty space\n") {
		t.Errorf("empty space missing: %q", res)
	}
	if !strings.Contains(res, "a979481df794fed9c3990a6a422e0b1044ac802c15fab13af9c687f8bdbee01a potato2\n") &&
		!strings.Contains(res, " UNSUPPORTED potato2\n") &&
		!strings.Contains(res, " potato2\n") {
		t.Errorf("potato2 missing: %q", res)
	}
}

func TestCount(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	file1 := r.WriteBoth("potato2", "------------------------------------------------------------", t1)
	file2 := r.WriteBoth("empty space", "", t2)
	file3 := r.WriteBoth("sub dir/potato3", "hello", t2)

	fstest.CheckItems(t, r.fremote, file1, file2, file3)

	// Check the MaxDepth too
	fs.Config.MaxDepth = 1
	defer func() { fs.Config.MaxDepth = -1 }()

	objects, size, err := fs.Count(r.fremote)
	require.NoError(t, err)
	assert.Equal(t, int64(2), objects)
	assert.Equal(t, int64(60), size)
}

func TestDelete(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	file1 := r.WriteObject("small", "1234567890", t2) // 10 bytes
	file2 := r.WriteObject("medium", "------------------------------------------------------------", t1) // 60 bytes
	file3 := r.WriteObject("large", "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", t1) // 100 bytes
	fstest.CheckItems(t, r.fremote, file1, file2, file3)

	fs.Config.Filter.MaxSize = 60
	defer func() {
		fs.Config.Filter.MaxSize = -1
	}()

	err := fs.Delete(r.fremote)
	require.NoError(t, err)
	fstest.CheckItems(t, r.fremote, file3)
}

func testCheck(t *testing.T, checkFunction func(fdst, fsrc fs.Fs) error) {
	r := NewRun(t)
	defer r.Finalise()

	check := func(i int, wantErrors int64) {
		fs.Debugf(r.fremote, "%d: Starting check test", i)
		oldErrors := fs.Stats.GetErrors()
		err := checkFunction(r.flocal, r.fremote)
		gotErrors := fs.Stats.GetErrors() - oldErrors
		if wantErrors == 0 && err != nil {
			t.Errorf("%d: Got error when not expecting one: %v", i, err)
		}
		if wantErrors != 0 && err == nil {
			t.Errorf("%d: No error when expecting one", i)
		}
		if wantErrors != gotErrors {
			t.Errorf("%d: Expecting %d errors but got %d", i, wantErrors, gotErrors)
		}
		fs.Debugf(r.fremote, "%d: Ending check test", i)
	}

	file1 := r.WriteBoth("rutabaga", "is tasty", t3)
	fstest.CheckItems(t, r.fremote, file1)
	fstest.CheckItems(t, r.flocal, file1)
	check(1, 0)

	file2 := r.WriteFile("potato2", "------------------------------------------------------------", t1)
	fstest.CheckItems(t, r.flocal, file1, file2)
	check(2, 1)

	file3 := r.WriteObject("empty space", "", t2)
	fstest.CheckItems(t, r.fremote, file1, file3)
	check(3, 2)

	file2r := file2
	if fs.Config.SizeOnly {
		file2r = r.WriteObject("potato2", "--Some-Differences-But-Size-Only-Is-Enabled-----------------", t1)
	} else {
		r.WriteObject("potato2", "------------------------------------------------------------", t1)
	}
	fstest.CheckItems(t, r.fremote, file1, file2r, file3)
	check(4, 1)

	r.WriteFile("empty space", "", t2)
	fstest.CheckItems(t, r.flocal, file1, file2, file3)
	check(5, 0)
}

func TestCheck(t *testing.T) {
	testCheck(t, fs.Check)
}

func TestCheckDownload(t *testing.T) {
	testCheck(t, fs.CheckDownload)
}

func TestCheckSizeOnly(t *testing.T) {
	fs.Config.SizeOnly = true
	defer func() { fs.Config.SizeOnly = false }()
	TestCheck(t)
}

func (r *Run) checkWithDuplicates(t *testing.T, items ...fstest.Item) {
	objects, size, err := fs.Count(r.fremote)
	require.NoError(t, err)
	assert.Equal(t, int64(len(items)), objects)
	wantSize := int64(0)
	for _, item := range items {
		wantSize += item.Size
	}
	assert.Equal(t, wantSize, size)
}

func skipIfCantDedupe(t *testing.T, f fs.Fs) {
	if f.Features().PutUnchecked == nil {
		t.Skip("Can't test deduplicate - no PutUnchecked")
	}
	if !f.Features().DuplicateFiles {
		t.Skip("Can't test deduplicate - no duplicate files possible")
	}
	if !f.Hashes().Contains(fs.HashMD5) {
		t.Skip("Can't test deduplicate - MD5 not supported")
	}
}

func TestDeduplicateInteractive(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	skipIfCantDedupe(t, r.fremote)

	file1 := r.WriteUncheckedObject("one", "This is one", t1)
	file2 := r.WriteUncheckedObject("one", "This is one", t1)
	file3 := r.WriteUncheckedObject("one", "This is one", t1)
	r.checkWithDuplicates(t, file1, file2, file3)

	err := fs.Deduplicate(r.fremote, fs.DeduplicateInteractive)
	require.NoError(t, err)

	fstest.CheckItems(t, r.fremote, file1)
}

func TestDeduplicateSkip(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	skipIfCantDedupe(t, r.fremote)

	file1 := r.WriteUncheckedObject("one", "This is one", t1)
	file2 := r.WriteUncheckedObject("one", "This is one", t1)
	file3 := r.WriteUncheckedObject("one", "This is another one", t1)
	r.checkWithDuplicates(t, file1, file2, file3)

	err := fs.Deduplicate(r.fremote, fs.DeduplicateSkip)
	require.NoError(t, err)

	r.checkWithDuplicates(t, file1, file3)
}

func TestDeduplicateFirst(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	skipIfCantDedupe(t, r.fremote)

	file1 := r.WriteUncheckedObject("one", "This is one", t1)
	file2 := r.WriteUncheckedObject("one", "This is one A", t1)
	file3 := r.WriteUncheckedObject("one", "This is one BB", t1)
	r.checkWithDuplicates(t, file1, file2, file3)

	err := fs.Deduplicate(r.fremote, fs.DeduplicateFirst)
	require.NoError(t, err)

	objects, size, err := fs.Count(r.fremote)
	require.NoError(t, err)
	assert.Equal(t, int64(1), objects)
	if size != file1.Size && size != file2.Size && size != file3.Size {
		t.Errorf("Size not one of the object sizes %d", size)
	}
}

func TestDeduplicateNewest(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	skipIfCantDedupe(t, r.fremote)

	file1 := r.WriteUncheckedObject("one", "This is one", t1)
	file2 := r.WriteUncheckedObject("one", "This is one too", t2)
	file3 := r.WriteUncheckedObject("one", "This is another one", t3)
	r.checkWithDuplicates(t, file1, file2, file3)

	err := fs.Deduplicate(r.fremote, fs.DeduplicateNewest)
	require.NoError(t, err)

	fstest.CheckItems(t, r.fremote, file3)
}

func TestDeduplicateOldest(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	skipIfCantDedupe(t, r.fremote)

	file1 := r.WriteUncheckedObject("one", "This is one", t1)
	file2 := r.WriteUncheckedObject("one", "This is one too", t2)
	file3 := r.WriteUncheckedObject("one", "This is another one", t3)
	r.checkWithDuplicates(t, file1, file2, file3)

	err := fs.Deduplicate(r.fremote, fs.DeduplicateOldest)
	require.NoError(t, err)

	fstest.CheckItems(t, r.fremote, file1)
}

func TestDeduplicateRename(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	skipIfCantDedupe(t, r.fremote)

	file1 := r.WriteUncheckedObject("one.txt", "This is one", t1)
	file2 := r.WriteUncheckedObject("one.txt", "This is one too", t2)
	file3 := r.WriteUncheckedObject("one.txt", "This is another one", t3)
	r.checkWithDuplicates(t, file1, file2, file3)

	err := fs.Deduplicate(r.fremote, fs.DeduplicateRename)
	require.NoError(t, err)

	require.NoError(t, fs.Walk(r.fremote, "", true, -1, func(dirPath string, entries fs.DirEntries, err error) error {
		if err != nil {
			return err
		}
		entries.ForObject(func(o fs.Object) {
			remote := o.Remote()
			if remote != "one-1.txt" &&
				remote != "one-2.txt" &&
				remote != "one-3.txt" {
				t.Errorf("Bad file name after rename %q", remote)
			}
			size := o.Size()
			if size != file1.Size && size != file2.Size && size != file3.Size {
				t.Errorf("Size not one of the object sizes %d", size)
			}
		})
		return nil
	}))
}

func TestCat(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	file1 := r.WriteBoth("file1", "ABCDEFGHIJ", t1)
	file2 := r.WriteBoth("file2", "012345678", t2)

	fstest.CheckItems(t, r.fremote, file1, file2)

	for _, test := range []struct {
		offset int64
		count  int64
		a      string
		b      string
	}{
		{0, -1, "ABCDEFGHIJ", "012345678"},
		{0, 5, "ABCDE", "01234"},
		{-3, -1, "HIJ", "678"},
		{1, 3, "BCD", "123"},
	} {
		var buf bytes.Buffer
		err := fs.Cat(r.fremote, &buf, test.offset, test.count)
		require.NoError(t, err)
		res := buf.String()

		if res != test.a+test.b && res != test.b+test.a {
			t.Errorf("Incorrect output from Cat(%d,%d): %q", test.offset, test.count, res)
		}
	}
}

func TestRmdirs(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()
	r.Mkdir(r.fremote)

	// Clean any directories that have crept in so far
	// FIXME make the Finalise method do this?
	require.NoError(t, fs.Rmdirs(r.fremote, ""))

	// Make some files and dirs we expect to keep
	r.ForceMkdir(r.fremote)
	file1 := r.WriteObject("A1/B1/C1/one", "aaa", t1)
	// ...and dirs we expect to delete
	require.NoError(t, fs.Mkdir(r.fremote, "A2"))
	require.NoError(t, fs.Mkdir(r.fremote, "A1/B2"))
	require.NoError(t, fs.Mkdir(r.fremote, "A1/B2/C2"))
	require.NoError(t, fs.Mkdir(r.fremote, "A1/B1/C3"))
	require.NoError(t, fs.Mkdir(r.fremote, "A3"))
	require.NoError(t, fs.Mkdir(r.fremote, "A3/B3"))
	require.NoError(t, fs.Mkdir(r.fremote, "A3/B3/C4"))
	// ...and one more file at the end
	file2 := r.WriteObject("A1/two", "bbb", t2)

	fstest.CheckListingWithPrecision(
		t,
		r.fremote,
		[]fstest.Item{
			file1, file2,
		},
		/* FIXME bucket based Fses are only showing some of the directories
		[]string{
			"A1",
			"A1/B1",
			"A1/B1/C1",
			"A2",
			"A1/B2",
			"A1/B2/C2",
			"A1/B1/C3",
			"A3",
			"A3/B3",
			"A3/B3/C4",
		},*/
		nil,
		fs.Config.ModifyWindow,
	)

	require.NoError(t, fs.Rmdirs(r.fremote, ""))

	fstest.CheckListingWithPrecision(
		t,
		r.fremote,
		[]fstest.Item{
			file1, file2,
		},
		[]string{
			"A1",
			"A1/B1",
			"A1/B1/C1",
		},
		fs.Config.ModifyWindow,
	)

}

func TestMoveFile(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()

	file1 := r.WriteFile("file1", "file1 contents", t1)
	fstest.CheckItems(t, r.flocal, file1)

	file2 := file1
	file2.Path = "sub/file2"

	err := fs.MoveFile(r.fremote, r.flocal, file2.Path, file1.Path)
	require.NoError(t, err)
	fstest.CheckItems(t, r.flocal)
	fstest.CheckItems(t, r.fremote, file2)

	r.WriteFile("file1", "file1 contents", t1)
	fstest.CheckItems(t, r.flocal, file1)

	err = fs.MoveFile(r.fremote, r.flocal, file2.Path, file1.Path)
	require.NoError(t, err)
	fstest.CheckItems(t, r.flocal)
	fstest.CheckItems(t, r.fremote, file2)

	err = fs.MoveFile(r.fremote, r.fremote, file2.Path, file2.Path)
	require.NoError(t, err)
	fstest.CheckItems(t, r.flocal)
	fstest.CheckItems(t, r.fremote, file2)
}

func TestCopyFile(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()

	file1 := r.WriteFile("file1", "file1 contents", t1)
	fstest.CheckItems(t, r.flocal, file1)

	file2 := file1
	file2.Path = "sub/file2"

	err := fs.CopyFile(r.fremote, r.flocal, file2.Path, file1.Path)
	require.NoError(t, err)
	fstest.CheckItems(t, r.flocal, file1)
	fstest.CheckItems(t, r.fremote, file2)

	err = fs.CopyFile(r.fremote, r.flocal, file2.Path, file1.Path)
	require.NoError(t, err)
	fstest.CheckItems(t, r.flocal, file1)
	fstest.CheckItems(t, r.fremote, file2)

	err = fs.CopyFile(r.fremote, r.fremote, file2.Path, file2.Path)
	require.NoError(t, err)
	fstest.CheckItems(t, r.flocal, file1)
	fstest.CheckItems(t, r.fremote, file2)
}

// testFsInfo is for unit testing fs.Info
type testFsInfo struct {
	name      string
	root      string
	stringVal string
	precision time.Duration
	hashes    fs.HashSet
	features  fs.Features
}

// Name of the remote (as passed into NewFs)
func (i *testFsInfo) Name() string { return i.name }

// Root of the remote (as passed into NewFs)
func (i *testFsInfo) Root() string { return i.root }

// String returns a description of the FS
func (i *testFsInfo) String() string { return i.stringVal }

// Precision of the ModTimes in this Fs
func (i *testFsInfo) Precision() time.Duration { return i.precision }

// Returns the supported hash types of the filesystem
func (i *testFsInfo) Hashes() fs.HashSet { return i.hashes }

// Returns the optional features of the filesystem
func (i *testFsInfo) Features() *fs.Features { return &i.features }

func TestSameConfig(t *testing.T) {
	a := &testFsInfo{name: "name", root: "root"}
	for _, test := range []struct {
		name     string
		root     string
		expected bool
	}{
		{"name", "root", true},
		{"name", "rooty", true},
		{"namey", "root", false},
		{"namey", "roott", false},
	} {
		b := &testFsInfo{name: test.name, root: test.root}
		actual := fs.SameConfig(a, b)
		assert.Equal(t, test.expected, actual)
		actual = fs.SameConfig(b, a)
		assert.Equal(t, test.expected, actual)
	}
}

func TestSame(t *testing.T) {
	a := &testFsInfo{name: "name", root: "root"}
	for _, test := range []struct {
		name     string
		root     string
		expected bool
	}{
		{"name", "root", true},
		{"name", "rooty", false},
		{"namey", "root", false},
		{"namey", "roott", false},
	} {
		b := &testFsInfo{name: test.name, root: test.root}
		actual := fs.Same(a, b)
		assert.Equal(t, test.expected, actual)
		actual = fs.Same(b, a)
		assert.Equal(t, test.expected, actual)
	}
}

func TestOverlapping(t *testing.T) {
	a := &testFsInfo{name: "name", root: "root"}
	for _, test := range []struct {
		name     string
		root     string
		expected bool
	}{
		{"name", "root", true},
		{"namey", "root", false},
		{"name", "rooty", false},
		{"namey", "rooty", false},
		{"name", "roo", false},
		{"name", "root/toot", true},
		{"name", "root/toot/", true},
		{"name", "", true},
		{"name", "/", true},
	} {
		b := &testFsInfo{name: test.name, root: test.root}
		what := fmt.Sprintf("(%q,%q) vs (%q,%q)", a.name, a.root, b.name, b.root)
		actual := fs.Overlapping(a, b)
		assert.Equal(t, test.expected, actual, what)
		actual = fs.Overlapping(b, a)
		assert.Equal(t, test.expected, actual, what)
	}
}

func TestListDirSorted(t *testing.T) {
	r := NewRun(t)
	defer r.Finalise()

	fs.Config.Filter.MaxSize = 10
	defer func() {
		fs.Config.Filter.MaxSize = -1
	}()

	files := []fstest.Item{
		r.WriteObject("a.txt", "hello world", t1),
		r.WriteObject("zend.txt", "hello", t1),
		r.WriteObject("sub dir/hello world", "hello world", t1),
		r.WriteObject("sub dir/hello world2", "hello world", t1),
		r.WriteObject("sub dir/sub sub dir/hello world3", "hello world", t1),
	}
	fstest.CheckItems(t, r.fremote, files...)
	var items fs.DirEntries
	var err error

	// Turn the DirEntry into a name, ending with a / if it is a
	// dir
	str := func(i int) string {
		item := items[i]
		name := item.Remote()
		switch item.(type) {
		case fs.Object:
		case *fs.Dir:
			name += "/"
		default:
			t.Fatalf("Unknown type %+v", item)
		}
		return name
	}

	items, err = fs.ListDirSorted(r.fremote, true, "")
	require.NoError(t, err)
	require.Len(t, items, 3)
	assert.Equal(t, "a.txt", str(0))
	assert.Equal(t, "sub dir/", str(1))
	assert.Equal(t, "zend.txt", str(2))

	items, err = fs.ListDirSorted(r.fremote, false, "")
	require.NoError(t, err)
	require.Len(t, items, 2)
	assert.Equal(t, "sub dir/", str(0))
	assert.Equal(t, "zend.txt", str(1))

	items, err = fs.ListDirSorted(r.fremote, true, "sub dir")
	require.NoError(t, err)
	require.Len(t, items, 3)
	assert.Equal(t, "sub dir/hello world", str(0))
	assert.Equal(t, "sub dir/hello world2", str(1))
	assert.Equal(t, "sub dir/sub sub dir/", str(2))

	items, err = fs.ListDirSorted(r.fremote, false, "sub dir")
	require.NoError(t, err)
	require.Len(t, items, 1)
	assert.Equal(t, "sub dir/sub sub dir/", str(0))
}

type byteReader struct {
	c byte
}

func (br *byteReader) Read(p []byte) (n int, err error) {
	if br.c == 0 {
		err = io.EOF
	} else if len(p) >= 1 {
		p[0] = br.c
		n = 1
		br.c--
	}
	return
}

func TestReadFill(t *testing.T) {
	buf := []byte{9, 9, 9, 9, 9}

	n, err := fs.ReadFill(&byteReader{0}, buf)
	assert.Equal(t, io.EOF, err)
	assert.Equal(t, 0, n)
	assert.Equal(t, []byte{9, 9, 9, 9, 9}, buf)

	n, err = fs.ReadFill(&byteReader{3}, buf)
	assert.Equal(t, io.EOF, err)
	assert.Equal(t, 3, n)
	assert.Equal(t, []byte{3, 2, 1, 9, 9}, buf)

	n, err = fs.ReadFill(&byteReader{8}, buf)
	assert.Equal(t, nil, err)
	assert.Equal(t, 5, n)
	assert.Equal(t, []byte{8, 7, 6, 5, 4}, buf)
}

type errorReader struct {
	err error
}

func (er errorReader) Read(p []byte) (n int, err error) {
	return 0, er.err
}

func TestCheckEqualReaders(t *testing.T) {
	b65a := make([]byte, 65*1024)
	b65b := make([]byte, 65*1024)
	b65b[len(b65b)-1] = 1
	b66 := make([]byte, 66*1024)

	differ, err := fs.CheckEqualReaders(bytes.NewBuffer(b65a), bytes.NewBuffer(b65a))
	assert.NoError(t, err)
	assert.Equal(t, differ, false)

	differ, err = fs.CheckEqualReaders(bytes.NewBuffer(b65a), bytes.NewBuffer(b65b))
	assert.NoError(t, err)
	assert.Equal(t, differ, true)

	differ, err = fs.CheckEqualReaders(bytes.NewBuffer(b65a), bytes.NewBuffer(b66))
	assert.NoError(t, err)
	assert.Equal(t, differ, true)

	differ, err = fs.CheckEqualReaders(bytes.NewBuffer(b66), bytes.NewBuffer(b65a))
	assert.NoError(t, err)
	assert.Equal(t, differ, true)

	myErr := errors.New("sentinel")
	wrap := func(b []byte) io.Reader {
		r := bytes.NewBuffer(b)
		e := errorReader{myErr}
		return io.MultiReader(r, e)
	}

	differ, err = fs.CheckEqualReaders(wrap(b65a), bytes.NewBuffer(b65a))
	assert.Equal(t, myErr, err)
	assert.Equal(t, differ, true)

	differ, err = fs.CheckEqualReaders(wrap(b65a), bytes.NewBuffer(b65b))
	assert.Equal(t, myErr, err)
	assert.Equal(t, differ, true)

	differ, err = fs.CheckEqualReaders(wrap(b65a), bytes.NewBuffer(b66))
	assert.Equal(t, myErr, err)
	assert.Equal(t, differ, true)

	differ, err = fs.CheckEqualReaders(wrap(b66), bytes.NewBuffer(b65a))
	assert.Equal(t, myErr, err)
	assert.Equal(t, differ, true)

	differ, err = fs.CheckEqualReaders(bytes.NewBuffer(b65a), wrap(b65a))
	assert.Equal(t, myErr, err)
	assert.Equal(t, differ, true)

	differ, err = fs.CheckEqualReaders(bytes.NewBuffer(b65a), wrap(b65b))
	assert.Equal(t, myErr, err)
	assert.Equal(t, differ, true)

	differ, err = fs.CheckEqualReaders(bytes.NewBuffer(b65a), wrap(b66))
	assert.Equal(t, myErr, err)
	assert.Equal(t, differ, true)

	differ, err = fs.CheckEqualReaders(bytes.NewBuffer(b66), wrap(b65a))
	assert.Equal(t, myErr, err)
	assert.Equal(t, differ, true)
}