2018-01-12 17:30:54 +01:00
|
|
|
// Package march traverses two directories in lock step
|
|
|
|
package march
|
2017-09-01 16:21:46 +02:00
|
|
|
|
|
|
|
import (
|
2018-04-06 20:13:27 +02:00
|
|
|
"context"
|
2021-11-04 11:12:57 +01:00
|
|
|
"fmt"
|
2017-09-08 17:19:41 +02:00
|
|
|
"path"
|
|
|
|
"sort"
|
|
|
|
"strings"
|
2017-09-01 16:21:46 +02:00
|
|
|
"sync"
|
|
|
|
|
2019-07-28 19:47:38 +02:00
|
|
|
"github.com/rclone/rclone/fs"
|
|
|
|
"github.com/rclone/rclone/fs/dirtree"
|
|
|
|
"github.com/rclone/rclone/fs/filter"
|
|
|
|
"github.com/rclone/rclone/fs/list"
|
|
|
|
"github.com/rclone/rclone/fs/walk"
|
2017-09-08 17:19:41 +02:00
|
|
|
"golang.org/x/text/unicode/norm"
|
2017-09-01 16:21:46 +02:00
|
|
|
)
|
|
|
|
|
2018-01-12 17:30:54 +01:00
|
|
|
// March holds the data used to traverse two Fs simultaneously,
// calling Callback for each match
type March struct {
	// parameters
	Ctx                    context.Context // context for background goroutines
	Fdst                   fs.Fs           // destination Fs (listed by dstListDir)
	Fsrc                   fs.Fs           // source Fs (listed by srcListDir)
	Dir                    string          // directory to traverse in both remotes
	NoTraverse             bool            // don't traverse the destination
	SrcIncludeAll          bool            // include all files in the src (ignore filters when listing)
	DstIncludeAll          bool            // include all files in the destination (ignore filters when listing)
	Callback               Marcher         // object to call with results
	NoCheckDest            bool            // transfer all objects regardless without checking dst
	NoUnicodeNormalization bool            // don't normalize unicode characters in filenames
	// internal state
	srcListDir listDirFn          // function to call to list a directory in the src
	dstListDir listDirFn          // function to call to list a directory in the dst
	transforms []matchTransformFn // name transforms applied before comparison (set up by init)
}
|
|
|
|
|
2018-01-12 17:30:54 +01:00
|
|
|
// Marcher is called on each match
//
// Each method returns recurse which tells the traversal whether to
// descend into the entry if it is a directory.
type Marcher interface {
	// SrcOnly is called for a DirEntry found only in the source
	SrcOnly(src fs.DirEntry) (recurse bool)
	// DstOnly is called for a DirEntry found only in the destination
	DstOnly(dst fs.DirEntry) (recurse bool)
	// Match is called for a DirEntry found both in the source and destination
	Match(ctx context.Context, dst, src fs.DirEntry) (recurse bool)
}
|
|
|
|
|
2018-11-25 18:26:58 +01:00
|
|
|
// init sets up a march over opt.Fsrc, and opt.Fdst calling back callback for each match
//
// Note: this will flag filter-aware backends on the source side
func (m *March) init(ctx context.Context) {
	ci := fs.GetConfig(ctx)
	m.srcListDir = m.makeListDir(ctx, m.Fsrc, m.SrcIncludeAll)
	// Only list the destination if we are traversing it
	if !m.NoTraverse {
		m.dstListDir = m.makeListDir(ctx, m.Fdst, m.DstIncludeAll)
	}
	// Now create the matching transform
	// ..normalise the UTF8 first
	// NB the order matters: normalisation must run before any
	// lower-casing added below.
	if !m.NoUnicodeNormalization {
		m.transforms = append(m.transforms, norm.NFC.String)
	}
	// ..if destination is caseInsensitive then make it lower case
	// case Insensitive | src | dst | lower case compare |
	//                  | No  | No  | No                 |
	//                  | Yes | No  | No                 |
	//                  | No  | Yes | Yes                |
	//                  | Yes | Yes | Yes                |
	if m.Fdst.Features().CaseInsensitive || ci.IgnoreCaseSync {
		m.transforms = append(m.transforms, strings.ToLower)
	}
}
|
|
|
|
|
|
|
|
// listDirFn lists a single directory, returning its entries and any
// error encountered while listing.
type listDirFn func(dir string) (entries fs.DirEntries, err error)
|
2017-09-01 16:21:46 +02:00
|
|
|
|
2019-10-14 17:06:13 +02:00
|
|
|
// makeListDir constructs a listing function for the given fs
// and includeAll flags for marching through the file system.
//
// Two strategies are used:
//   - the default lists each directory on demand with list.DirSorted
//   - when --fast-list is usable, or --files-from with --no-traverse
//     is set, the whole tree is listed once (lazily, on first call)
//     and served from memory
//
// Note: this will optionally flag filter-aware backends!
func (m *March) makeListDir(ctx context.Context, f fs.Fs, includeAll bool) listDirFn {
	ci := fs.GetConfig(ctx)
	fi := filter.GetConfig(ctx)
	if !(ci.UseListR && f.Features().ListR != nil) && // !--fast-list active and
		!(ci.NoTraverse && fi.HaveFilesFrom()) { // !(--files-from and --no-traverse)
		return func(dir string) (entries fs.DirEntries, err error) {
			dirCtx := filter.SetUseFilter(m.Ctx, !includeAll) // make filter-aware backends constrain List
			return list.DirSorted(dirCtx, f, includeAll, dir)
		}
	}

	// This returns a closure for use when --fast-list is active or for when
	// --files-from and --no-traverse is set
	var (
		mu      sync.Mutex      // protects the vars below
		started bool            // set once the tree has been listed
		dirs    dirtree.DirTree // the entire directory tree, consumed as dirs are served
		dirsErr error           // sticky error from the initial tree listing
	)
	return func(dir string) (entries fs.DirEntries, err error) {
		mu.Lock()
		defer mu.Unlock()
		// Lazily list the whole tree on first use
		if !started {
			dirCtx := filter.SetUseFilter(m.Ctx, !includeAll) // make filter-aware backends constrain List
			dirs, dirsErr = walk.NewDirTree(dirCtx, f, m.Dir, includeAll, ci.MaxDepth)
			started = true
		}
		if dirsErr != nil {
			return nil, dirsErr
		}
		entries, ok := dirs[dir]
		if !ok {
			err = fs.ErrorDirNotFound
		} else {
			// Each directory is served exactly once; free the memory
			delete(dirs, dir)
		}
		return entries, err
	}
}
|
|
|
|
|
|
|
|
// listDirJob describes a directory listing that needs to be done
type listDirJob struct {
	srcRemote string // source directory to list
	dstRemote string // destination directory to list
	srcDepth  int    // remaining recursion depth on the source side
	dstDepth  int    // remaining recursion depth on the destination side
	noSrc     bool   // if set, skip listing the source side
	noDst     bool   // if set, skip listing the destination side
}
|
|
|
|
|
2018-01-12 17:30:54 +01:00
|
|
|
// Run starts the matching process off
//
// It launches ci.Checkers worker goroutines which consume listDirJobs
// from the in channel via processJob. New jobs produced by a worker
// are fed back into the channel from a helper goroutine so workers
// never block sending. The traversing WaitGroup counts outstanding
// jobs and gates shutdown; wg counts the workers themselves.
//
// Returns the first job error encountered (wrapped with a count if
// there was more than one).
func (m *March) Run(ctx context.Context) error {
	ci := fs.GetConfig(ctx)
	fi := filter.GetConfig(ctx)
	m.init(ctx)

	srcDepth := ci.MaxDepth
	if srcDepth < 0 {
		srcDepth = fs.MaxLevel
	}
	dstDepth := srcDepth
	// --delete-excluded needs the full destination tree regardless of depth
	if fi.Opt.DeleteExcluded {
		dstDepth = fs.MaxLevel
	}

	var mu sync.Mutex // Protects vars below
	var jobError error
	var errCount int

	// Start some directory listing go routines
	var wg sync.WaitGroup         // sync closing of go routines
	var traversing sync.WaitGroup // running directory traversals
	checkers := ci.Checkers
	in := make(chan listDirJob, checkers)
	for i := 0; i < checkers; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for {
				select {
				case <-m.Ctx.Done():
					return
				case job, ok := <-in:
					if !ok {
						return
					}
					jobs, err := m.processJob(job)
					if err != nil {
						mu.Lock()
						// Keep reference only to the first encountered error
						if jobError == nil {
							jobError = err
						}
						errCount++
						mu.Unlock()
					}
					if len(jobs) > 0 {
						traversing.Add(len(jobs))
						go func() {
							// Now we have traversed this directory, send these
							// jobs off for traversal in the background
							for _, newJob := range jobs {
								select {
								case <-m.Ctx.Done():
									// discard job if finishing
									traversing.Done()
								case in <- newJob:
								}
							}
						}()
					}
					traversing.Done()
				}
			}
		}()
	}

	// Start the process
	traversing.Add(1)
	in <- listDirJob{
		srcRemote: m.Dir,
		srcDepth:  srcDepth - 1,
		dstRemote: m.Dir,
		dstDepth:  dstDepth - 1,
		noDst:     m.NoCheckDest,
	}
	go func() {
		// when the context is cancelled discard the remaining jobs
		// (drains in so traversing.Wait below can finish)
		<-m.Ctx.Done()
		for range in {
			traversing.Done()
		}
	}()
	traversing.Wait()
	close(in)
	wg.Wait()

	// A single error is returned as-is; several are summarised with a count
	if errCount > 1 {
		return fmt.Errorf("march failed with %d error(s): first error: %w", errCount, jobError)
	}
	return jobError
}
|
|
|
|
|
|
|
|
// Check to see if the context has been cancelled
|
2018-01-12 17:30:54 +01:00
|
|
|
func (m *March) aborting() bool {
|
2017-09-01 16:21:46 +02:00
|
|
|
select {
|
2018-11-25 18:26:58 +01:00
|
|
|
case <-m.Ctx.Done():
|
2017-09-01 16:21:46 +02:00
|
|
|
return true
|
|
|
|
default:
|
|
|
|
}
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
2017-09-08 17:19:41 +02:00
|
|
|
// matchEntry is an entry plus transformed name
type matchEntry struct {
	entry fs.DirEntry
	leaf  string // base name of the entry, before any transforms
	name  string // leaf after applying the matchTransformFns; used for comparison
}
|
|
|
|
|
|
|
|
// matchEntries is a sortable slice of matchEntry
type matchEntries []matchEntry
|
|
|
|
|
|
|
|
// Len is part of sort.Interface.
func (es matchEntries) Len() int { return len(es) }
|
|
|
|
|
|
|
|
// Swap is part of sort.Interface.
func (es matchEntries) Swap(i, j int) { es[i], es[j] = es[j], es[i] }
|
|
|
|
|
|
|
|
// Less is part of sort.Interface.
|
|
|
|
//
|
|
|
|
// Compare in order (name, leaf, remote)
|
|
|
|
func (es matchEntries) Less(i, j int) bool {
|
|
|
|
ei, ej := &es[i], &es[j]
|
|
|
|
if ei.name == ej.name {
|
|
|
|
if ei.leaf == ej.leaf {
|
2019-06-09 16:57:05 +02:00
|
|
|
return fs.CompareDirEntries(ei.entry, ej.entry) < 0
|
2017-09-08 17:19:41 +02:00
|
|
|
}
|
|
|
|
return ei.leaf < ej.leaf
|
|
|
|
}
|
|
|
|
return ei.name < ej.name
|
|
|
|
}
|
|
|
|
|
|
|
|
// sort orders the directory entries by (name, leaf, remote)
//
// We use a stable sort here just in case there are
// duplicates. Assuming the remote delivers the entries in a
// consistent order, this will give the best user experience
// in syncing as it will use the first entry for the sync
// comparison.
func (es matchEntries) sort() {
	sort.Stable(es)
}
|
|
|
|
|
|
|
|
// make a matchEntries from a newMatch entries
|
2018-01-12 17:30:54 +01:00
|
|
|
func newMatchEntries(entries fs.DirEntries, transforms []matchTransformFn) matchEntries {
|
2017-09-08 17:19:41 +02:00
|
|
|
es := make(matchEntries, len(entries))
|
|
|
|
for i := range es {
|
|
|
|
es[i].entry = entries[i]
|
|
|
|
name := path.Base(entries[i].Remote())
|
|
|
|
es[i].leaf = name
|
|
|
|
for _, transform := range transforms {
|
|
|
|
name = transform(name)
|
|
|
|
}
|
|
|
|
es[i].name = name
|
|
|
|
}
|
|
|
|
es.sort()
|
|
|
|
return es
|
|
|
|
}
|
|
|
|
|
|
|
|
// matchPair is a matched pair of direntries returned by matchListings
type matchPair struct {
	src, dst fs.DirEntry // entries with the same transformed name on each side
}
|
|
|
|
|
2017-09-08 17:19:41 +02:00
|
|
|
// matchTransformFn converts a name into a form which is used for
// comparison in matchListings.
type matchTransformFn func(name string) string
|
|
|
|
|
|
|
|
// matchListings processes the two listings, matching up the items in
// the two slices using the transform function on each name first.
//
// Into srcOnly go Entries which only exist in the srcList
// Into dstOnly go Entries which only exist in the dstList
// Into matches go matchPair's of src and dst which have the same name
//
// This checks for duplicates and checks the list is sorted.
func matchListings(srcListEntries, dstListEntries fs.DirEntries, transforms []matchTransformFn) (srcOnly fs.DirEntries, dstOnly fs.DirEntries, matches []matchPair) {
	srcList := newMatchEntries(srcListEntries, transforms)
	dstList := newMatchEntries(dstListEntries, transforms)

	// Walk both sorted lists in lock step, adjusting iSrc/iDst to hold
	// one side back when the names don't line up.
	for iSrc, iDst := 0, 0; ; iSrc, iDst = iSrc+1, iDst+1 {
		var src, dst fs.DirEntry
		var srcName, dstName string
		if iSrc < len(srcList) {
			src = srcList[iSrc].entry
			srcName = srcList[iSrc].name
		}
		if iDst < len(dstList) {
			dst = dstList[iDst].entry
			dstName = dstList[iDst].name
		}
		if src == nil && dst == nil {
			break
		}
		// Skip a source entry which duplicates its predecessor (same
		// transformed name and same entry type)
		if src != nil && iSrc > 0 {
			prev := srcList[iSrc-1].entry
			prevName := srcList[iSrc-1].name
			if srcName == prevName && fs.DirEntryType(prev) == fs.DirEntryType(src) {
				fs.Logf(src, "Duplicate %s found in source - ignoring", fs.DirEntryType(src))
				iDst-- // ignore the src and retry the dst
				continue
			} else if srcName < prevName {
				// this should never happen since we sort the listings
				panic("Out of order listing in source")
			}
		}
		// Skip a destination entry which duplicates its predecessor
		if dst != nil && iDst > 0 {
			prev := dstList[iDst-1].entry
			prevName := dstList[iDst-1].name
			if dstName == prevName && fs.DirEntryType(dst) == fs.DirEntryType(prev) {
				fs.Logf(dst, "Duplicate %s found in destination - ignoring", fs.DirEntryType(dst))
				iSrc-- // ignore the dst and retry the src
				continue
			} else if dstName < prevName {
				// this should never happen since we sort the listings
				panic("Out of order listing in destination")
			}
		}
		// If the two sides don't name-match, emit the lesser one alone
		// this round and hold the greater one back for the next round
		if src != nil && dst != nil {
			// we can't use CompareDirEntries because srcName, dstName could
			// be different then src.Remote() or dst.Remote()
			srcType := fs.DirEntryType(src)
			dstType := fs.DirEntryType(dst)
			if srcName > dstName || (srcName == dstName && srcType > dstType) {
				src = nil
				iSrc--
			} else if srcName < dstName || (srcName == dstName && srcType < dstType) {
				dst = nil
				iDst--
			}
		}
		// Debugf(nil, "src = %v, dst = %v", src, dst)
		switch {
		case src == nil && dst == nil:
			// do nothing
		case src == nil:
			dstOnly = append(dstOnly, dst)
		case dst == nil:
			srcOnly = append(srcOnly, src)
		default:
			matches = append(matches, matchPair{src: src, dst: dst})
		}
	}
	return
}
|
|
|
|
|
|
|
|
// processJob processes a listDirJob listing the source and
// destination directories, comparing them and returning a slice of
// more jobs
//
// Listing errors are logged, counted via fs.CountError and returned;
// a missing destination directory (fs.ErrorDirNotFound) is not an
// error — the source is copied anyway.
func (m *March) processJob(job listDirJob) ([]listDirJob, error) {
	var (
		jobs                   []listDirJob
		srcList, dstList       fs.DirEntries
		srcListErr, dstListErr error
		wg                     sync.WaitGroup
		mu                     sync.Mutex // protects dstList when built concurrently below
	)

	// List the src and dst directories
	// (both listings run in parallel)
	if !job.noSrc {
		wg.Add(1)
		go func() {
			defer wg.Done()
			srcList, srcListErr = m.srcListDir(job.srcRemote)
		}()
	}
	if !m.NoTraverse && !job.noDst {
		wg.Add(1)
		go func() {
			defer wg.Done()
			dstList, dstListErr = m.dstListDir(job.dstRemote)
		}()
	}

	// Wait for listings to complete and report errors
	wg.Wait()
	if srcListErr != nil {
		if job.srcRemote != "" {
			fs.Errorf(job.srcRemote, "error reading source directory: %v", srcListErr)
		} else {
			fs.Errorf(m.Fsrc, "error reading source root directory: %v", srcListErr)
		}
		srcListErr = fs.CountError(srcListErr)
		return nil, srcListErr
	}
	if dstListErr == fs.ErrorDirNotFound {
		// Copy the stuff anyway
	} else if dstListErr != nil {
		if job.dstRemote != "" {
			fs.Errorf(job.dstRemote, "error reading destination directory: %v", dstListErr)
		} else {
			fs.Errorf(m.Fdst, "error reading destination root directory: %v", dstListErr)
		}
		dstListErr = fs.CountError(dstListErr)
		return nil, dstListErr
	}

	// If NoTraverse is set, then try to find a matching object
	// for each item in the srcList to head dst object
	// (lookups run concurrently, bounded by the limiter channel)
	ci := fs.GetConfig(m.Ctx)
	limiter := make(chan struct{}, ci.Checkers)
	if m.NoTraverse && !m.NoCheckDest {
		for _, src := range srcList {
			wg.Add(1)
			limiter <- struct{}{}
			go func(limiter chan struct{}, src fs.DirEntry) {
				defer wg.Done()
				if srcObj, ok := src.(fs.Object); ok {
					leaf := path.Base(srcObj.Remote())
					dstObj, err := m.Fdst.NewObject(m.Ctx, path.Join(job.dstRemote, leaf))
					if err == nil {
						mu.Lock()
						dstList = append(dstList, dstObj)
						mu.Unlock()
					}
				}
				<-limiter
			}(limiter, src)
		}
		wg.Wait()
	}

	// Work out what to do and do it
	srcOnly, dstOnly, matches := matchListings(srcList, dstList, m.transforms)
	for _, src := range srcOnly {
		if m.aborting() {
			return nil, m.Ctx.Err()
		}
		recurse := m.Callback.SrcOnly(src)
		if recurse && job.srcDepth > 0 {
			// recurse into the source only — there is no matching dst
			jobs = append(jobs, listDirJob{
				srcRemote: src.Remote(),
				dstRemote: src.Remote(),
				srcDepth:  job.srcDepth - 1,
				noDst:     true,
			})
		}

	}
	for _, dst := range dstOnly {
		if m.aborting() {
			return nil, m.Ctx.Err()
		}
		recurse := m.Callback.DstOnly(dst)
		if recurse && job.dstDepth > 0 {
			// recurse into the destination only — there is no matching src
			jobs = append(jobs, listDirJob{
				srcRemote: dst.Remote(),
				dstRemote: dst.Remote(),
				dstDepth:  job.dstDepth - 1,
				noSrc:     true,
			})
		}
	}
	for _, match := range matches {
		if m.aborting() {
			return nil, m.Ctx.Err()
		}
		recurse := m.Callback.Match(m.Ctx, match.dst, match.src)
		if recurse && job.srcDepth > 0 && job.dstDepth > 0 {
			// recurse into both sides
			jobs = append(jobs, listDirJob{
				srcRemote: match.src.Remote(),
				dstRemote: match.dst.Remote(),
				srcDepth:  job.srcDepth - 1,
				dstDepth:  job.dstDepth - 1,
			})
		}
	}
	return jobs, nil
}
|