Mirror of https://github.com/rclone/rclone.git, synced 2024-11-07 09:04:52 +01:00
sync: report list of synced paths to file -- see #7282
Allows rclone sync to accept the same output file flags as rclone check, for the purpose of writing results to a file.

A new --dest-after option is also supported, which writes a list file using the same ListFormat flags as lsf (including customizable options for hash, modtime, etc.) Conceptually it is similar to rsync's --itemize-changes, but not identical -- it should output an accurate list of what will be on the destination after the sync.

Note that it has a few limitations, and certain scenarios are not currently supported:

- --max-duration / CutoffModeHard
- --compare-dest / --copy-dest (because equal() is called multiple times for the same file)
- server-side moves of an entire dir at once (because we never get the individual file objects in the dir)
- High-level retries, because there would be dupes
- Possibly some error scenarios that didn't come up on the tests

Note also that each file is logged during the sync, as opposed to after, so it is most useful as a predictor of what SHOULD happen to each file (which may or may not match what actually DID.)

Only rclone sync is currently supported -- support for copy and move may be added in the future.
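For illustration, an invocation using the new report flags might look like the following (the destination, report file names and format string here are arbitrary examples, not defaults):

    rclone sync /path/to/src remote:dst \
        --combined combined.txt \
        --missing-on-src missing-on-src.txt --missing-on-dst missing-on-dst.txt \
        --match match.txt --differ differ.txt --error errors.txt \
        --dest-after dest-after.txt --format 'hpt' --timeformat max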
parent c0968a0987
commit 3a50f35df9

cmd/sync/sync.go (171)
@@ -3,22 +3,137 @@ package sync

import (
    "context"
    "io"
    "os"

    mutex "sync" // renamed as "sync" already in use

    "github.com/rclone/rclone/cmd"
    "github.com/rclone/rclone/fs"
    "github.com/rclone/rclone/fs/config/flags"
    "github.com/rclone/rclone/fs/operations"
    "github.com/rclone/rclone/fs/operations/operationsflags"
    "github.com/rclone/rclone/fs/sync"
    "github.com/spf13/cobra"
)

var (
    createEmptySrcDirs = false
    opt                = operations.LoggerOpt{}
    loggerFlagsOpt     = operationsflags.AddLoggerFlagsOptions{}
)

func init() {
    cmd.Root.AddCommand(commandDefinition)
    cmdFlags := commandDefinition.Flags()
    flags.BoolVarP(cmdFlags, &createEmptySrcDirs, "create-empty-src-dirs", "", createEmptySrcDirs, "Create empty source dirs on destination after sync", "")
    operationsflags.AddLoggerFlags(cmdFlags, &opt, &loggerFlagsOpt)
    // TODO: add same flags to move and copy
}

var lock mutex.Mutex

func syncLoggerFn(ctx context.Context, sigil operations.Sigil, src, dst fs.DirEntry, err error) {
    lock.Lock()
    defer lock.Unlock()

    if err == fs.ErrorIsDir && !opt.FilesOnly && opt.DestAfter != nil {
        opt.PrintDestAfter(ctx, sigil, src, dst, err)
        return
    }

    _, srcOk := src.(fs.Object)
    _, dstOk := dst.(fs.Object)
    var filename string
    if !srcOk && !dstOk {
        return
    } else if srcOk && !dstOk {
        filename = src.String()
    } else {
        filename = dst.String()
    }

    if sigil.Writer(opt) != nil {
        operations.SyncFprintf(sigil.Writer(opt), "%s\n", filename)
    }
    if opt.Combined != nil {
        operations.SyncFprintf(opt.Combined, "%c %s\n", sigil, filename)
        fs.Debugf(nil, "Sync Logger: %s: %c %s\n", sigil.String(), sigil, filename)
    }
    if opt.DestAfter != nil {
        opt.PrintDestAfter(ctx, sigil, src, dst, err)
    }
}

// GetSyncLoggerOpt gets the options corresponding to the logger flags
func GetSyncLoggerOpt(ctx context.Context, fdst fs.Fs, command *cobra.Command) (operations.LoggerOpt, func(), error) {
    closers := []io.Closer{}

    opt.LoggerFn = syncLoggerFn
    if opt.TimeFormat == "max" {
        opt.TimeFormat = operations.FormatForLSFPrecision(fdst.Precision())
    }
    opt.SetListFormat(ctx, command.Flags())
    opt.NewListJSON(ctx, fdst, "")

    open := func(name string, pout *io.Writer) error {
        if name == "" {
            return nil
        }
        if name == "-" {
            *pout = os.Stdout
            return nil
        }
        out, err := os.Create(name)
        if err != nil {
            return err
        }
        *pout = out
        closers = append(closers, out)
        return nil
    }

    if err := open(loggerFlagsOpt.Combined, &opt.Combined); err != nil {
        return opt, nil, err
    }
    if err := open(loggerFlagsOpt.MissingOnSrc, &opt.MissingOnSrc); err != nil {
        return opt, nil, err
    }
    if err := open(loggerFlagsOpt.MissingOnDst, &opt.MissingOnDst); err != nil {
        return opt, nil, err
    }
    if err := open(loggerFlagsOpt.Match, &opt.Match); err != nil {
        return opt, nil, err
    }
    if err := open(loggerFlagsOpt.Differ, &opt.Differ); err != nil {
        return opt, nil, err
    }
    if err := open(loggerFlagsOpt.ErrFile, &opt.Error); err != nil {
        return opt, nil, err
    }
    if err := open(loggerFlagsOpt.DestAfter, &opt.DestAfter); err != nil {
        return opt, nil, err
    }

    close := func() {
        for _, closer := range closers {
            err := closer.Close()
            if err != nil {
                fs.Errorf(nil, "Failed to close report output: %v", err)
            }
        }
    }

    return opt, close, nil
}

func anyNotBlank(s ...string) bool {
    for _, x := range s {
        if x != "" {
            return true
        }
    }
    return false
}

var commandDefinition = &cobra.Command{
@@ -59,6 +174,44 @@ destination that is inside the source directory.

**Note**: Use the ` + "`rclone dedupe`" + ` command to deal with "Duplicate object/directory found in source/destination - ignoring" errors.
See [this forum post](https://forum.rclone.org/t/sync-not-clearing-duplicates/14372) for more info.

## Logger Flags

The ` + "`--differ`" + `, ` + "`--missing-on-dst`" + `, ` + "`--missing-on-src`" + `, ` +
    "`--match`" + ` and ` + "`--error`" + ` flags write paths, one per line, to the file name (or
stdout if it is ` + "`-`" + `) supplied. What they write is described in the
help below. For example ` + "`--differ`" + ` will write all paths which are
present on both the source and destination but different.

The ` + "`--combined`" + ` flag will write a file (or stdout) which contains all
file paths with a symbol and then a space and then the path to tell
you what happened to it. These are reminiscent of diff files.

- ` + "`= path`" + ` means path was found in source and destination and was identical
- ` + "`- path`" + ` means path was missing on the source, so only in the destination
- ` + "`+ path`" + ` means path was missing on the destination, so only in the source
- ` + "`* path`" + ` means path was present in source and destination but different.
- ` + "`! path`" + ` means there was an error reading or hashing the source or dest.

The ` + "`--dest-after`" + ` flag writes a list file using the same format flags
as [` + "`lsf`" + `](/commands/rclone_lsf/#synopsis) (including [customizable options
for hash, modtime, etc.](/commands/rclone_lsf/#synopsis))
Conceptually it is similar to rsync's ` + "`--itemize-changes`" + `, but not identical
-- it should output an accurate list of what will be on the destination
after the sync.

Note that these logger flags have a few limitations, and certain scenarios
are not currently supported:

- ` + "`--max-duration`" + ` / ` + "`CutoffModeHard`" + `
- ` + "`--compare-dest`" + ` / ` + "`--copy-dest`" + `
- server-side moves of an entire dir at once
- High-level retries, because there would be duplicates (use ` + "`--retries 1`" + ` to disable)
- Possibly some unusual error scenarios

Note also that each file is logged during the sync, as opposed to after, so it
is most useful as a predictor of what SHOULD happen to each file
(which may or may not match what actually DID.)
`,
    Annotations: map[string]string{
        "groups": "Sync,Copy,Filter,Listing,Important",
@@ -67,10 +220,22 @@ See [this forum post](https://forum.rclone.org/t/sync-not-clearing-duplicates/14
         cmd.CheckArgs(2, 2, command, args)
         fsrc, srcFileName, fdst := cmd.NewFsSrcFileDst(args)
         cmd.Run(true, true, command, func() error {
-            if srcFileName == "" {
-                return sync.Sync(context.Background(), fdst, fsrc, createEmptySrcDirs)
+            ctx := context.Background()
+            opt, close, err := GetSyncLoggerOpt(ctx, fdst, command)
+            if err != nil {
+                return err
             }
-            return operations.CopyFile(context.Background(), fdst, fsrc, srcFileName, srcFileName)
+            defer close()
+
+            if anyNotBlank(loggerFlagsOpt.Combined, loggerFlagsOpt.MissingOnSrc, loggerFlagsOpt.MissingOnDst,
+                loggerFlagsOpt.Match, loggerFlagsOpt.Differ, loggerFlagsOpt.ErrFile, loggerFlagsOpt.DestAfter) {
+                ctx = operations.WithSyncLogger(ctx, opt)
+            }
+
+            if srcFileName == "" {
+                return sync.Sync(ctx, fdst, fsrc, createEmptySrcDirs)
+            }
+            return operations.CopyFile(ctx, fdst, fsrc, srcFileName, srcFileName)
         })
     },
 }
@@ -48,6 +48,43 @@ destination that is inside the source directory.
**Note**: Use the `rclone dedupe` command to deal with "Duplicate object/directory found in source/destination - ignoring" errors.
See [this forum post](https://forum.rclone.org/t/sync-not-clearing-duplicates/14372) for more info.

## Logger Flags

The `--differ`, `--missing-on-dst`, `--missing-on-src`, `--match`
and `--error` flags write paths, one per line, to the file name (or
stdout if it is `-`) supplied. What they write is described in the
help below. For example `--differ` will write all paths which are
present on both the source and destination but different.

The `--combined` flag will write a file (or stdout) which contains all
file paths with a symbol and then a space and then the path to tell
you what happened to it. These are reminiscent of diff files.

- `= path` means path was found in source and destination and was identical
- `- path` means path was missing on the source, so only in the destination
- `+ path` means path was missing on the destination, so only in the source
- `* path` means path was present in source and destination but different.
- `! path` means there was an error reading or hashing the source or dest.

The `--dest-after` flag writes a list file using the same format flags
as [`lsf`](/commands/rclone_lsf/#synopsis) (including [customizable options
for hash, modtime, etc.](/commands/rclone_lsf/#synopsis))
Conceptually it is similar to rsync's `--itemize-changes`, but not identical
-- it should output an accurate list of what will be on the destination
after the sync.

Note that these logger flags have a few limitations, and certain scenarios
are not currently supported:

- `--max-duration` / `CutoffModeHard`
- `--compare-dest` / `--copy-dest`
- server-side moves of an entire dir at once
- High-level retries, because there would be duplicates (use `--retries 1` to disable)
- Possibly some unusual error scenarios

Note also that each file is logged during the sync, as opposed to after, so it
is most useful as a predictor of what SHOULD happen to each file
(which may or may not match what actually DID.)

```
rclone sync source:path dest:path [flags]
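As a purely illustrative example (the paths below are made up), a --combined report produced by such a sync could contain lines like:

    = docs/unchanged.txt
    + only-on-src.txt
    - only-on-dst.txt
    * changed-on-both.txt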
fs/logger/logger.go (17, new file)

@@ -0,0 +1,17 @@
// Package logger implements testing for the sync (and bisync) logger
package logger

import (
    _ "github.com/rclone/rclone/backend/all" // import all backends
    "github.com/rclone/rclone/cmd"
    _ "github.com/rclone/rclone/cmd/all" // import all commands
    _ "github.com/rclone/rclone/lib/plugin" // import plugins
)

// Main enables the testscript package. See:
// https://bitfieldconsulting.com/golang/cli-testing
// https://pkg.go.dev/github.com/rogpeppe/go-internal@v1.11.0/testscript
func Main() int {
    cmd.Main()
    return 0
}
fs/logger/logger_test.go (33, new file)

@@ -0,0 +1,33 @@
package logger_test

import (
    "os"
    "path/filepath"
    "testing"

    "github.com/rclone/rclone/fs/logger"
    "github.com/rogpeppe/go-internal/testscript"
)

// TestMain drives the tests
func TestMain(m *testing.M) {
    // This enables the testscript package. See:
    // https://bitfieldconsulting.com/golang/cli-testing
    // https://pkg.go.dev/github.com/rogpeppe/go-internal@v1.11.0/testscript
    os.Exit(testscript.RunMain(m, map[string]func() int{
        "rclone": logger.Main,
    }))
}

func TestLogger(t *testing.T) {
    // Usage: https://bitfieldconsulting.com/golang/cli-testing

    testscript.Run(t, testscript.Params{
        Dir: "testdata/script",
        Setup: func(env *testscript.Env) error {
            env.Setenv("SRC", filepath.Join("$WORK", "src"))
            env.Setenv("DST", filepath.Join("$WORK", "dst"))
            return nil
        },
    })
}
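As a usage note, these testscript cases should be runnable with the standard Go test tooling from the repository root, along the lines of the following (assuming network access and the external unzip, sort, rsync and diff tools the scripts shell out to):

    go test ./fs/logger/ -run TestLogger -v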
fs/logger/testdata/script/TestBeforeVsAfter.txtar (48, new file, vendored)

@@ -0,0 +1,48 @@
# tests whether an md5sum file generated post-sync matches our pre-sync prediction

# Filling src and dst with two different versions of rclone source!:
exec rclone copyurl https://github.com/rclone/rclone/archive/refs/tags/v1.49.1.zip $SRC/src.zip
exec rclone copyurl https://github.com/rclone/rclone/archive/refs/tags/v1.54.1.zip $DST/dst.zip
exec unzip $SRC/src.zip -d $SRC
exec unzip $DST/dst.zip -d $DST


# generating sumfiles:
exec rclone md5sum $SRC --output-file $WORK/src-before.txt
exec rclone md5sum $DST --output-file $WORK/dst-before.txt

# running sync with output files:
exec rclone sync $SRC $DST --match $WORK/SYNCmatch.txt --combined $WORK/SYNCcombined.txt --missing-on-src $WORK/SYNCmissingonsrc.txt --missing-on-dst $WORK/SYNCmissingondst.txt --error $WORK/SYNCerr.txt --differ $WORK/SYNCdiffer.txt --dest-after $WORK/SYNCdestafter.txt --format 'hp' --separator ' '

# generating sumfiles:
exec rclone md5sum $SRC --output-file $WORK/src-after.txt
exec rclone md5sum $DST --output-file $WORK/dst-after.txt

# sorting them by line and diffing:
exec sort $WORK/src-before.txt -o $WORK/src-before.txt
exec sort $WORK/dst-before.txt -o $WORK/dst-before.txt
exec sort $WORK/src-after.txt -o $WORK/src-after.txt
exec sort $WORK/dst-after.txt -o $WORK/dst-after.txt

exec sort $WORK/SYNCmatch.txt -o $WORK/SYNCmatch.txt
exec sort $WORK/SYNCcombined.txt -o $WORK/SYNCcombined.txt
exec sort $WORK/SYNCmissingonsrc.txt -o $WORK/SYNCmissingonsrc.txt
exec sort $WORK/SYNCmissingondst.txt -o $WORK/SYNCmissingondst.txt
exec sort $WORK/SYNCerr.txt -o $WORK/SYNCerr.txt
exec sort $WORK/SYNCdiffer.txt -o $WORK/SYNCdiffer.txt
exec sort $WORK/SYNCdestafter.txt -o $WORK/SYNCdestafter.txt

# diff src before vs src after:
cmp $WORK/src-before.txt $WORK/src-after.txt

# diff dst before vs dst after:
! cmp $WORK/dst-before.txt $WORK/dst-after.txt

# diff src before vs dst after:
cmp $WORK/src-before.txt $WORK/dst-after.txt

# diff dst before vs src after:
! cmp $WORK/dst-before.txt $WORK/src-after.txt

# diff md5sum dst after vs sync dest-after:
cmp $WORK/dst-after.txt $WORK/SYNCdestafter.txt
fs/logger/testdata/script/TestCheckVsSync.txtar (48, new file, vendored)

@@ -0,0 +1,48 @@
# tests whether rclone check and rclone sync output exactly the same file lists.

# Filling src and dst with makefiles:
exec rclone test makefiles $SRC --seed 0
exec rclone test makefiles $DST --seed 1
exec rclone test makefiles $SRC --seed 2
exec rclone test makefiles $DST --seed 2

# running rclone check for baseline test:
# error is expected here:
! exec rclone check $SRC $DST --match $WORK/CHECKmatch.txt --combined $WORK/CHECKcombined.txt --missing-on-src $WORK/CHECKmissingonsrc.txt --missing-on-dst $WORK/CHECKmissingondst.txt --error $WORK/CHECKerr.txt --differ $WORK/CHECKdiffer.txt -q

# running sync with output files:
exec rclone sync $SRC $DST --match $WORK/SYNCmatch.txt --combined $WORK/SYNCcombined.txt --missing-on-src $WORK/SYNCmissingonsrc.txt --missing-on-dst $WORK/SYNCmissingondst.txt --error $WORK/SYNCerr.txt --differ $WORK/SYNCdiffer.txt

# sorting them by line and diffing:
exec sort $WORK/CHECKmatch.txt -o $WORK/CHECKmatch.txt
exec sort $WORK/CHECKcombined.txt -o $WORK/CHECKcombined.txt
exec sort $WORK/CHECKmissingonsrc.txt -o $WORK/CHECKmissingonsrc.txt
exec sort $WORK/CHECKmissingondst.txt -o $WORK/CHECKmissingondst.txt
exec sort $WORK/CHECKerr.txt -o $WORK/CHECKerr.txt
exec sort $WORK/CHECKdiffer.txt -o $WORK/CHECKdiffer.txt

exec sort $WORK/SYNCmatch.txt -o $WORK/SYNCmatch.txt
exec sort $WORK/SYNCcombined.txt -o $WORK/SYNCcombined.txt
exec sort $WORK/SYNCmissingonsrc.txt -o $WORK/SYNCmissingonsrc.txt
exec sort $WORK/SYNCmissingondst.txt -o $WORK/SYNCmissingondst.txt
exec sort $WORK/SYNCerr.txt -o $WORK/SYNCerr.txt
exec sort $WORK/SYNCdiffer.txt -o $WORK/SYNCdiffer.txt

# diff match check vs. sync:
cmp $WORK/CHECKmatch.txt $WORK/SYNCmatch.txt
# diff combined check vs. sync:
cmp $WORK/CHECKcombined.txt $WORK/SYNCcombined.txt
# diff missingonsrc check vs. sync:
cmp $WORK/CHECKmissingonsrc.txt $WORK/SYNCmissingonsrc.txt
# diff missingondst check vs. sync:
cmp $WORK/CHECKmissingondst.txt $WORK/SYNCmissingondst.txt
# diff error check vs. sync:
cmp $WORK/CHECKerr.txt $WORK/SYNCerr.txt
# diff differ check vs. sync:
cmp $WORK/CHECKdiffer.txt $WORK/SYNCdiffer.txt

# verify accuracy
exec rclone check $SRC $DST
exec diff -rya --suppress-common-lines $SRC $DST
[!windows] exec rsync -aEvhPu $SRC/ $WORK/rsyncDst
[!windows] exec rclone check $DST $WORK/rsyncDst
fs/logger/testdata/script/TestRepoCompare.txtar (52, new file, vendored)

@@ -0,0 +1,52 @@
# tests whether rclone check and rclone sync output exactly the same file lists.
# uses two different old versions of rclone source code for the src and dst
# to produce a more realistic test than makefiles
# (has lots of files with same name but different content)

# Filling src and dst with two different versions of rclone source!:
exec rclone copyurl https://github.com/rclone/rclone/archive/refs/tags/v1.49.1.zip $SRC/src.zip
exec rclone copyurl https://github.com/rclone/rclone/archive/refs/tags/v1.54.1.zip $DST/dst.zip
exec unzip $SRC/src.zip -d $SRC
exec unzip $DST/dst.zip -d $DST


# running rclone check for baseline test:
# error is expected here:
! exec rclone check $SRC $DST --match $WORK/CHECKmatch.txt --combined $WORK/CHECKcombined.txt --missing-on-src $WORK/CHECKmissingonsrc.txt --missing-on-dst $WORK/CHECKmissingondst.txt --error $WORK/CHECKerr.txt --differ $WORK/CHECKdiffer.txt -q

# running sync with output files:
exec rclone sync $SRC $DST --match $WORK/SYNCmatch.txt --combined $WORK/SYNCcombined.txt --missing-on-src $WORK/SYNCmissingonsrc.txt --missing-on-dst $WORK/SYNCmissingondst.txt --error $WORK/SYNCerr.txt --differ $WORK/SYNCdiffer.txt

# sorting them by line and diffing:
exec sort $WORK/CHECKmatch.txt -o $WORK/CHECKmatch.txt
exec sort $WORK/CHECKcombined.txt -o $WORK/CHECKcombined.txt
exec sort $WORK/CHECKmissingonsrc.txt -o $WORK/CHECKmissingonsrc.txt
exec sort $WORK/CHECKmissingondst.txt -o $WORK/CHECKmissingondst.txt
exec sort $WORK/CHECKerr.txt -o $WORK/CHECKerr.txt
exec sort $WORK/CHECKdiffer.txt -o $WORK/CHECKdiffer.txt

exec sort $WORK/SYNCmatch.txt -o $WORK/SYNCmatch.txt
exec sort $WORK/SYNCcombined.txt -o $WORK/SYNCcombined.txt
exec sort $WORK/SYNCmissingonsrc.txt -o $WORK/SYNCmissingonsrc.txt
exec sort $WORK/SYNCmissingondst.txt -o $WORK/SYNCmissingondst.txt
exec sort $WORK/SYNCerr.txt -o $WORK/SYNCerr.txt
exec sort $WORK/SYNCdiffer.txt -o $WORK/SYNCdiffer.txt

# diff match check vs. sync:
cmp $WORK/CHECKmatch.txt $WORK/SYNCmatch.txt
# diff combined check vs. sync:
cmp $WORK/CHECKcombined.txt $WORK/SYNCcombined.txt
# diff missingonsrc check vs. sync:
cmp $WORK/CHECKmissingonsrc.txt $WORK/SYNCmissingonsrc.txt
# diff missingondst check vs. sync:
cmp $WORK/CHECKmissingondst.txt $WORK/SYNCmissingondst.txt
# diff error check vs. sync:
cmp $WORK/CHECKerr.txt $WORK/SYNCerr.txt
# diff differ check vs. sync:
cmp $WORK/CHECKdiffer.txt $WORK/SYNCdiffer.txt

# verify accuracy
exec rclone check $SRC $DST
exec diff -rya --suppress-common-lines $SRC $DST
[!windows] exec rsync -aEvhPu $SRC/ $WORK/rsyncDst
[!windows] exec rclone check $DST $WORK/rsyncDst
@@ -8,7 +8,6 @@ import (
     "io"
 
     "github.com/rclone/rclone/fs"
-    "github.com/rclone/rclone/fs/config/flags"
     "github.com/rclone/rclone/fs/hash"
     "github.com/spf13/pflag"
 )
@@ -107,28 +106,6 @@ type LoggerOpt struct {
     Absolute bool
 }
 
-// AddLoggerFlags adds the logger flags to the cmdFlags command
-func AddLoggerFlags(cmdFlags *pflag.FlagSet, opt *LoggerOpt, combined, missingOnSrc, missingOnDst, match, differ, errFile, destAfter *string) {
-    flags.StringVarP(cmdFlags, combined, "combined", "", *combined, "Make a combined report of changes to this file", "Sync")
-    flags.StringVarP(cmdFlags, missingOnSrc, "missing-on-src", "", *missingOnSrc, "Report all files missing from the source to this file", "Sync")
-    flags.StringVarP(cmdFlags, missingOnDst, "missing-on-dst", "", *missingOnDst, "Report all files missing from the destination to this file", "Sync")
-    flags.StringVarP(cmdFlags, match, "match", "", *match, "Report all matching files to this file", "Sync")
-    flags.StringVarP(cmdFlags, differ, "differ", "", *differ, "Report all non-matching files to this file", "Sync")
-    flags.StringVarP(cmdFlags, errFile, "error", "", *errFile, "Report all files with errors (hashing or reading) to this file", "Sync")
-    flags.StringVarP(cmdFlags, destAfter, "dest-after", "", *destAfter, "Report all files that exist on the dest post-sync", "Sync")
-
-    // lsf flags for destAfter
-    flags.StringVarP(cmdFlags, &opt.Format, "format", "F", "p", "Output format - see lsf help for details", "Sync")
-    flags.StringVarP(cmdFlags, &opt.Separator, "separator", "s", ";", "Separator for the items in the format", "Sync")
-    flags.BoolVarP(cmdFlags, &opt.DirSlash, "dir-slash", "d", true, "Append a slash to directory names", "Sync")
-    flags.FVarP(cmdFlags, &opt.HashType, "hash", "", "Use this hash when `h` is used in the format MD5|SHA-1|DropboxHash", "Sync")
-    flags.BoolVarP(cmdFlags, &opt.FilesOnly, "files-only", "", true, "Only list files", "Sync")
-    flags.BoolVarP(cmdFlags, &opt.DirsOnly, "dirs-only", "", false, "Only list directories", "Sync")
-    flags.BoolVarP(cmdFlags, &opt.Csv, "csv", "", false, "Output in CSV format", "Sync")
-    flags.BoolVarP(cmdFlags, &opt.Absolute, "absolute", "", false, "Put a leading / in front of path names", "Sync")
-    // flags.BoolVarP(cmdFlags, &recurse, "recursive", "R", false, "Recurse into the listing", "")
-}
-
 // WithLogger stores logger in ctx and returns a copy of ctx in which loggerKey = logger
 func WithLogger(ctx context.Context, logger LoggerFn) context.Context {
     return context.WithValue(ctx, loggerKey, logger)
fs/operations/operationsflags/operationsflags.go (45, new file)

@@ -0,0 +1,45 @@
// Package operationsflags defines the flags used by rclone operations.
// It is decoupled into a separate package so it can be replaced.
package operationsflags

import (
    "github.com/rclone/rclone/fs/config/flags"
    "github.com/rclone/rclone/fs/hash"
    "github.com/rclone/rclone/fs/operations"
    "github.com/spf13/pflag"
)

// AddLoggerFlagsOptions contains options for the Logger Flags
type AddLoggerFlagsOptions struct {
    Combined     string // a file with file names with leading sigils
    MissingOnSrc string // files only in the destination
    MissingOnDst string // files only in the source
    Match        string // matching files
    Differ       string // differing files
    ErrFile      string // files with errors of some kind
    DestAfter    string // files that exist on the destination post-sync
}

// AddLoggerFlags adds the logger flags to the cmdFlags command
func AddLoggerFlags(cmdFlags *pflag.FlagSet, opt *operations.LoggerOpt, flagsOpt *AddLoggerFlagsOptions) {
    flags.StringVarP(cmdFlags, &flagsOpt.Combined, "combined", "", flagsOpt.Combined, "Make a combined report of changes to this file", "Sync")
    flags.StringVarP(cmdFlags, &flagsOpt.MissingOnSrc, "missing-on-src", "", flagsOpt.MissingOnSrc, "Report all files missing from the source to this file", "Sync")
    flags.StringVarP(cmdFlags, &flagsOpt.MissingOnDst, "missing-on-dst", "", flagsOpt.MissingOnDst, "Report all files missing from the destination to this file", "Sync")
    flags.StringVarP(cmdFlags, &flagsOpt.Match, "match", "", flagsOpt.Match, "Report all matching files to this file", "Sync")
    flags.StringVarP(cmdFlags, &flagsOpt.Differ, "differ", "", flagsOpt.Differ, "Report all non-matching files to this file", "Sync")
    flags.StringVarP(cmdFlags, &flagsOpt.ErrFile, "error", "", flagsOpt.ErrFile, "Report all files with errors (hashing or reading) to this file", "Sync")
    flags.StringVarP(cmdFlags, &flagsOpt.DestAfter, "dest-after", "", flagsOpt.DestAfter, "Report all files that exist on the dest post-sync", "Sync")

    // lsf flags for destAfter
    flags.StringVarP(cmdFlags, &opt.Format, "format", "F", "p", "Output format - see lsf help for details", "Sync")
    flags.StringVarP(cmdFlags, &opt.TimeFormat, "timeformat", "t", "", "Specify a custom time format, or 'max' for max precision supported by remote (default: 2006-01-02 15:04:05)", "")
    flags.StringVarP(cmdFlags, &opt.Separator, "separator", "s", ";", "Separator for the items in the format", "Sync")
    flags.BoolVarP(cmdFlags, &opt.DirSlash, "dir-slash", "d", true, "Append a slash to directory names", "Sync")
    opt.HashType = hash.MD5
    flags.FVarP(cmdFlags, &opt.HashType, "hash", "", "Use this hash when `h` is used in the format MD5|SHA-1|DropboxHash", "Sync")
    flags.BoolVarP(cmdFlags, &opt.FilesOnly, "files-only", "", true, "Only list files", "Sync")
    flags.BoolVarP(cmdFlags, &opt.DirsOnly, "dirs-only", "", false, "Only list directories", "Sync")
    flags.BoolVarP(cmdFlags, &opt.Csv, "csv", "", false, "Output in CSV format", "Sync")
    flags.BoolVarP(cmdFlags, &opt.Absolute, "absolute", "", false, "Put a leading / in front of path names", "Sync")
    // flags.BoolVarP(cmdFlags, &recurse, "recursive", "R", false, "Recurse into the listing", "")
}
@@ -1495,10 +1495,10 @@ func TestServerSideMoveOverlap(t *testing.T) {
     r.CheckRemoteItems(t, file1)
 
     // Subdir move with no filters should return ErrorCantMoveOverlapping
-    ctx = predictDstFromLogger(ctx)
+    // ctx = predictDstFromLogger(ctx)
     err = MoveDir(ctx, FremoteMove, r.Fremote, false, false)
     assert.EqualError(t, err, fs.ErrorOverlapping.Error())
-    testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
+    // testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
 
     // Now try with a filter which should also fail with ErrorCantMoveOverlapping
     fi, err := filter.NewFilter(nil)
@@ -1506,10 +1506,10 @@ func TestServerSideMoveOverlap(t *testing.T) {
     fi.Opt.MinSize = 40
     ctx = filter.ReplaceConfig(ctx, fi)
 
-    ctx = predictDstFromLogger(ctx)
+    // ctx = predictDstFromLogger(ctx)
     err = MoveDir(ctx, FremoteMove, r.Fremote, false, false)
     assert.EqualError(t, err, fs.ErrorOverlapping.Error())
-    testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
+    // testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
 }
 
 // Test a sync with overlap
@@ -1822,9 +1822,9 @@ func TestSyncCopyDest(t *testing.T) {
     r.CheckLocalItems(t, file1)
 
     accounting.GlobalStats().ResetCounters()
-    ctx = predictDstFromLogger(ctx)
+    // ctx = predictDstFromLogger(ctx)
     err = Sync(ctx, fdst, r.Flocal, false)
-    testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
+    // testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t) // not currently supported
     require.NoError(t, err)
 
     file1dst := file1
@@ -1838,9 +1838,9 @@ func TestSyncCopyDest(t *testing.T) {
     r.CheckLocalItems(t, file1b)
 
     accounting.GlobalStats().ResetCounters()
-    ctx = predictDstFromLogger(ctx)
+    // ctx = predictDstFromLogger(ctx)
     err = Sync(ctx, fdst, r.Flocal, false)
-    testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
+    // testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
     require.NoError(t, err)
 
     file1bdst := file1b
@@ -1859,9 +1859,9 @@ func TestSyncCopyDest(t *testing.T) {
     r.CheckLocalItems(t, file1c)
 
     accounting.GlobalStats().ResetCounters()
-    ctx = predictDstFromLogger(ctx)
+    // ctx = predictDstFromLogger(ctx)
    err = Sync(ctx, fdst, r.Flocal, false)
-    testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
+    // testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
     require.NoError(t, err)
 
     file2dst := file2
@@ -1878,8 +1878,8 @@ func TestSyncCopyDest(t *testing.T) {
     r.CheckLocalItems(t, file1c, file5)
 
     accounting.GlobalStats().ResetCounters()
-    ctx = predictDstFromLogger(ctx)
-    testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
+    // ctx = predictDstFromLogger(ctx)
+    // testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
     err = Sync(ctx, fdst, r.Flocal, false)
     require.NoError(t, err)
 
@@ -1890,9 +1890,9 @@ func TestSyncCopyDest(t *testing.T) {
 
     // check new dest, new copy
     accounting.GlobalStats().ResetCounters()
-    ctx = predictDstFromLogger(ctx)
+    // ctx = predictDstFromLogger(ctx)
     err = Sync(ctx, fdst, r.Flocal, false)
-    testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
+    // testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
     require.NoError(t, err)
 
     r.CheckRemoteItems(t, file2, file2dst, file3, file4, file4dst)
@@ -1904,9 +1904,9 @@ func TestSyncCopyDest(t *testing.T) {
     r.CheckLocalItems(t, file1c, file5, file7)
 
     accounting.GlobalStats().ResetCounters()
-    ctx = predictDstFromLogger(ctx)
+    // ctx = predictDstFromLogger(ctx)
     err = Sync(ctx, fdst, r.Flocal, false)
-    testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
+    // testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
     require.NoError(t, err)
 
     file7dst := file7
@@ -2312,17 +2312,19 @@ func predictDstFromLogger(ctx context.Context) context.Context {
         file := winner.Obj
         obj, ok := file.(fs.ObjectInfo)
         checksum := ""
+        timeFormat := "2006-01-02 15:04:05"
         if ok {
             if obj.Fs().Hashes().GetOne() == hash.MD5 {
                 // skip if no MD5
-                checksum, _ = obj.Hash(ctx, obj.Fs().Hashes().GetOne())
+                checksum, _ = obj.Hash(ctx, hash.MD5)
             }
+            timeFormat = operations.FormatForLSFPrecision(obj.Fs().Precision())
         }
         errMsg := ""
         if winner.Err != nil {
             errMsg = ";" + winner.Err.Error()
         }
-        operations.SyncFprintf(opt.JSON, "%s;%s;%v;%s%s\n", file.ModTime(ctx).Local().Format("2006-01-02 15:04:05.000000000"), checksum, file.Size(), file.Remote(), errMsg)
+        operations.SyncFprintf(opt.JSON, "%s;%s;%v;%s%s\n", file.ModTime(ctx).Local().Format(timeFormat), checksum, file.Size(), file.Remote(), errMsg)
     }
     }
     return operations.WithSyncLogger(ctx, opt)
@@ -2407,10 +2409,10 @@ func testLoggerVsLsf(ctx context.Context, Fremote fs.Fs, logger *bytes.Buffer, t
     newlogger.Write(logger.Bytes())
 }
 
-    lsf := DstLsf(ctx, Fremote)
-    err := LoggerMatchesLsf(&newlogger, lsf)
+    r := fstest.NewRun(t)
+    if r.Flocal.Precision() == Fremote.Precision() && r.Flocal.Hashes().Contains(hash.MD5) && canTestHash {
+        lsf := DstLsf(ctx, Fremote)
+        err := LoggerMatchesLsf(&newlogger, lsf)
     require.NoError(t, err)
     }
 }