bisync: generate listings concurrently with march -- fixes #7332
Before this change, bisync needed to build a full listing for Path1, then a full listing for Path2, and then compare them -- and each of those tasks had to finish before the next could start. Besides being slow and inefficient, this caused real problems if a file changed between the time bisync checked it on Path1 and the time it checked the corresponding file on Path2.

This change solves these problems by listing both paths concurrently, using the same March infrastructure that check and sync use to traverse two directories in lock-step, taking advantage of Go's concurrency support. Listings should now be much faster, and any given path is now checked at nearly the same moment on both sides, minimizing the window for error.

Further discussion: https://forum.rclone.org/t/bisync-bugs-and-feature-requests/37636#:~:text=4.%20Listings%20should%20alternate%20between%20paths%20to%20minimize%20errors
This commit is contained in:
parent 0cac5d67ab
commit fd95511091
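For readers unfamiliar with rclone's march package, the sketch below is a minimal illustration of the callback shape this change builds on: one lock-step traversal of both paths, with SrcOnly/DstOnly/Match callbacks recording what each side contains. It is not the code from this commit (the real implementation is in the new cmd/bisync/march.go further down); the lister type, listBoth helper, and plain string slices are hypothetical simplifications.

package marchsketch

import (
	"context"
	"sync"

	"github.com/rclone/rclone/fs"
	"github.com/rclone/rclone/fs/march"
)

// lister is a hypothetical march callback that just records which remotes
// were seen on each side. bisync's real callback builds full fileList
// entries (size, modtime, hash) instead.
type lister struct {
	mu    sync.Mutex
	path1 []string
	path2 []string
}

// SrcOnly is called for entries that exist only on Path1 (Fsrc).
func (l *lister) SrcOnly(o fs.DirEntry) (recurse bool) {
	l.record(o, true)
	_, isDir := o.(fs.Directory)
	return isDir // descend into directories so their children are listed too
}

// DstOnly is called for entries that exist only on Path2 (Fdst).
func (l *lister) DstOnly(o fs.DirEntry) (recurse bool) {
	l.record(o, false)
	_, isDir := o.(fs.Directory)
	return isDir
}

// Match is called when an entry exists on both paths, so both sides are
// recorded at nearly the same moment -- the property this commit is after.
func (l *lister) Match(ctx context.Context, o2, o1 fs.DirEntry) (recurse bool) {
	l.record(o1, true)
	l.record(o2, false)
	_, isDir := o1.(fs.Directory)
	return isDir
}

// record appends the entry under a mutex because march may invoke the
// callbacks from several goroutines.
func (l *lister) record(o fs.DirEntry, isPath1 bool) {
	l.mu.Lock()
	defer l.mu.Unlock()
	if isPath1 {
		l.path1 = append(l.path1, o.Remote())
	} else {
		l.path2 = append(l.path2, o.Remote())
	}
}

// listBoth walks fs1 and fs2 in lock-step and returns what was seen on each.
func listBoth(ctx context.Context, fs1, fs2 fs.Fs) ([]string, []string, error) {
	l := &lister{}
	m := &march.March{
		Ctx:      ctx,
		Fsrc:     fs1, // Path1
		Fdst:     fs2, // Path2
		Callback: l,
	}
	err := m.Run(ctx)
	return l.path1, l.path2, err
}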
@@ -39,7 +39,7 @@ func FileExists(file string) bool {
return !os.IsNotExist(err)
}

// CopyFileIfExists is like CopyFile but does to fail if source does not exist
// CopyFileIfExists is like CopyFile but does not fail if source does not exist
func CopyFileIfExists(srcFile, dstFile string) error {
if !FileExists(srcFile) {
return nil

@@ -137,8 +137,9 @@ func (b *bisyncRun) checkconflicts(ctxCheck context.Context, filterCheck *filter
}

// findDeltas
func (b *bisyncRun) findDeltas(fctx context.Context, f fs.Fs, oldListing, newListing, msg string) (ds *deltaSet, err error) {
var old, now *fileList
func (b *bisyncRun) findDeltas(fctx context.Context, f fs.Fs, oldListing string, now *fileList, msg string) (ds *deltaSet, err error) {
var old *fileList
newListing := oldListing + "-new"

old, err = b.loadListing(oldListing)
if err != nil {
@@ -150,7 +151,6 @@ func (b *bisyncRun) findDeltas(fctx context.Context, f fs.Fs, oldListing, newLis
return
}

now, err = b.makeListing(fctx, f, newListing)
if err == nil {
err = b.checkListing(now, newListing, "current "+msg)
}
@@ -235,6 +235,8 @@ func (b *bisyncRun) applyDeltas(ctx context.Context, ds1, ds2 *deltaSet) (change
renamed2 := bilib.Names{}
renameSkipped := bilib.Names{}
deletedonboth := bilib.Names{}
skippedDirs1 := newFileList()
skippedDirs2 := newFileList()

ctxMove := b.opt.setDryRun(ctx)

@@ -304,6 +306,8 @@ func (b *bisyncRun) applyDeltas(ctx context.Context, ds1, ds2 *deltaSet) (change
//if files are identical, leave them alone instead of renaming
if dirs1.has(file) && dirs2.has(file) {
fs.Debugf(nil, "This is a directory, not a file. Skipping equality check and will not rename: %s", file)
ls1.getPut(file, skippedDirs1)
ls2.getPut(file, skippedDirs2)
} else {
equal := matches.Has(file)
if equal {
@@ -424,6 +428,8 @@ func (b *bisyncRun) applyDeltas(ctx context.Context, ds1, ds2 *deltaSet) (change
queues.renamed2 = renamed2
queues.renameSkipped = renameSkipped
queues.deletedonboth = deletedonboth
queues.skippedDirs1 = skippedDirs1
queues.skippedDirs2 = skippedDirs2

return
}
@@ -12,7 +12,6 @@ import (
"sort"
"strconv"
"strings"
"sync"
"time"

"github.com/rclone/rclone/cmd/bisync/bilib"
@@ -20,7 +19,6 @@ import (
"github.com/rclone/rclone/fs/filter"
"github.com/rclone/rclone/fs/hash"
"github.com/rclone/rclone/fs/operations"
"github.com/rclone/rclone/fs/walk"
"golang.org/x/exp/slices"
)

@@ -70,6 +68,9 @@ func newFileList() *fileList {
}

func (ls *fileList) empty() bool {
if ls == nil {
return true
}
return len(ls.list) == 0
}

@@ -99,6 +100,12 @@ func (ls *fileList) getPut(file string, dest *fileList) {
dest.put(file, f.size, f.time, f.hash, f.id, f.flags)
}

func (ls *fileList) getPutAll(dest *fileList) {
for file, f := range ls.info {
dest.put(file, f.size, f.time, f.hash, f.id, f.flags)
}
}

func (ls *fileList) remove(file string) {
if ls.has(file) {
ls.list = slices.Delete(ls.list, slices.Index(ls.list, file), slices.Index(ls.list, file)+1)
@@ -292,13 +299,16 @@ func (b *bisyncRun) loadListing(listing string) (*fileList, error) {
return ls, nil
}

// saveOldListings saves the most recent successful listing, in case we need to rollback on error
func (b *bisyncRun) saveOldListings() {
if err := bilib.CopyFileIfExists(b.listing1, b.listing1+"-old"); err != nil {
fs.Debugf(b.listing1, "error saving old listing1: %v", err)
}
if err := bilib.CopyFileIfExists(b.listing2, b.listing2+"-old"); err != nil {
fs.Debugf(b.listing1, "error saving old listing2: %v", err)
}
b.handleErr(b.listing1, "error saving old Path1 listing", bilib.CopyFileIfExists(b.listing1, b.listing1+"-old"), true, true)
b.handleErr(b.listing2, "error saving old Path2 listing", bilib.CopyFileIfExists(b.listing2, b.listing2+"-old"), true, true)
}

// replaceCurrentListings saves both ".lst-new" listings as ".lst"
func (b *bisyncRun) replaceCurrentListings() {
b.handleErr(b.newListing1, "error replacing Path1 listing", bilib.CopyFileIfExists(b.newListing1, b.listing1), true, true)
b.handleErr(b.newListing2, "error replacing Path2 listing", bilib.CopyFileIfExists(b.newListing2, b.listing2), true, true)
}

func parseHash(str string) (string, string, error) {
@@ -314,71 +324,6 @@ func parseHash(str string) (string, string, error) {
return "", "", fmt.Errorf("invalid hash %q", str)
}

// makeListing will produce listing from directory tree and write it to a file
func (b *bisyncRun) makeListing(ctx context.Context, f fs.Fs, listing string) (ls *fileList, err error) {
ci := fs.GetConfig(ctx)
depth := ci.MaxDepth
hashType := hash.None
if !b.opt.IgnoreListingChecksum {
// Currently bisync just honors --ignore-listing-checksum
// (note that this is different from --ignore-checksum)
// TODO add full support for checksums and related flags
hashType = f.Hashes().GetOne()
}
ls = newFileList()
ls.hash = hashType
var lock sync.Mutex
listType := walk.ListObjects
if b.opt.CreateEmptySrcDirs {
listType = walk.ListAll
}
err = walk.ListR(ctx, f, "", false, depth, listType, func(entries fs.DirEntries) error {
var firstErr error
entries.ForObject(func(o fs.Object) {
//tr := accounting.Stats(ctx).NewCheckingTransfer(o) // TODO
var (
hashVal string
hashErr error
)
if hashType != hash.None {
hashVal, hashErr = o.Hash(ctx, hashType)
if firstErr == nil {
firstErr = hashErr
}
}
time := o.ModTime(ctx).In(TZ)
id := "" // TODO
flags := "-" // "-" for a file and "d" for a directory
lock.Lock()
ls.put(o.Remote(), o.Size(), time, hashVal, id, flags)
lock.Unlock()
//tr.Done(ctx, nil) // TODO
})
if b.opt.CreateEmptySrcDirs {
entries.ForDir(func(o fs.Directory) {
var (
hashVal string
)
time := o.ModTime(ctx).In(TZ)
id := "" // TODO
flags := "d" // "-" for a file and "d" for a directory
lock.Lock()
//record size as 0 instead of -1, so bisync doesn't think it's a google doc
ls.put(o.Remote(), 0, time, hashVal, id, flags)
lock.Unlock()
})
}
return firstErr
})
if err == nil {
err = ls.save(ctx, listing)
}
if err != nil {
b.abort = true
}
return
}

// checkListing verifies that listing is not empty (unless resynching)
func (b *bisyncRun) checkListing(ls *fileList, listing, msg string) error {
if b.opt.Resync || !ls.empty() {
@@ -542,8 +487,6 @@ func (b *bisyncRun) modifyListing(ctx context.Context, src fs.Fs, dst fs.Fs, res
updateLists("src", srcWinners, srcList)
updateLists("dst", dstWinners, dstList)

// TODO: rollback on error

// account for "deltaOthers" we handled separately
if queues.deletedonboth.NotEmpty() {
for file := range queues.deletedonboth {
@@ -587,7 +530,7 @@ func (b *bisyncRun) modifyListing(ctx context.Context, src fs.Fs, dst fs.Fs, res

// recheck the ones we skipped because they were equal
// we never got their info because they were never synced.
// TODO: add flag to skip this for people who don't care and would rather avoid?
// TODO: add flag to skip this? (since it re-lists)
if queues.renameSkipped.NotEmpty() {
skippedList := queues.renameSkipped.ToList()
for _, file := range skippedList {
@@ -596,6 +539,20 @@ func (b *bisyncRun) modifyListing(ctx context.Context, src fs.Fs, dst fs.Fs, res
}
}
}
// skipped dirs -- nothing to recheck, just add them
// (they are not necessarily there already, if they are new)
path1List := srcList
path2List := dstList
if !is1to2 {
path1List = dstList
path2List = srcList
}
if !queues.skippedDirs1.empty() {
queues.skippedDirs1.getPutAll(path1List)
}
if !queues.skippedDirs2.empty() {
queues.skippedDirs2.getPutAll(path2List)
}

if filterRecheck.HaveFilesFrom() {
b.recheck(ctxRecheck, src, dst, srcList, dstList, is1to2)
@@ -665,9 +622,9 @@ func (b *bisyncRun) recheck(ctxRecheck context.Context, src, dst fs.Fs, srcList,
if len(toRollback) > 0 {
srcListing, dstListing := b.getListingNames(is1to2)
oldSrc, err := b.loadListing(srcListing + "-old")
handleErr(oldSrc, "error loading old src listing", err) // TODO: make this critical?
b.handleErr(oldSrc, "error loading old src listing", err, true, true)
oldDst, err := b.loadListing(dstListing + "-old")
handleErr(oldDst, "error loading old dst listing", err) // TODO: make this critical?
b.handleErr(oldDst, "error loading old dst listing", err, true, true)

for _, item := range toRollback {
rollback(item, oldSrc, srcList)
@@ -683,13 +640,6 @@ func (b *bisyncRun) getListingNames(is1to2 bool) (srcListing string, dstListing
return b.listing2, b.listing1
}

func handleErr(o interface{}, msg string, err error) {
// TODO: add option to make critical?
if err != nil {
fs.Debugf(o, "%s: %v", msg, err)
}
}

func rollback(item string, oldList, newList *fileList) {
if oldList.has(item) {
oldList.getPut(item, newList)
cmd/bisync/march.go (new file, 189 lines)
@@ -0,0 +1,189 @@
package bisync

import (
"context"
"sync"

"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/fs/accounting"
"github.com/rclone/rclone/fs/hash"
"github.com/rclone/rclone/fs/march"
)

var ls1 = newFileList()
var ls2 = newFileList()
var err error
var firstErr error
var marchLsLock sync.Mutex
var marchErrLock sync.Mutex
var marchCtx context.Context

func (b *bisyncRun) makeMarchListing(ctx context.Context) (*fileList, *fileList, error) {
ci := fs.GetConfig(ctx)
marchCtx = ctx
b.setupListing()
fs.Debugf(b, "starting to march!")

// set up a march over fdst (Path2) and fsrc (Path1)
m := &march.March{
Ctx: ctx,
Fdst: b.fs2,
Fsrc: b.fs1,
Dir: "",
NoTraverse: false,
Callback: b,
DstIncludeAll: false,
NoCheckDest: false,
NoUnicodeNormalization: ci.NoUnicodeNormalization,
}
err = m.Run(ctx)

fs.Debugf(b, "march completed. err: %v", err)
if err == nil {
err = firstErr
}
if err != nil {
b.abort = true
}

// save files
err = ls1.save(ctx, b.newListing1)
if err != nil {
b.abort = true
}
err = ls2.save(ctx, b.newListing2)
if err != nil {
b.abort = true
}

return ls1, ls2, err
}

// SrcOnly have an object which is on path1 only
func (b *bisyncRun) SrcOnly(o fs.DirEntry) (recurse bool) {
fs.Debugf(o, "path1 only")
b.parse(o, true)
return isDir(o)
}

// DstOnly have an object which is on path2 only
func (b *bisyncRun) DstOnly(o fs.DirEntry) (recurse bool) {
fs.Debugf(o, "path2 only")
b.parse(o, false)
return isDir(o)
}

// Match is called when object exists on both path1 and path2 (whether equal or not)
func (b *bisyncRun) Match(ctx context.Context, o2, o1 fs.DirEntry) (recurse bool) {
fs.Debugf(o1, "both path1 and path2")
b.parse(o1, true)
b.parse(o2, false)
return isDir(o1)
}

func isDir(e fs.DirEntry) bool {
switch x := e.(type) {
case fs.Object:
fs.Debugf(x, "is Object")
return false
case fs.Directory:
fs.Debugf(x, "is Dir")
return true
default:
fs.Debugf(e, "is unknown")
}
return false
}

func (b *bisyncRun) parse(e fs.DirEntry, isPath1 bool) {
switch x := e.(type) {
case fs.Object:
b.ForObject(x, isPath1)
case fs.Directory:
if b.opt.CreateEmptySrcDirs {
b.ForDir(x, isPath1)
}
default:
fs.Debugf(e, "is unknown")
}
}

func (b *bisyncRun) setupListing() {
ls1 = newFileList()
ls2 = newFileList()

hashType1 := hash.None
hashType2 := hash.None
if !b.opt.IgnoreListingChecksum {
// Currently bisync just honors --ignore-listing-checksum
// (note that this is different from --ignore-checksum)
// TODO add full support for checksums and related flags
hashType1 = b.fs1.Hashes().GetOne()
hashType2 = b.fs2.Hashes().GetOne()
}

ls1.hash = hashType1
ls2.hash = hashType2
}

func (b *bisyncRun) ForObject(o fs.Object, isPath1 bool) {
tr := accounting.Stats(marchCtx).NewCheckingTransfer(o, "listing file - "+whichPath(isPath1))
defer func() {
tr.Done(marchCtx, nil)
}()
var (
hashVal string
hashErr error
)
ls := whichLs(isPath1)
hashType := ls.hash
if hashType != hash.None {
hashVal, hashErr = o.Hash(marchCtx, hashType)
marchErrLock.Lock()
if firstErr == nil {
firstErr = hashErr
}
marchErrLock.Unlock()
}
time := o.ModTime(marchCtx).In(TZ)
id := "" // TODO
flags := "-" // "-" for a file and "d" for a directory
marchLsLock.Lock()
ls.put(o.Remote(), o.Size(), time, hashVal, id, flags)
marchLsLock.Unlock()
}

func (b *bisyncRun) ForDir(o fs.Directory, isPath1 bool) {
tr := accounting.Stats(marchCtx).NewCheckingTransfer(o, "listing dir - "+whichPath(isPath1))
defer func() {
tr.Done(marchCtx, nil)
}()
ls := whichLs(isPath1)
time := o.ModTime(marchCtx).In(TZ)
id := "" // TODO
flags := "d" // "-" for a file and "d" for a directory
marchLsLock.Lock()
//record size as 0 instead of -1, so bisync doesn't think it's a google doc
ls.put(o.Remote(), 0, time, "", id, flags)
marchLsLock.Unlock()
}

func whichLs(isPath1 bool) *fileList {
ls := ls1
if !isPath1 {
ls = ls2
}
return ls
}

func whichPath(isPath1 bool) string {
s := "Path1"
if !isPath1 {
s = "Path2"
}
return s
}

// TODO:
// equality check?
// unicode stuff
@@ -45,6 +45,8 @@ type queues struct {
renamed1 bilib.Names // renamed on 1 and copied to 2
renamed2 bilib.Names // renamed on 2 and copied to 1
renameSkipped bilib.Names // not renamed because it was equal
skippedDirs1 *fileList
skippedDirs2 *fileList
deletedonboth bilib.Names
}

@@ -217,10 +219,15 @@ func (b *bisyncRun) runLocked(octx context.Context) (err error) {
return errors.New("cannot find prior Path1 or Path2 listings, likely due to critical error on prior run")
}

fs.Infof(nil, "Building Path1 and Path2 listings")
ls1, ls2, err = b.makeMarchListing(fctx)
if err != nil {
return err
}

// Check for Path1 deltas relative to the prior sync
fs.Infof(nil, "Path1 checking for diffs")
newListing1 := b.listing1 + "-new"
ds1, err := b.findDeltas(fctx, b.fs1, b.listing1, newListing1, "Path1")
ds1, err := b.findDeltas(fctx, b.fs1, b.listing1, ls1, "Path1")
if err != nil {
return err
}
@@ -228,8 +235,7 @@ func (b *bisyncRun) runLocked(octx context.Context) (err error) {

// Check for Path2 deltas relative to the prior sync
fs.Infof(nil, "Path2 checking for diffs")
newListing2 := b.listing2 + "-new"
ds2, err := b.findDeltas(fctx, b.fs2, b.listing2, newListing2, "Path2")
ds2, err := b.findDeltas(fctx, b.fs2, b.listing2, ls2, "Path2")
if err != nil {
return err
}
@@ -298,8 +304,7 @@ func (b *bisyncRun) runLocked(octx context.Context) (err error) {
b.saveOldListings()
// save new listings
if noChanges {
err1 = bilib.CopyFileIfExists(newListing1, b.listing1)
err2 = bilib.CopyFileIfExists(newListing2, b.listing2)
b.replaceCurrentListings()
} else {
if changes1 { // 2to1
err1 = b.modifyListing(fctx, b.fs2, b.fs1, results2to1, queues, false)
@@ -360,7 +365,10 @@ func (b *bisyncRun) runLocked(octx context.Context) (err error) {
func (b *bisyncRun) resync(octx, fctx context.Context) error {
fs.Infof(nil, "Copying unique Path2 files to Path1")

filesNow1, err := b.makeListing(fctx, b.fs1, b.newListing1)
// TODO: remove this listing eventually.
// Listing here is only really necessary for our --ignore-existing logic
// which would be more efficiently implemented by setting ci.IgnoreExisting
filesNow1, filesNow2, err := b.makeMarchListing(fctx)
if err == nil {
err = b.checkListing(filesNow1, b.newListing1, "current Path1")
}
@@ -368,10 +376,7 @@ func (b *bisyncRun) resync(octx, fctx context.Context) error {
return err
}

filesNow2, err := b.makeListing(fctx, b.fs2, b.newListing2)
if err == nil {
err = b.checkListing(filesNow2, b.newListing2, "current Path2")
}
err = b.checkListing(filesNow2, b.newListing2, "current Path2")
if err != nil {
return err
}
@@ -468,13 +473,8 @@ func (b *bisyncRun) resync(octx, fctx context.Context) error {
}

fs.Infof(nil, "Resync updating listings")
b.saveOldListings() // TODO: also make replaceCurrentListings?
if err := bilib.CopyFileIfExists(b.newListing1, b.listing1); err != nil {
return err
}
if err := bilib.CopyFileIfExists(b.newListing2, b.listing2); err != nil {
return err
}
b.saveOldListings()
b.replaceCurrentListings()

// resync 2to1
queues.copy2to1 = bilib.ToNames(copy2to1)
@@ -574,3 +574,17 @@ func (b *bisyncRun) testFn() {
b.opt.TestFn()
}
}

func (b *bisyncRun) handleErr(o interface{}, msg string, err error, critical, retryable bool) {
if err != nil {
if retryable {
b.retryable = true
}
if critical {
b.critical = true
fs.Errorf(o, "%s: %v", msg, err)
} else {
fs.Debugf(o, "%s: %v", msg, err)
}
}
}
@@ -16,6 +16,7 @@ INFO : Bisync successful
(07) : test sync should pass
(08) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is newer - file1.copy1.txt
INFO : - Path1 File is newer - file1.copy2.txt
@@ -46,6 +47,7 @@ INFO : Bisync successful
(12) : test sync should fail
(13) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is newer - RCLONE_TEST
INFO : - Path1 File is OLDER - file1.copy1.txt
@@ -64,6 +66,7 @@ Bisync error: all files were changed
(14) : test sync with force should pass
(15) : bisync force
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is newer - RCLONE_TEST
INFO : - Path1 File is OLDER - file1.copy1.txt

@@ -17,6 +17,7 @@ INFO : Bisync successful
(07) : test bisync run
(08) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is newer - subdir/file20.txt
INFO : Path1: 1 changes: 0 new, 1 newer, 0 older, 0 deleted

@@ -49,6 +49,7 @@ INFO : Bisync successful
(31) : test bisync run
(32) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is newer - file2.txt
INFO : - Path1 File was deleted - file4.txt

@@ -12,6 +12,7 @@ INFO : Bisync successful
(04) : test 1. see that check-access passes with the initial setup
(05) : bisync check-access
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : Checking access health
@@ -25,6 +26,7 @@ INFO : Bisync successful
(07) : delete-file {path2/}subdir/RCLONE_TEST
(08) : bisync check-access
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : - Path2 File was deleted - subdir/RCLONE_TEST
@@ -49,6 +51,7 @@ INFO : Bisync successful
(13) : test 4. run sync with check-access. should pass.
(14) : bisync check-access
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : Checking access health
@@ -62,6 +65,7 @@ INFO : Bisync successful
(16) : delete-file {path1/}RCLONE_TEST
(17) : bisync check-access
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File was deleted - RCLONE_TEST
INFO : Path1: 1 changes: 0 new, 0 newer, 0 older, 1 deleted
@@ -95,6 +99,7 @@ INFO : Bisync successful
(24) : test 8. run sync with --check-access. should pass.
(25) : bisync check-access
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : Checking access health
@@ -18,6 +18,7 @@ INFO : Bisync successful
(07) : bisync check-access filters-file={workdir/}exclude-other-filtersfile.txt
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}exclude-other-filtersfile.txt
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : Checking access health
@@ -38,6 +39,7 @@ INFO : Bisync successful
(15) : bisync check-access filters-file={workdir/}exclude-other-filtersfile.txt
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}exclude-other-filtersfile.txt
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : Checking access health
@@ -56,6 +58,7 @@ INFO : Bisync successful
(21) : bisync check-access filters-file={workdir/}exclude-other-filtersfile.txt
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}exclude-other-filtersfile.txt
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File was deleted - subdir/RCLONE_TEST
INFO : Path1: 1 changes: 0 new, 0 newer, 0 older, 1 deleted
@@ -88,6 +91,7 @@ INFO : Bisync successful
(30) : bisync check-access filters-file={workdir/}include-other-filtersfile.txt
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}include-other-filtersfile.txt
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : Checking access health
@@ -107,6 +111,7 @@ INFO : Bisync successful
(37) : bisync check-access filters-file={workdir/}include-other-filtersfile.txt
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}include-other-filtersfile.txt
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : Checking access health
@@ -126,6 +131,7 @@ INFO : Bisync successful
(44) : bisync check-access filters-file={workdir/}include-other-filtersfile.txt
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}include-other-filtersfile.txt
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File was deleted - subdir/RCLONE_TEST
INFO : - Path1 File was deleted - subdirX/subdirX1/RCLONE_TEST
@@ -136,8 +142,8 @@ INFO : Path2: 1 changes: 0 new, 0 newer, 0 older, 1 deleted
INFO : Checking access health
ERROR : Access test failed: Path1 count 3, Path2 count 4 - RCLONE_TEST
ERROR : - Access test failed: Path1 file not found in Path2 - RCLONE_TEST
ERROR : - Access test failed: Path2 file not found in Path1 - subdirX/subdirX1/RCLONE_TEST
ERROR : - Access test failed: Path2 file not found in Path1 - subdir/RCLONE_TEST
ERROR : - Access test failed: Path2 file not found in Path1 - subdirX/subdirX1/RCLONE_TEST
ERROR : Bisync critical error: check file check failed
ERROR : Bisync aborted. Must run --resync to recover.
Bisync error: bisync aborted
@@ -12,6 +12,7 @@ INFO : Bisync successful
(04) : test 1. see that check-access passes with the initial setup
(05) : bisync check-access check-filename=.chk_file
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : Checking access health
@@ -26,6 +27,7 @@ INFO : Bisync successful
(08) : delete-file {path2/}subdir/.chk_file
(09) : bisync check-access check-filename=.chk_file
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : - Path2 File was deleted - subdir/.chk_file
@@ -52,6 +54,7 @@ INFO : Bisync successful
(14) : test 4. run sync with check-access. should pass.
(15) : bisync check-access check-filename=.chk_file
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : Checking access health

@@ -51,6 +51,7 @@ INFO : Bisync successful
(20) : test 7. run normal sync with check-sync enabled (default)
(21) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : No changes found
@@ -61,6 +62,7 @@ INFO : Bisync successful
(22) : test 8. run normal sync with no-check-sync
(23) : bisync no-check-sync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : No changes found

@@ -24,6 +24,7 @@ INFO : Bisync successful
(15) : test 2. Run bisync without --create-empty-src-dirs
(16) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : No changes found
@@ -39,6 +40,7 @@ subdir/
(20) : test 4.Run bisync WITH --create-empty-src-dirs
(21) : bisync create-empty-src-dirs
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is new - subdir
INFO : Path1: 1 changes: 1 new, 0 newer, 0 older, 0 deleted
@@ -68,6 +70,7 @@ subdir/
(33) : test 7. Run bisync without --create-empty-src-dirs
(34) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File was deleted - RCLONE_TEST
INFO : - Path1 File was deleted - subdir
@@ -115,6 +118,7 @@ subdir/

(51) : bisync create-empty-src-dirs
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File was deleted - subdir
INFO : Path1: 1 changes: 0 new, 0 newer, 0 older, 1 deleted
@@ -134,6 +138,7 @@ INFO : Bisync successful
(55) : test 11. bisync again (because if we leave subdir in listings, test will fail due to mismatched modtime)
(56) : bisync create-empty-src-dirs
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : No changes found
@@ -66,6 +66,7 @@ INFO : Bisync successful
(30) : test sync with dry-run
(31) : bisync dry-run
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is newer - file2.txt
INFO : - Path1 File was deleted - file4.txt
@@ -120,6 +121,7 @@ INFO : Bisync successful
(33) : test sync without dry-run
(34) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is newer - file2.txt
INFO : - Path1 File was deleted - file4.txt

@@ -23,6 +23,7 @@ INFO : Bisync successful
(13) : test bisync run
(14) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is newer - file1.txt
INFO : - Path1 File is newer - file2.txt

@@ -19,6 +19,7 @@ INFO : Bisync successful
(09) : test normal sync of subdirs with extended chars
(10) : bisync subdir=測試_Русский_{spc}_{spc}_ě_áñ
INFO : Synching Path1 "{path1/}測試_Русский_ _ _ě_áñ/" with Path2 "{path2/}測試_Русский_ _ _ě_áñ/"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is new - 測試_file1p1
INFO : Path1: 1 changes: 1 new, 0 newer, 0 older, 0 deleted
@@ -45,6 +46,7 @@ INFO : Bisync successful
(14) : delete-file {path1/}測試_Русский_{spc}_{spc}_ě_áñ/測試_check{spc}file
(15) : bisync check-access check-filename=測試_check{spc}file
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File was deleted - 測試_Русский_ _ _ě_áñ/測試_check file
INFO : Path1: 1 changes: 0 new, 0 newer, 0 older, 1 deleted
@@ -68,6 +70,7 @@ INFO : Resync updating listings
INFO : Bisync successful
(19) : bisync check-access check-filename=測試_check{spc}file
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : Checking access health
@@ -92,6 +95,7 @@ INFO : Bisync successful
(25) : bisync filters-file={workdir/}測試_filtersfile.txt
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}測試_filtersfile.txt
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : No changes found
@@ -38,6 +38,7 @@ INFO : Bisync successful
(26) : test bisync run
(27) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File was deleted - Русский.txt
INFO : - Path1 File is new - file1_with white space.txt

@@ -24,6 +24,7 @@ INFO : Bisync successful
(11) : bisync filters-file={workdir/}filtersfile.flt
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}filtersfile.flt
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is new - subdir/fileZ.txt
INFO : Path1: 1 changes: 1 new, 0 newer, 0 older, 0 deleted

@@ -41,6 +41,7 @@ INFO : Bisync successful
(13) : bisync filters-file={workdir/}filtersfile.txt
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}filtersfile.txt
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : No changes found

@@ -23,6 +23,7 @@ INFO : Bisync successful
(08) : test bisync run
(09) : bisync ignore-listing-checksum
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is newer - subdir/file20.txt
INFO : Path1: 1 changes: 0 new, 1 newer, 0 older, 0 deleted
@@ -19,6 +19,7 @@ INFO : Bisync successful
(10) : test sync should fail due to too many local deletes
(11) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File was deleted - file1.txt
INFO : - Path1 File was deleted - file2.txt
@@ -35,6 +36,7 @@ Bisync error: too many deletes
(13) : test change max-delete limit to 60%. sync should run.
(14) : bisync max-delete=60
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File was deleted - file1.txt
INFO : - Path1 File was deleted - file2.txt

@@ -19,6 +19,7 @@ INFO : Bisync successful
(10) : test sync should fail due to too many path2 deletes
(11) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : - Path2 File was deleted - file1.txt
@@ -35,6 +36,7 @@ Bisync error: too many deletes
(13) : test apply force option. sync should run.
(14) : bisync force
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : - Path2 File was deleted - file1.txt

@@ -23,6 +23,7 @@ INFO : Bisync successful
(10) : test run bisync with custom options
(11) : bisync size-only
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is newer - file1.txt
INFO : - Path1 File is newer - subdir/file20.txt
@@ -72,6 +72,7 @@ INFO : Bisync successful
(32) : test run normal bisync
(33) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : No changes found
@@ -83,6 +84,7 @@ INFO : Bisync successful
(35) : purge-children {path2/}
(36) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
ERROR : Empty current Path2 listing. Cannot sync to an empty directory: {workdir/}{session}.path2.lst-new

@@ -15,6 +15,7 @@ INFO : Bisync successful
(06) : test 2. run bisync without remove-empty-dirs
(07) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File was deleted - subdir/file20.txt
INFO : Path1: 1 changes: 0 new, 0 newer, 0 older, 1 deleted
@@ -35,6 +36,7 @@ subdir/
(11) : test 4. run bisync with remove-empty-dirs
(12) : bisync remove-empty-dirs
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : Path2 checking for diffs
INFO : No changes found

@@ -18,6 +18,7 @@ INFO : Bisync successful
(10) : test-func
(11) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is newer - file5.txt
INFO : Path1: 1 changes: 0 new, 1 newer, 0 older, 0 deleted
@@ -51,6 +52,7 @@ INFO : Bisync successful
(18) : test-func
(19) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is new - file100.txt
INFO : - Path1 File is new - file5.txt
@@ -268,6 +270,7 @@ INFO : Bisync successful
(26) : test-func
(27) : bisync
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File is new - file5.txt
INFO : Path1: 1 changes: 1 new, 0 newer, 0 older, 0 deleted
@@ -1275,6 +1275,9 @@ about _Unison_ and synchronization in general.
* Final listings are now generated from sync results, to avoid needing to re-list
* Bisync is now much more resilient to changes that happen during a bisync run, and far less prone to critical errors / undetected changes
* Bisync is now capable of rolling a file listing back in cases of uncertainty, essentially marking the file as needing to be rechecked next time.
* A few basic terminal colors are now supported, controllable with [`--color`](/docs/#color-when) (`AUTO`|`NEVER`|`ALWAYS`)
* Initial listing snapshots of Path1 and Path2 are now generated concurrently, using the same "march" infrastructure as `check` and `sync`,
for performance improvements and less [risk of error](https://forum.rclone.org/t/bisync-bugs-and-feature-requests/37636#:~:text=4.%20Listings%20should%20alternate%20between%20paths%20to%20minimize%20errors).

### `v1.64`
* Fixed an [issue](https://forum.rclone.org/t/bisync-bugs-and-feature-requests/37636#:~:text=1.%20Dry%20runs%20are%20not%20completely%20dry)