Stop single file and --files-from operations iterating through the source bucket.

This works by making sure that directory listings which use a filter
iterate only over the files named in the filter (if any).

Single file copies now don't iterate the source or destination
buckets.

Note that this could potentially slow down operations with very long
`--files-from` lists. If that causes anyone a problem, it would be easy
to fix (probably with another flag).

Fixes #610
Fixes #769
This commit is contained in:
Nick Craig-Wood
2016-10-07 11:39:39 +01:00
parent ec7cef98d8
commit d033e92234
5 changed files with 97 additions and 16 deletions

View File

@@ -114,6 +114,10 @@ type ListFser interface {
// Fses must support recursion levels of fs.MaxLevel and 1.
// They may return ErrorLevelNotSupported otherwise.
List(out ListOpts, dir string)
// NewObject finds the Object at remote. If it can't be found
// it returns the error ErrorObjectNotFound.
NewObject(remote string) (Object, error)
}
// Fs is the interface a cloud storage system must provide
@@ -121,10 +125,6 @@ type Fs interface {
Info
ListFser
// NewObject finds the Object at remote. If it can't be found
// it returns the error ErrorObjectNotFound.
NewObject(remote string) (Object, error)
// Put in to the remote path with the modTime given of the given size
//
// May create the object even if it returns an error - if so