Adjust S3 upload concurrency with --s3-upload-concurrency

themylogin 2018-06-13 13:02:56 +03:00 committed by Nick Craig-Wood
parent aaa8591661
commit 7d2861ead6
2 changed files with 15 additions and 5 deletions


@@ -537,10 +537,11 @@ const (
 // Globals
 var (
 	// Flags
-	s3ACL             = flags.StringP("s3-acl", "", "", "Canned ACL used when creating buckets and/or storing objects in S3")
-	s3StorageClass    = flags.StringP("s3-storage-class", "", "", "Storage class to use when uploading S3 objects (STANDARD|REDUCED_REDUNDANCY|STANDARD_IA|ONEZONE_IA)")
-	s3ChunkSize       = fs.SizeSuffix(s3manager.MinUploadPartSize)
-	s3DisableChecksum = flags.BoolP("s3-disable-checksum", "", false, "Don't store MD5 checksum with object metadata")
+	s3ACL               = flags.StringP("s3-acl", "", "", "Canned ACL used when creating buckets and/or storing objects in S3")
+	s3StorageClass      = flags.StringP("s3-storage-class", "", "", "Storage class to use when uploading S3 objects (STANDARD|REDUCED_REDUNDANCY|STANDARD_IA|ONEZONE_IA)")
+	s3ChunkSize         = fs.SizeSuffix(s3manager.MinUploadPartSize)
+	s3DisableChecksum   = flags.BoolP("s3-disable-checksum", "", false, "Don't store MD5 checksum with object metadata")
+	s3UploadConcurrency = flags.IntP("s3-upload-concurrency", "", 2, "Concurrency for multipart uploads")
 )
 
 // Fs represents a remote s3 server
@@ -1352,7 +1353,7 @@ func (o *Object) Update(in io.Reader, src fs.ObjectInfo, options ...fs.OpenOption
 	size := src.Size()
 
 	uploader := s3manager.NewUploader(o.fs.ses, func(u *s3manager.Uploader) {
-		u.Concurrency = 2
+		u.Concurrency = *s3UploadConcurrency
 		u.LeavePartsOnError = false
 		u.S3 = o.fs.c
 		u.PartSize = int64(s3ChunkSize)
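
The functional change is the single line above: the hard-coded concurrency of 2 becomes the value of the new --s3-upload-concurrency flag, read through its pointer when the uploader is constructed. As a rough standalone sketch of the same pattern outside rclone (assuming the AWS SDK for Go's s3manager package; the bucket, key, file path and region below are placeholders, not taken from this commit), a flag-driven uploader looks roughly like this:

```go
// Minimal sketch, not rclone code: a flag chooses how many parts of a
// multipart upload s3manager sends in parallel. Bucket, key, path and
// region are illustrative placeholders.
package main

import (
	"flag"
	"log"
	"os"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3/s3manager"
)

func main() {
	// Counterpart of --s3-upload-concurrency, with the same default of 2.
	concurrency := flag.Int("upload-concurrency", 2, "number of parts uploaded in parallel per file")
	flag.Parse()

	sess := session.Must(session.NewSession(&aws.Config{
		Region: aws.String("us-east-1"),
	}))

	// Same shape as the rclone change: the flag value is applied when the
	// uploader is built, instead of a hard-coded 2.
	uploader := s3manager.NewUploader(sess, func(u *s3manager.Uploader) {
		u.Concurrency = *concurrency
		u.LeavePartsOnError = false
		u.PartSize = s3manager.MinUploadPartSize // 5 MiB parts
	})

	f, err := os.Open("/tmp/bigfile.bin")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	if _, err := uploader.Upload(&s3manager.UploadInput{
		Bucket: aws.String("example-bucket"),
		Key:    aws.String("bigfile.bin"),
		Body:   f,
	}); err != nil {
		log.Fatal(err)
	}
}
```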


@@ -402,6 +402,15 @@ Note that 2 chunks of this size are buffered in memory per transfer.
 If you are transferring large files over high speed links and you have
 enough memory, then increasing this will speed up the transfers.
 
+#### --s3-upload-concurrency ####
+
+Number of chunks of the same file that are uploaded concurrently.
+Default is 2.
+
+If you are uploading small numbers of large files over high-speed links
+and these uploads do not fully utilize your bandwidth, then increasing
+this may help to speed up the transfers.
+
 ### Anonymous access to public buckets ###
 
 If you want to use rclone to access a public bucket, configure with a
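
For illustration (the remote name, bucket and local path are placeholders, not part of the commit), a transfer such as `rclone copy /data/disk-images remote:bucket --s3-upload-concurrency 8` uploads up to 8 chunks of each large file in parallel.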