Mirror of https://github.com/rclone/rclone.git
press: new name format

commit c96c4cfc5e
parent 2d77521cd3
@@ -6,12 +6,14 @@ import (
 	"bytes"
 	"context"
 	"crypto/md5"
+	"encoding/base64"
 	"encoding/binary"
 	"encoding/hex"
 	"encoding/json"
 	"fmt"
 	"io"
 	"io/ioutil"
+	"regexp"
 	"strings"
 	"time"
 
@@ -38,7 +40,8 @@ const (
 	heuristicBytes = 1048576
 	minCompressionRatio = 1.1
 
-	metaFileExt = ".meta"
+	gzFileExt = ".gz"
+	metaFileExt = ".json"
 	uncompressedFileExt = ".bin"
 )
 
@@ -48,6 +51,8 @@ const (
 	Gzip = 2
 )
 
+var nameRegexp = regexp.MustCompile("^(.+?)\\.([A-Za-z0-9+_]{11})$")
+
 // Register with Fs
 func init() {
 	// Build compression mode options.
@@ -182,16 +187,16 @@ func compressionModeFromName(name string) int {
 	}
 }
 
-// Converts an int64 to hex
-func int64ToHex(number int64) string {
+// Converts an int64 to base64
+func int64ToBase64(number int64) string {
 	intBytes := make([]byte, 8)
 	binary.LittleEndian.PutUint64(intBytes, uint64(number))
-	return hex.EncodeToString(intBytes)
+	return base64.RawURLEncoding.EncodeToString(intBytes)
 }
 
-// Converts hex to int64
-func hexToInt64(hexNumber string) (int64, error) {
-	intBytes, err := hex.DecodeString(hexNumber)
+// Converts base64 to int64
+func base64ToInt64(str string) (int64, error) {
+	intBytes, err := base64.RawStdEncoding.DecodeString(str)
 	if err != nil {
 		return 0, err
 	}
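For context, the new name format stores the uncompressed size as an 8-byte little-endian value encoded as unpadded base64, which always comes out to the 11 characters that nameRegexp expects. Below is a minimal standalone sketch of that round trip (not the commit's code; it assumes base64.RawURLEncoding in both directions, and the helper names are invented):

package main

import (
	"encoding/base64"
	"encoding/binary"
	"fmt"
)

// sizeToBase64 packs an int64 into 8 little-endian bytes and encodes them
// as unpadded base64, which always produces an 11-character tag.
func sizeToBase64(size int64) string {
	b := make([]byte, 8)
	binary.LittleEndian.PutUint64(b, uint64(size))
	return base64.RawURLEncoding.EncodeToString(b)
}

// base64ToSize reverses sizeToBase64.
func base64ToSize(s string) (int64, error) {
	b, err := base64.RawURLEncoding.DecodeString(s)
	if err != nil {
		return 0, err
	}
	return int64(binary.LittleEndian.Uint64(b)), nil
}

func main() {
	tag := sizeToBase64(1048576)
	size, err := base64ToSize(tag)
	fmt.Println(tag, len(tag), size, err) // prints the 11-character tag, then the original size
}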
@@ -206,24 +211,20 @@ func processFileName(compressedFileName string) (origFileName string, extension
 	if extensionPos == -1 {
 		return "", "", 0, errors.New("File name has no extension")
 	}
-	nameWithSize := compressedFileName[:extensionPos]
 	extension = compressedFileName[extensionPos:]
-	// Separate the name with the size if this is a compressed file. Otherwise, just return nameWithSize (because it has no size appended)
-	var name string
-	var size int64
+	nameWithSize := compressedFileName[:extensionPos]
 	if extension == uncompressedFileExt {
-		name = nameWithSize
-		size = -2
-	} else {
-		sizeLoc := len(nameWithSize) - 16
-		name = nameWithSize[:sizeLoc]
-		size, err = hexToInt64(nameWithSize[sizeLoc:])
-		if err != nil {
-			return "", "", 0, errors.New("Could not decode size")
-		}
+		return nameWithSize, extension, -2, nil
 	}
-	// Return everything
-	return name, extension, size, nil
+	match := nameRegexp.FindStringSubmatch(nameWithSize)
+	if match == nil || len(match) != 3 {
+		return "", "", 0, errors.New("Invalid filename")
+	}
+	size, err := base64ToInt64(match[2])
+	if err != nil {
+		return "", "", 0, errors.New("Could not decode size")
+	}
+	return match[1], gzFileExt, size, nil
 }
 
 // Generates the file name for a metadata file
@@ -236,16 +237,17 @@ func isMetadataFile(filename string) bool {
 	return strings.HasSuffix(filename, metaFileExt)
 }
 
-// Generates the file name for a data file
+// makeDataName generates the file name for a data file with specified compression mode
 func makeDataName(remote string, size int64, mode int) (newRemote string) {
 	if mode > 0 {
-		newRemote = remote + int64ToHex(size) + ".gz"
+		newRemote = remote + "." + int64ToBase64(size) + gzFileExt
 	} else {
 		newRemote = remote + uncompressedFileExt
 	}
 	return newRemote
 }
 
+// dataName generates the file name for data file
 func (f *Fs) dataName(remote string, size int64, compressed bool) (name string) {
 	if !compressed {
 		return makeDataName(remote, size, Uncompressed)
@@ -253,7 +255,7 @@ func (f *Fs) dataName(remote string, size int64, compressed bool) (name string)
 	return makeDataName(remote, size, f.mode)
 }
 
-// Get an Object from a data DirEntry
+// addData parses an object and adds it to the DirEntries
 func (f *Fs) addData(entries *fs.DirEntries, o fs.Object) {
 	origFileName, _, size, err := processFileName(o.Remote())
 	if err != nil {
@@ -267,7 +269,7 @@ func (f *Fs) addData(entries *fs.DirEntries, o fs.Object) {
 	*entries = append(*entries, f.newObjectSizeAndNameOnly(o, metaName, size))
 }
 
-// Directory names are unchanged. Just append.
+// addDir adds a dir to the dir entries
 func (f *Fs) addDir(entries *fs.DirEntries, dir fs.Directory) {
 	*entries = append(*entries, f.newDir(dir))
 }
@@ -277,7 +279,7 @@ func (f *Fs) newDir(dir fs.Directory) fs.Directory {
 	return dir // We're using the same dir
 }
 
-// Processes file entries by removing compression data.
+// processEntries parses the file names and adds metadata to the dir entries
 func (f *Fs) processEntries(entries fs.DirEntries) (newEntries fs.DirEntries, err error) {
 	newEntries = entries[:0] // in place filter
 	for _, entry := range entries {
@@ -355,7 +357,7 @@ func (f *Fs) NewObject(ctx context.Context, remote string) (fs.Object, error) {
 	return f.newObject(o, mo, meta), err
 }
 
-// checkCompressAndType attempts to find the mime type of the object so we can determine compressibility
+// checkCompressAndType checks if an object is compressible and determines it's mime type
 // returns a multireader with the bytes that were read to determine mime type
 func checkCompressAndType(in io.Reader) (newReader io.Reader, compressible bool, mimeType string, err error) {
 	in, wrap := accounting.UnWrap(in)
@@ -390,7 +392,7 @@ func isCompressible(r io.Reader) (bool, error) {
 	return ratio > minCompressionRatio, nil
 }
 
-// Verifies an object hash
+// verifyObjectHash verifies the Objects hash
 func (f *Fs) verifyObjectHash(ctx context.Context, o fs.Object, hasher *hash.MultiHasher, ht hash.Type) error {
 	srcHash := hasher.Sums()[ht]
 	dstHash, err := o.Hash(ctx, ht)
@@ -78,9 +78,8 @@ through any other means than rclone. This will upload files that do not contain
 
 ### File names
 
-The compressed files will be named `*################.gz` where `*` is the base file and the `#` part is the size
-of the uncompressed data written as a hexadecimal number. The file names should not be changed by anything other than
-the rclone compression backend.
+The compressed files will be named `*.###########.gz` where `*` is the base file and the `#` part is base64 encoded
+size of the uncompressed file. The file names should not be changed by anything other than the rclone compression backend.
 
 #### Experimental
 This remote is currently **experimental**. Things may break and data may be lost. Anything you do with this remote is
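As a rough illustration of the documented name format, the standalone sketch below (not part of the commit) builds a data file name as name.<11-character base64 size>.gz and splits it back apart with the same regular expression used above; the file name and variable names are invented for the example:

package main

import (
	"encoding/base64"
	"encoding/binary"
	"fmt"
	"regexp"
	"strings"
)

var nameRegexp = regexp.MustCompile(`^(.+?)\.([A-Za-z0-9+_]{11})$`)

func main() {
	// Build: append "." + base64(size) + ".gz" to the original remote name.
	size := int64(1048576)
	b := make([]byte, 8)
	binary.LittleEndian.PutUint64(b, uint64(size))
	dataName := "file.txt" + "." + base64.RawURLEncoding.EncodeToString(b) + ".gz"
	fmt.Println(dataName) // file.txt followed by an 11-character size tag and .gz

	// Parse: strip the ".gz" extension, then let the regexp separate the
	// original name from the encoded size.
	base := strings.TrimSuffix(dataName, ".gz")
	match := nameRegexp.FindStringSubmatch(base)
	if match == nil {
		panic("name does not match the expected format")
	}
	decoded, _ := base64.RawURLEncoding.DecodeString(match[2])
	fmt.Println(match[1], int64(binary.LittleEndian.Uint64(decoded))) // file.txt 1048576
}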