
local: only calculate the required hashes for big speedup

Before this change we calculated all possible hashes for the file when
the `Hash` method was called.

After this change we only calculate the hash that was requested.

Almost all uses of `Hash` need just one checksum.  This will slow down
`rclone lsjson` with the `--hash` flag, since that requests every hash
type and each type now needs its own read of the file.  Perhaps lsjson
should have a `--hash-type` flag.

However, it will speed up sync/copy/move/check/md5sum/sha1sum etc.

Before this change it took 12.4 seconds to md5sum a 1 GB file; after, it
takes 3.1 seconds, which is the same time the md5sum utility takes.
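
To make the difference concrete, here is a minimal standalone sketch (not
rclone's own code; the helper name and file name are made up, and it assumes
the fs/hash package at its current import path github.com/rclone/rclone/fs/hash):
hash.StreamTypes reads the data once and feeds it only to the hashers in the
requested set, where hash.Stream would have run every supported hasher.

package main

import (
	"fmt"
	"os"

	"github.com/rclone/rclone/fs/hash"
)

// checksumOnly is a hypothetical helper showing the new approach: compute
// only the requested hash type for a file by passing a one-element hash
// set to hash.StreamTypes.
func checksumOnly(path string, t hash.Type) (string, error) {
	in, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer in.Close()
	// StreamTypes returns a map with an entry per requested type.
	sums, err := hash.StreamTypes(in, hash.NewHashSet(t))
	if err != nil {
		return "", err
	}
	return sums[t], nil
}

func main() {
	// "file.bin" is a placeholder path used for illustration only.
	sum, err := checksumOnly("file.bin", hash.MD5)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(sum)
}
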
Author: Nick Craig-Wood
Date:   2019-05-11 22:21:37 +01:00
Parent: 822483aac5
Commit: c3af0a1eca


@@ -709,9 +709,10 @@ func (o *Object) Hash(r hash.Type) (string, error) {
 	o.fs.objectHashesMu.Lock()
 	hashes := o.hashes
+	hashValue, hashFound := o.hashes[r]
 	o.fs.objectHashesMu.Unlock()
-	if !o.modTime.Equal(oldtime) || oldsize != o.size || hashes == nil {
+	if !o.modTime.Equal(oldtime) || oldsize != o.size || hashes == nil || !hashFound {
 		var in io.ReadCloser
 		if !o.translatedLink {
@@ -722,7 +723,7 @@
 		if err != nil {
 			return "", errors.Wrap(err, "hash: failed to open")
 		}
-		hashes, err = hash.Stream(in)
+		hashes, err = hash.StreamTypes(in, hash.NewHashSet(r))
 		closeErr := in.Close()
 		if err != nil {
 			return "", errors.Wrap(err, "hash: failed to read")
@@ -730,11 +731,16 @@
 		if closeErr != nil {
 			return "", errors.Wrap(closeErr, "hash: failed to close")
 		}
+		hashValue = hashes[r]
 		o.fs.objectHashesMu.Lock()
-		o.hashes = hashes
+		if o.hashes == nil {
+			o.hashes = hashes
+		} else {
+			o.hashes[r] = hashValue
+		}
 		o.fs.objectHashesMu.Unlock()
 	}
-	return hashes[r], nil
+	return hashValue, nil
 }

 // Size returns the size of an object in bytes
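
For illustration only, a hedged sketch of what a caller sees with the patched
method (the hasher interface and demo function below are made up; the Hash
signature is the one shown in the diff, without a context argument):

package example

import (
	"fmt"
	"log"

	"github.com/rclone/rclone/fs/hash"
)

// hasher matches the Hash signature in the diff above; the local backend's
// Object satisfies it at the time of this commit.
type hasher interface {
	Hash(hash.Type) (string, error)
}

// demo shows the expected behaviour after the change: the first call reads
// the file once with only the MD5 hasher, the repeated call is answered from
// the per-object cache, and the SHA-1 call triggers one more read whose
// result is merged into the cache.
func demo(o hasher) {
	md5sum, err := o.Hash(hash.MD5)
	if err != nil {
		log.Fatal(err)
	}
	cached, err := o.Hash(hash.MD5) // no re-read: found in o.hashes
	if err != nil {
		log.Fatal(err)
	}
	sha1sum, err := o.Hash(hash.SHA1) // one more read, SHA-1 only, then cached
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(md5sum, cached, sha1sum)
}
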