1
mirror of https://github.com/rclone/rclone synced 2024-12-23 14:23:44 +01:00
rclone/fs/chunkedreader/chunkedreader.go
Nick Craig-Wood 27b281ef69 chunkedreader: add --vfs-read-chunk-streams to parallel read chunks
This converts the ChunkedReader into an interface and provides two
implementations one sequential and one parallel.

This can be used to improve the performance of the VFS on high
bandwidth or high latency links.

Fixes #4760
2024-08-14 21:13:09 +01:00

48 lines
1.3 KiB
Go

// Package chunkedreader provides functionality for reading a stream in chunks.
package chunkedreader
import (
"context"
"errors"
"io"
"github.com/rclone/rclone/fs"
)
// io related errors returned by ChunkedReader
var (
// ErrorFileClosed is returned when an operation is attempted after Close.
ErrorFileClosed = errors.New("file already closed")
// ErrorInvalidSeek is returned for a seek to a negative or otherwise
// out-of-range position.
ErrorInvalidSeek = errors.New("invalid seek position")
)
// ChunkedReader describes what a chunked reader can do.
//
// Implementations read an fs.Object in chunks, supporting sequential
// reads, seeking, and ranged seeks.
type ChunkedReader interface {
io.Reader
io.Seeker
io.Closer
fs.RangeSeeker
// Open eagerly opens the underlying source and returns the reader
// (implementations return themselves) or an error.
Open() (ChunkedReader, error)
}
// New returns a ChunkedReader for the Object.
//
// An initialChunkSize of <= 0 disables chunked reading.
// When maxChunkSize is greater than initialChunkSize, the chunk size is
// doubled after each chunk read, capped at maxChunkSize.
// A Seek or RangeSeek resets the chunk size to its initial value.
func New(ctx context.Context, o fs.Object, initialChunkSize int64, maxChunkSize int64, streams int) ChunkedReader {
	// Normalize the sizing parameters before choosing an implementation.
	if initialChunkSize <= 0 {
		initialChunkSize = -1 // sentinel: chunked reading disabled
	}
	if maxChunkSize != -1 && maxChunkSize < initialChunkSize {
		maxChunkSize = initialChunkSize // max can never be below the initial size
	}
	if streams < 0 {
		streams = 0
	}
	// Use the sequential reader unless parallelism was requested and the
	// object has a known size (the parallel reader needs the size).
	wantSequential := streams <= 1 || o.Size() < 0
	if wantSequential {
		return newSequential(ctx, o, initialChunkSize, maxChunkSize)
	}
	return newParallel(ctx, o, initialChunkSize, streams)
}