1
0
mirror of https://github.com/rclone/rclone.git synced 2025-12-06 00:03:32 +00:00

Compare commits

...

14 Commits

Author SHA1 Message Date
Nick Craig-Wood
8c8a38711b Replace sync with lib/sync for deadlock detection with deadlock build tag 2021-11-22 18:15:07 +00:00
Nick Craig-Wood
386deb3633 lib/sync: a wrapper for inserting go-deadlock into the rclone build 2021-11-22 18:08:12 +00:00
Nick Craig-Wood
a351484997 sftp: fix timeout on hashing large files by sending keepalives
Before this fix the SFTP sessions could timeout when doing hashes if
they took longer than the --timeout parameter.

This patch sends keepalive packets every minute while a shell command
is running to keep the connection open.

See: https://forum.rclone.org/t/rclone-check-over-sftp-failure-to-calculate-md5-hash-for-large-files/27487
2021-11-22 15:26:29 +00:00
Nick Craig-Wood
099eff8891 sftp: refactor so we only have one way of running remote commands
This also returns errors from running ssh Hash commands which we
didn't do before.
2021-11-22 15:26:29 +00:00
albertony
c4cb167d4a Add rsapkf and Will Holtz to contributors 2021-11-21 19:26:05 +01:00
Will Holtz
38e100ab19 docs/config: more explicit doc for config create --all with params 2021-11-21 19:22:19 +01:00
rsapkf
db95a0d6c3 docs/pcloud: fix typo 2021-11-21 19:16:19 +01:00
Nick Craig-Wood
df07964db3 azureblob: raise --azureblob-upload-concurrency to 16 by default
After speed testing it was discovered that upload speed goes up pretty
much linearly with upload concurrency. This patch changes the default
from 4 to 16 which means that rclone will use 16 * 4M = 64M per
transfer which is OK even for low memory devices.

This adds a note that performance may be increased by increasing
upload concurrency.

See: https://forum.rclone.org/t/performance-of-rclone-vs-azcopy/27437/9
2021-11-18 16:09:02 +00:00
Nick Craig-Wood
fbc4c4ad9a azureblob: remove 100MB upper limit on chunk_size as it is no longer needed 2021-11-18 16:09:02 +00:00
Nick Craig-Wood
4454b3e1ae azureblob: implement --azureblob-upload-concurrency parameter to speed uploads
See: https://forum.rclone.org/t/performance-of-rclone-vs-azcopy/27437
2021-11-18 16:08:57 +00:00
Nick Craig-Wood
f9321fccbb Add deinferno to contributors 2021-11-18 15:51:45 +00:00
Ole Frost
3c2252b7c0 fs/operations: add server-side moves to stats
Fixes #5430
2021-11-18 12:20:56 +00:00
Cnly
51c952654c fstests: treat accountUpgradeRequired as success for OneDrive PublicLink 2021-11-17 17:35:17 +00:00
deinferno
80e47be65f yandex: add permanent deletion support 2021-11-17 16:57:41 +00:00
135 changed files with 395 additions and 177 deletions

View File

@@ -19,7 +19,7 @@ import (
"path"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/Azure/azure-pipeline-go/pipeline"
@@ -50,8 +50,6 @@ const (
timeFormatOut = "2006-01-02T15:04:05.000000000Z07:00"
storageDefaultBaseURL = "blob.core.windows.net"
defaultChunkSize = 4 * fs.Mebi
maxChunkSize = 100 * fs.Mebi
uploadConcurrency = 4
defaultAccessTier = azblob.AccessTierNone
maxTryTimeout = time.Hour * 24 * 365 //max time of an azure web request response window (whether or not data is flowing)
// Default storage account, key and blob endpoint for emulator support,
@@ -134,12 +132,33 @@ msi_client_id, or msi_mi_res_id parameters.`,
Advanced: true,
}, {
Name: "chunk_size",
Help: `Upload chunk size (<= 100 MiB).
Help: `Upload chunk size.
Note that this is stored in memory and there may be up to
"--transfers" chunks stored at once in memory.`,
"--transfers" * "--azureblob-upload-concurrency" chunks stored at once
in memory.`,
Default: defaultChunkSize,
Advanced: true,
}, {
Name: "upload_concurrency",
Help: `Concurrency for multipart uploads.
This is the number of chunks of the same file that are uploaded
concurrently.
If you are uploading small numbers of large files over high-speed
links and these uploads do not fully utilize your bandwidth, then
increasing this may help to speed up the transfers.
In tests, upload speed increases almost linearly with upload
concurrency. For example to fill a gigabit pipe it may be necessary to
raise this to 64. Note that this will use more memory.
Note that chunks are stored in memory and there may be up to
"--transfers" * "--azureblob-upload-concurrency" chunks stored at once
in memory.`,
Default: 16,
Advanced: true,
}, {
Name: "list_chunk",
Help: `Size of blob list.
@@ -257,6 +276,7 @@ type Options struct {
Endpoint string `config:"endpoint"`
SASURL string `config:"sas_url"`
ChunkSize fs.SizeSuffix `config:"chunk_size"`
UploadConcurrency int `config:"upload_concurrency"`
ListChunkSize uint `config:"list_chunk"`
AccessTier string `config:"access_tier"`
ArchiveTierDelete bool `config:"archive_tier_delete"`
@@ -416,9 +436,6 @@ func checkUploadChunkSize(cs fs.SizeSuffix) error {
if cs < minChunkSize {
return fmt.Errorf("%s is less than %s", cs, minChunkSize)
}
if cs > maxChunkSize {
return fmt.Errorf("%s is greater than %s", cs, maxChunkSize)
}
return nil
}
@@ -1667,10 +1684,10 @@ func (o *Object) Update(ctx context.Context, in io.Reader, src fs.ObjectInfo, op
putBlobOptions := azblob.UploadStreamToBlockBlobOptions{
BufferSize: int(o.fs.opt.ChunkSize),
MaxBuffers: uploadConcurrency,
MaxBuffers: o.fs.opt.UploadConcurrency,
Metadata: o.meta,
BlobHTTPHeaders: httpHeaders,
TransferManager: o.fs.newPoolWrapper(uploadConcurrency),
TransferManager: o.fs.newPoolWrapper(o.fs.opt.UploadConcurrency),
}
// Don't retry, return a retry error instead

View File

@@ -17,12 +17,10 @@ import (
// TestIntegration runs integration tests against the remote
func TestIntegration(t *testing.T) {
fstests.Run(t, &fstests.Opt{
RemoteName: "TestAzureBlob:",
NilObject: (*Object)(nil),
TiersToTest: []string{"Hot", "Cool"},
ChunkedUpload: fstests.ChunkedUploadConfig{
MaxChunkSize: maxChunkSize,
},
RemoteName: "TestAzureBlob:",
NilObject: (*Object)(nil),
TiersToTest: []string{"Hot", "Cool"},
ChunkedUpload: fstests.ChunkedUploadConfig{},
})
}

View File

@@ -17,7 +17,7 @@ import (
"path"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/backend/b2/api"

View File

@@ -13,7 +13,7 @@ import (
gohash "hash"
"io"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/backend/b2/api"
"github.com/rclone/rclone/fs"

View File

@@ -23,7 +23,7 @@ import (
"path"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"sync/atomic"
"time"

View File

@@ -13,7 +13,7 @@ import (
"io"
"net/http"
"strconv"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/backend/box/api"

View File

@@ -16,7 +16,7 @@ import (
"sort"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"syscall"
"time"

View File

@@ -11,7 +11,7 @@ import (
"path"
"runtime"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -8,7 +8,7 @@ import (
"fmt"
"io"
"path"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -12,7 +12,7 @@ import (
"net/http"
"net/url"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
cache "github.com/patrickmn/go-cache"

View File

@@ -14,7 +14,7 @@ import (
"path"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -19,7 +19,7 @@ import (
"sort"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -13,7 +13,7 @@ import (
"io"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"unicode/utf8"

View File

@@ -21,7 +21,7 @@ import (
"sort"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"sync/atomic"
"text/template"
"time"

View File

@@ -10,7 +10,7 @@ import (
"context"
"errors"
"fmt"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/dropbox/dropbox-sdk-go-unofficial/v6/dropbox/async"

View File

@@ -25,7 +25,7 @@ import (
"net/url"
"path"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"sync/atomic"
"time"

View File

@@ -12,7 +12,7 @@ import (
"path"
"runtime"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/jlaffaye/ftp"

View File

@@ -5,7 +5,7 @@ package googlephotos
import (
"path"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/backend/googlephotos/api"
)

View File

@@ -15,7 +15,7 @@ import (
"regexp"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/backend/googlephotos/api"

View File

@@ -9,7 +9,7 @@ import (
"io"
"path"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -15,7 +15,7 @@ import (
"path"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -13,7 +13,7 @@ import (
"path/filepath"
"runtime"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"unicode/utf8"

View File

@@ -3,7 +3,7 @@ package local
import (
"io/ioutil"
"os"
"sync"
"github.com/rclone/rclone/lib/sync"
"testing"
"time"

View File

@@ -12,7 +12,7 @@ import (
"sort"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"encoding/hex"

View File

@@ -22,7 +22,7 @@ import (
"io"
"path"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -11,7 +11,7 @@ import (
"io/ioutil"
"path"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -16,7 +16,7 @@ import (
"regexp"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/backend/onedrive/api"

View File

@@ -13,7 +13,7 @@ import (
"hash"
"io"
"sort"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/lib/atexit"

View File

@@ -19,7 +19,7 @@ import (
"sort"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/aws/aws-sdk-go/aws"

View File

@@ -4,7 +4,7 @@ import (
"context"
"fmt"
"net/url"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -10,7 +10,7 @@ import (
"path"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/coreos/go-semver/semver"

View File

@@ -17,7 +17,7 @@ import (
"regexp"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"sync/atomic"
"time"
@@ -42,7 +42,8 @@ const (
hashCommandNotSupported = "none"
minSleep = 100 * time.Millisecond
maxSleep = 2 * time.Second
decayConstant = 2 // bigger for slower decay, exponential
decayConstant = 2 // bigger for slower decay, exponential
keepAliveInterval = time.Minute // send keepalives every this long while running commands
)
var (
@@ -339,6 +340,32 @@ func (c *conn) wait() {
c.err <- c.sshClient.Conn.Wait()
}
// Send a keepalive over the ssh connection
func (c *conn) sendKeepAlive() {
_, _, err := c.sshClient.SendRequest("keepalive@openssh.com", true, nil)
if err != nil {
fs.Debugf(nil, "Failed to send keep alive: %v", err)
}
}
// Send keepalives every interval over the ssh connection until done is closed
func (c *conn) sendKeepAlives(interval time.Duration) (done chan struct{}) {
done = make(chan struct{})
go func() {
t := time.NewTicker(interval)
defer t.Stop()
for {
select {
case <-t.C:
c.sendKeepAlive()
case <-done:
return
}
}
}()
return done
}
// Closes the connection
func (c *conn) close() error {
sftpErr := c.sftpClient.Close()
@@ -1098,6 +1125,9 @@ func (f *Fs) run(ctx context.Context, cmd string) ([]byte, error) {
}
defer f.putSftpConnection(&c, err)
// Send keepalives while the connection is open
defer close(c.sendKeepAlives(keepAliveInterval))
session, err := c.sshClient.NewSession()
if err != nil {
return nil, fmt.Errorf("run: get SFTP session: %w", err)
@@ -1110,10 +1140,12 @@ func (f *Fs) run(ctx context.Context, cmd string) ([]byte, error) {
session.Stdout = &stdout
session.Stderr = &stderr
fs.Debugf(f, "Running remote command: %s", cmd)
err = session.Run(cmd)
if err != nil {
return nil, fmt.Errorf("failed to run %q: %s: %w", cmd, stderr.Bytes(), err)
return nil, fmt.Errorf("failed to run %q: %s: %w", cmd, bytes.TrimSpace(stderr.Bytes()), err)
}
fs.Debugf(f, "Remote command result: %s", bytes.TrimSpace(stdout.Bytes()))
return stdout.Bytes(), nil
}
@@ -1230,8 +1262,6 @@ func (o *Object) Remote() string {
// Hash returns the selected checksum of the file
// If no checksum is available it returns ""
func (o *Object) Hash(ctx context.Context, r hash.Type) (string, error) {
o.fs.addSession() // Show session in use
defer o.fs.removeSession()
if o.fs.opt.DisableHashCheck {
return "", nil
}
@@ -1255,36 +1285,16 @@ func (o *Object) Hash(ctx context.Context, r hash.Type) (string, error) {
return "", hash.ErrUnsupported
}
c, err := o.fs.getSftpConnection(ctx)
if err != nil {
return "", fmt.Errorf("Hash get SFTP connection: %w", err)
}
session, err := c.sshClient.NewSession()
o.fs.putSftpConnection(&c, err)
if err != nil {
return "", fmt.Errorf("Hash put SFTP connection: %w", err)
}
var stdout, stderr bytes.Buffer
session.Stdout = &stdout
session.Stderr = &stderr
escapedPath := shellEscape(o.path())
if o.fs.opt.PathOverride != "" {
escapedPath = shellEscape(path.Join(o.fs.opt.PathOverride, o.remote))
}
err = session.Run(hashCmd + " " + escapedPath)
fs.Debugf(nil, "sftp cmd = %s", escapedPath)
b, err := o.fs.run(ctx, hashCmd+" "+escapedPath)
if err != nil {
_ = session.Close()
fs.Debugf(o, "Failed to calculate %v hash: %v (%s)", r, err, bytes.TrimSpace(stderr.Bytes()))
return "", nil
return "", fmt.Errorf("failed to calculate %v hash: %w", r, err)
}
_ = session.Close()
b := stdout.Bytes()
fs.Debugf(nil, "sftp output = %q", b)
str := parseHash(b)
fs.Debugf(nil, "sftp hash = %q", str)
if r == hash.MD5 {
o.md5sum = &str
} else if r == hash.SHA1 {

View File

@@ -3,7 +3,7 @@
package sftp
import "sync"
import "github.com/rclone/rclone/lib/sync"
// stringLock locks for string IDs passed in
type stringLock struct {

View File

@@ -5,7 +5,7 @@ package sftp
import (
"fmt"
"sync"
"github.com/rclone/rclone/lib/sync"
"testing"
"time"

View File

@@ -13,7 +13,7 @@ import (
"fmt"
"io"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/backend/sharefile/api"
"github.com/rclone/rclone/fs"

View File

@@ -23,7 +23,7 @@ import (
"regexp"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/backend/sugarsync/api"

View File

@@ -4,7 +4,7 @@ import (
"context"
"fmt"
"io"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/backend/union/upstream"

View File

@@ -3,7 +3,7 @@ package policy
import (
"context"
"path"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/backend/union/upstream"
"github.com/rclone/rclone/fs"

View File

@@ -3,7 +3,7 @@ package policy
import (
"context"
"path"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/backend/union/upstream"

View File

@@ -9,7 +9,7 @@ import (
"path"
"path/filepath"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/backend/union/policy"

View File

@@ -9,7 +9,7 @@ import (
"path"
"path/filepath"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"sync/atomic"
"time"

View File

@@ -6,7 +6,7 @@ import (
"regexp"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -21,7 +21,7 @@ import (
"path"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/backend/webdav/api"

View File

@@ -66,6 +66,11 @@ func init() {
})
},
Options: append(oauthutil.SharedOptions, []fs.Option{{
Name: "hard_delete",
Help: "Delete files permanently rather than putting them into the trash.",
Default: false,
Advanced: true,
}, {
Name: config.ConfigEncoding,
Help: config.ConfigEncodingHelp,
Advanced: true,
@@ -79,8 +84,9 @@ func init() {
// Options defines the configuration for this backend
type Options struct {
Token string `config:"token"`
Enc encoder.MultiEncoder `config:"encoding"`
Token string `config:"token"`
HardDelete bool `config:"hard_delete"`
Enc encoder.MultiEncoder `config:"encoding"`
}
// Fs represents a remote yandex
@@ -630,7 +636,7 @@ func (f *Fs) purgeCheck(ctx context.Context, dir string, check bool) error {
}
}
//delete directory
return f.delete(ctx, root, false)
return f.delete(ctx, root, f.opt.HardDelete)
}
// Rmdir deletes the container
@@ -1141,7 +1147,7 @@ func (o *Object) Update(ctx context.Context, in io.Reader, src fs.ObjectInfo, op
// Remove an object
func (o *Object) Remove(ctx context.Context) error {
return o.fs.delete(ctx, o.filePath(), false)
return o.fs.delete(ctx, o.filePath(), o.fs.opt.HardDelete)
}
// MimeType of an Object if known, "" otherwise

View File

@@ -19,7 +19,7 @@ import (
"runtime"
"sort"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"text/template"
"time"

View File

@@ -19,7 +19,7 @@ import (
"runtime/pprof"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -9,7 +9,7 @@ import (
"io"
"os"
"path"
"sync"
"github.com/rclone/rclone/lib/sync"
"sync/atomic"
"time"

View File

@@ -12,7 +12,7 @@ import (
"os"
"path/filepath"
"sort"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
)

View File

@@ -7,7 +7,7 @@ import (
"os"
"runtime"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/cmd"

View File

@@ -5,7 +5,7 @@ import (
"errors"
"log"
"sort"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -5,7 +5,7 @@ import (
"context"
"fmt"
"path"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/fs/walk"

View File

@@ -6,7 +6,7 @@ import (
"bytes"
"fmt"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -3,7 +3,7 @@ package rcd
import (
"context"
"log"
"sync"
"github.com/rclone/rclone/lib/sync"
sysdnotify "github.com/iguanesolutions/go-systemd/v5/notify"
"github.com/rclone/rclone/cmd"

View File

@@ -10,7 +10,7 @@ import (
"path/filepath"
"reflect"
"sort"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
sysdnotify "github.com/iguanesolutions/go-systemd/v5/notify"

View File

@@ -14,7 +14,7 @@ import (
"os"
"os/user"
"strconv"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/cmd"

View File

@@ -5,7 +5,7 @@ package restic
import (
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/fs"
)

View File

@@ -16,7 +16,7 @@ import (
"sort"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/cmd"

View File

@@ -3,7 +3,7 @@ package memory
import (
"context"
"runtime"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/cmd"
"github.com/rclone/rclone/cmd/test"

View File

@@ -550,3 +550,6 @@ put them back in again.` >}}
* Fredric Arklid <fredric.arklid@consid.se>
* Andy Jackson <Andrew.Jackson@bl.uk>
* Sinan Tan <i@tinytangent.com>
* deinferno <14363193+deinferno@users.noreply.github.com>
* rsapkf <rsapkfff@pm.me>
* Will Holtz <wholtz@gmail.com>

View File

@@ -81,6 +81,14 @@ key. It is stored using RFC3339 Format time with nanosecond
precision. The metadata is supplied during directory listings so
there is no overhead to using it.
### Performance
When uploading large files, increasing the value of
`--azureblob-upload-concurrency` will increase performance at the cost
of using more memory. The default of 16 is set quite conservatively to
use less memory. It may be necessary to raise it to 64 or higher to
fully utilize a 1 GBit/s link with a single file transfer.
### Restricted filename characters
In addition to the [default restricted characters set](/overview/#restricted-characters)

View File

@@ -107,8 +107,9 @@ At the end of the non interactive process, rclone will return a result
with `State` as empty string.
If `--all` is passed then rclone will ask all the config questions,
not just the post config questions. Any parameters are used as
defaults for questions as usual.
not just the post config questions. Parameters that are supplied on
the command line or from environment variables are used as defaults
for questions as usual.
Note that `bin/config.py` in the rclone source implements this protocol
as a readable demonstration.

View File

@@ -80,7 +80,7 @@ List all the files in your pCloud
rclone ls remote:
To copy a local directory to an pCloud directory called backup
To copy a local directory to a pCloud directory called backup
rclone copy /home/source remote:backup

View File

@@ -620,7 +620,7 @@ issue](https://github.com/pkg/sftp/issues/156) is fixed.
Note that since SFTP isn't HTTP based the following flags don't work
with it: `--dump-headers`, `--dump-bodies`, `--dump-auth`
Note that `--timeout` isn't supported (but `--contimeout` is).
Note that `--timeout` and `--contimeout` are both supported.
## C14 {#c14}

View File

@@ -175,6 +175,15 @@ Leave blank to use the provider defaults.
- Type: string
- Default: ""
#### --yandex-hard-delete
Delete files permanently rather than putting them into the trash.
- Config: hard_delete
- Env Var: RCLONE_YANDEX_HARD_DELETE
- Type: bool
- Default: false
#### --yandex-encoding
This sets the encoding for the backend.

View File

@@ -6,7 +6,7 @@ import (
"errors"
"fmt"
"io"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"unicode/utf8"

View File

@@ -2,7 +2,7 @@ package accounting
import (
"context"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/fs"
)

View File

@@ -6,7 +6,7 @@ import (
"fmt"
"sort"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -2,7 +2,7 @@ package accounting
import (
"context"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/fs/rc"

View File

@@ -4,11 +4,11 @@ import (
"context"
"errors"
"fmt"
"sync"
"time"
"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/fs/rc"
"github.com/rclone/rclone/lib/sync"
"golang.org/x/time/rate"
)

View File

@@ -4,7 +4,7 @@ import (
"context"
"encoding/json"
"io"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -5,7 +5,7 @@ import (
"fmt"
"sort"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/fs/rc"

View File

@@ -6,7 +6,7 @@ import (
"context"
"errors"
"io"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -9,7 +9,7 @@ import (
"io/ioutil"
"math/rand"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"testing"
"testing/iotest"
"time"

2
fs/cache/cache.go vendored
View File

@@ -4,7 +4,7 @@ package cache
import (
"context"
"runtime"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/fs/filter"

View File

@@ -4,7 +4,7 @@ import (
"context"
"errors"
"io"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/fs/hash"

View File

@@ -8,7 +8,7 @@ import (
"os"
"path/filepath"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/Unknwon/goconfig"
"github.com/rclone/rclone/fs"

View File

@@ -2,7 +2,7 @@ package config
import (
"encoding/json"
"sync"
"github.com/rclone/rclone/lib/sync"
)
// defaultStorage implements config.Storage, providing in-memory config.

View File

@@ -6,7 +6,7 @@ import (
"io/ioutil"
"os"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"testing"
"time"

View File

@@ -7,7 +7,7 @@ import (
"fmt"
"os"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"testing"
"time"

View File

@@ -5,7 +5,7 @@ import (
"net"
"runtime"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -12,7 +12,7 @@ import (
"net/http"
"net/http/cookiejar"
"net/http/httputil"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -7,7 +7,7 @@ import (
"path"
"sort"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/fs/dirtree"

View File

@@ -7,7 +7,7 @@ import (
"errors"
"fmt"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"testing"
_ "github.com/rclone/rclone/backend/local"

View File

@@ -10,7 +10,7 @@ import (
"os"
"regexp"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"sync/atomic"
"github.com/rclone/rclone/fs"

View File

@@ -18,7 +18,7 @@ import (
"sort"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"sync/atomic"
"time"
@@ -405,7 +405,7 @@ func Copy(ctx context.Context, f fs.Fs, dst fs.Object, remote string, src fs.Obj
if err == nil {
dst = newDst
in.ServerSideCopyEnd(dst.Size()) // account the bytes for the server-side transfer
err = in.Close()
_ = in.Close()
} else {
_ = in.Close()
}
@@ -598,6 +598,8 @@ func Move(ctx context.Context, fdst fs.Fs, dst fs.Object, remote string, src fs.
}
}
// Move dst <- src
in := tr.Account(ctx, nil) // account the transfer
in.ServerSideCopyStart()
newDst, err = doMove(ctx, src, remote)
switch err {
case nil:
@@ -606,13 +608,16 @@ func Move(ctx context.Context, fdst fs.Fs, dst fs.Object, remote string, src fs.
} else {
fs.Infof(src, "Moved (server-side)")
}
in.ServerSideCopyEnd(newDst.Size()) // account the bytes for the server-side transfer
_ = in.Close()
return newDst, nil
case fs.ErrorCantMove:
fs.Debugf(src, "Can't move, switching to copy")
_ = in.Close()
default:
err = fs.CountError(err)
fs.Errorf(src, "Couldn't move: %v", err)
_ = in.Close()
return newDst, err
}
}

View File

@@ -4,7 +4,7 @@ import (
"context"
"errors"
"io"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/fs/fserrors"

View File

@@ -7,7 +7,7 @@ import (
"errors"
"fmt"
"runtime/debug"
"sync"
"github.com/rclone/rclone/lib/sync"
"sync/atomic"
"time"

View File

@@ -15,7 +15,7 @@ import (
"regexp"
"sort"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/prometheus/client_golang/prometheus"

View File

@@ -6,7 +6,7 @@ import (
"context"
"sort"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
)
// Func defines a type for a remote control function

View File

@@ -12,7 +12,7 @@ import (
"path/filepath"
"regexp"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/fs/config"

View File

@@ -6,7 +6,7 @@ import (
"math/bits"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"github.com/aalpar/deheap"
"github.com/rclone/rclone/fs"

View File

@@ -3,7 +3,7 @@ package sync
import (
"container/heap"
"context"
"sync"
"github.com/rclone/rclone/lib/sync"
"sync/atomic"
"testing"

View File

@@ -8,7 +8,7 @@ import (
"path"
"sort"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -8,7 +8,7 @@ import (
"path"
"sort"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -6,7 +6,7 @@ import (
"fmt"
"io"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"testing"
"github.com/rclone/rclone/fs"

View File

@@ -441,6 +441,10 @@ func Run(t *testing.T, opt *Opt) {
}
require.NoError(t, err, fmt.Sprintf("unexpected error: %v", err))
// Get fsInfo which contains type, etc. of the fs
fsInfo, _, _, _, err := fs.ConfigFs(subRemoteName)
require.NoError(t, err, fmt.Sprintf("unexpected error: %v", err))
// Skip the rest if it failed
skipIfNotOk(t)
@@ -1587,12 +1591,30 @@ func Run(t *testing.T, opt *Opt) {
t.Run("PublicLink", func(t *testing.T) {
skipIfNotOk(t)
doPublicLink := f.Features().PublicLink
if doPublicLink == nil {
publicLinkFunc := f.Features().PublicLink
if publicLinkFunc == nil {
t.Skip("FS has no PublicLinker interface")
}
type PublicLinkFunc func(ctx context.Context, remote string, expire fs.Duration, unlink bool) (link string, err error)
wrapPublicLinkFunc := func(f PublicLinkFunc) PublicLinkFunc {
return func(ctx context.Context, remote string, expire fs.Duration, unlink bool) (link string, err error) {
link, err = publicLinkFunc(ctx, remote, expire, unlink)
if err == nil {
return
}
// For OneDrive Personal, link expiry is a premium feature
// Don't let it fail the test (https://github.com/rclone/rclone/issues/5420)
if fsInfo.Name == "onedrive" && strings.Contains(err.Error(), "accountUpgradeRequired") {
t.Log("treating accountUpgradeRequired as success for PublicLink")
link, err = "bogus link to "+remote, nil
}
return
}
}
expiry := fs.Duration(60 * time.Second)
doPublicLink := wrapPublicLinkFunc(publicLinkFunc)
// if object not found
link, err := doPublicLink(ctx, file1.Path+"_does_not_exist", expiry, false)
@@ -1639,7 +1661,7 @@ func Run(t *testing.T, opt *Opt) {
_, err = subRemote.Put(ctx, buf, obji)
require.NoError(t, err)
link4, err := subRemote.Features().PublicLink(ctx, "", expiry, false)
link4, err := wrapPublicLinkFunc(subRemote.Features().PublicLink)(ctx, "", expiry, false)
require.NoError(t, err, "Sharing root in a sub-remote should work")
require.NotEqual(t, "", link4, "Link should not be empty")
}

View File

@@ -17,7 +17,7 @@ import (
"sort"
"strconv"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

View File

@@ -11,7 +11,7 @@ import (
"path/filepath"
"regexp"
"strings"
"sync"
"github.com/rclone/rclone/lib/sync"
"time"
"github.com/rclone/rclone/fs"

4
go.mod
View File

@@ -10,7 +10,7 @@ require (
github.com/Azure/azure-storage-blob-go v0.14.0
github.com/Azure/go-autorest/autorest/adal v0.9.17
github.com/Azure/go-ntlmssp v0.0.0-20200615164410-66371956d46c
github.com/Max-Sum/base32768 v0.0.0-20191205131208-7937843c71d5 // indirect
github.com/Max-Sum/base32768 v0.0.0-20191205131208-7937843c71d5
github.com/Unknwon/goconfig v0.0.0-20200908083735-df7de6a44db8
github.com/a8m/tree v0.0.0-20210414114729-ce3525c5c2ef
github.com/aalpar/deheap v0.0.0-20210914013432-0cc84d79dec3
@@ -49,6 +49,7 @@ require (
github.com/prometheus/client_golang v1.11.0
github.com/putdotio/go-putio/putio v0.0.0-20200123120452-16d982cac2b8
github.com/rfjakob/eme v1.1.2
github.com/sasha-s/go-deadlock v0.3.1
github.com/shirou/gopsutil/v3 v3.21.10
github.com/sirupsen/logrus v1.8.1
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966
@@ -108,6 +109,7 @@ require (
github.com/mattn/go-isatty v0.0.14 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
github.com/pengsrc/go-shared v0.2.1-0.20190131101655-1999055a4a14 // indirect
github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 // indirect
github.com/prometheus/client_model v0.2.0 // indirect
github.com/prometheus/common v0.32.1 // indirect
github.com/prometheus/procfs v0.7.3 // indirect

4
go.sum
View File

@@ -502,6 +502,8 @@ github.com/pborman/getopt v0.0.0-20180729010549-6fdd0a2c7117/go.mod h1:85jBQOZwp
github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pengsrc/go-shared v0.2.1-0.20190131101655-1999055a4a14 h1:XeOYlK9W1uCmhjJSsY78Mcuh7MVkNjTzmHx1yBzizSU=
github.com/pengsrc/go-shared v0.2.1-0.20190131101655-1999055a4a14/go.mod h1:jVblp62SafmidSkvWrXyxAme3gaTfEtWwRPGz5cpvHg=
github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 h1:q2e307iGHPdTGp0hoxKjt1H5pDo6utceo3dQVK3I5XQ=
github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5/go.mod h1:jvVRKCrJTQWu0XVbaOlby/2lO20uSCHEMzzplHXte1o=
github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@@ -555,6 +557,8 @@ github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8=
github.com/sasha-s/go-deadlock v0.3.1 h1:sqv7fDNShgjcaxkO0JNcOAlr8B9+cV5Ey/OB71efZx0=
github.com/sasha-s/go-deadlock v0.3.1/go.mod h1:F73l+cr82YSh10GxyRI6qZiCgK64VaZjwesgfQ1/iLM=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/shirou/gopsutil/v3 v3.21.10 h1:flTg1DrnV/UVrBqjLgVgDJzx6lf+91rC64/dBHmO2IA=

Some files were not shown because too many files have changed in this diff Show More