mirror of https://github.com/rclone/rclone.git synced 2026-01-07 11:03:15 +00:00

Compare commits


1 Commit

Author SHA1 Message Date
Nick Craig-Wood
c917d2d5b4 s3: fix --s3-versions when copying a single object
Before this change, if --s3-versions was enabled, then copying a
single object from a subdirectory would fail.

This was due to an incorrect comparison in the NewFs code.

This fixes the comparison and introduces a new unit test.
2022-09-05 18:56:11 +01:00
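
A minimal illustration of the comparison problem the commit message describes (the names below are made up): when the backend lists with a directory prefix, the remote names it returns carry that prefix, so an exact string match against the wanted name can fail even though the object is present. Comparing base names, as the `getMetaDataListing` hunk further down does with `path.Base`, sidesteps the mismatch. This is a sketch of the idea only, not rclone's code.

```go
package main

import (
	"fmt"
	"path"
)

func main() {
	// Hypothetical values: the name being searched for versus the name a
	// prefixed listing returns for the same object.
	wantRemote := "subdir/file.txt"
	gotRemote := "file.txt"

	fmt.Println(wantRemote == gotRemote)                       // false: exact comparison misses the object
	fmt.Println(path.Base(wantRemote) == path.Base(gotRemote)) // true: base-name comparison finds it
}
```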
17 changed files with 38 additions and 63 deletions

View File

@@ -53,14 +53,6 @@ doing that so it may be necessary to roll back dependencies to the
version specified by `make updatedirect` in order to get rclone to
build.
## Tidy beta
At some point after the release run
bin/tidy-beta v1.55
where the version number is that of a couple ago to remove old beta binaries.
## Making a point release
If rclone needs a point release due to some horrendous bug:

View File

@@ -1210,7 +1210,6 @@ func newFs(ctx context.Context, name, path string, m configmap.Mapper) (*Fs, err
WriteMimeType: true,
CanHaveEmptyDirectories: true,
ServerSideAcrossConfigs: opt.ServerSideAcrossConfigs,
FilterAware: true,
}).Fill(ctx, f)
// Create a new authorized Drive client.

View File

@@ -518,9 +518,6 @@ func (f *Fs) InternalTestCopyID(t *testing.T) {
// TestIntegration/FsMkdir/FsPutFiles/Internal/AgeQuery
func (f *Fs) InternalTestAgeQuery(t *testing.T) {
// Check set up for filtering
assert.True(t, f.Features().FilterAware)
opt := &filter.Opt{}
err := opt.MaxAge.Set("1h")
assert.NoError(t, err)

View File

@@ -300,7 +300,6 @@ func NewFs(ctx context.Context, name, root string, m configmap.Mapper) (fs.Fs, e
ReadMetadata: true,
WriteMetadata: true,
UserMetadata: xattrSupported, // can only R/W general purpose metadata if xattrs are supported
FilterAware: true,
}).Fill(ctx, f)
if opt.FollowSymlinks {
f.lstat = os.Stat

View File

@@ -378,9 +378,6 @@ func TestFilter(t *testing.T) {
r.WriteFile("excluded", "excluded file", when)
f := r.Flocal.(*Fs)
// Check set up for filtering
assert.True(t, f.Features().FilterAware)
// Add a filter
ctx, fi := filter.AddConfig(ctx)
require.NoError(t, fi.AddRule("+ included"))

View File

@@ -2758,7 +2758,8 @@ func (f *Fs) getMetaDataListing(ctx context.Context, wantRemote string) (info *s
if isDirectory {
return nil
}
if wantRemote != gotRemote {
// compare the base name only since the listing will have a prefix
if path.Base(wantRemote) != path.Base(gotRemote) {
return nil
}
info = object

View File

@@ -6,12 +6,16 @@ import (
"context"
"crypto/md5"
"fmt"
"path"
"strings"
"testing"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/fs/cache"
"github.com/rclone/rclone/fs/fspath"
"github.com/rclone/rclone/fs/hash"
"github.com/rclone/rclone/fstest"
"github.com/rclone/rclone/fstest/fstests"
@@ -250,7 +254,7 @@ func (f *Fs) InternalTestVersions(t *testing.T) {
time.Sleep(2 * time.Second)
// Create an object
const fileName = "test-versions.txt"
const fileName = "versions/test-versions.txt"
contents := random.String(100)
item := fstest.NewItem(fileName, contents, fstest.Time("2001-05-06T04:05:06.499999999Z"))
obj := fstests.PutTestContents(ctx, t, f, &item, contents, true)
@@ -280,7 +284,7 @@ func (f *Fs) InternalTestVersions(t *testing.T) {
}()
// Read the contents
entries, err := f.List(ctx, "")
entries, err := f.List(ctx, "versions")
require.NoError(t, err)
tests := 0
var fileNameVersion string
@@ -295,12 +299,24 @@ func (f *Fs) InternalTestVersions(t *testing.T) {
t.Run("ReadVersion", func(t *testing.T) {
assert.Equal(t, contents, fstests.ReadObject(ctx, t, entry.(fs.Object), -1))
})
t.Run("NewFs", func(t *testing.T) {
// Check we can find the object with NewFs
fPath := fs.ConfigString(f)
fPath = strings.Replace(fPath, ":", ",versions=true:", 1)
subFPath := fspath.JoinRootPath(fPath, entry.Remote())
subF, err := cache.Get(ctx, subFPath)
require.Equal(t, fs.ErrorIsFile, err, "Remote %q didn't find a file", subFPath)
require.NotNil(t, subF)
o, err := subF.NewObject(ctx, path.Base(entry.Remote()))
require.NoError(t, err)
assert.Equal(t, contents, fstests.ReadObject(ctx, t, o, -1))
})
assert.WithinDuration(t, obj.(*Object).lastModified, versionTime, time.Second, "object time must be with 1 second of version time")
fileNameVersion = remote
tests++
}
}
assert.Equal(t, 2, tests, "object missing from listing")
assert.Equal(t, 2, tests, "object missing from listing: %v", entries)
// Check we can read the object with a version suffix
t.Run("NewObject", func(t *testing.T) {

View File

@@ -15,7 +15,6 @@ else
fi
rclone ${dry_run} -vv -P --checkers 16 --transfers 16 delete \
--fast-list \
--include "/${version}**" \
--include "/branch/*/${version}**" \
--include "/branch/${version}**" \
memstore:beta-rclone-org

View File

@@ -1868,22 +1868,13 @@ By default, rclone doesn't keep track of renamed files, so if you
rename a file locally then sync it to a remote, rclone will delete the
old file on the remote and upload a new copy.
An rclone sync with `--track-renames` runs like a normal sync, but keeps
track of objects which exist in the destination but not in the source
(which would normally be deleted), and which objects exist in the
source but not the destination (which would normally be transferred).
These objects are then candidates for renaming.
If you use this flag, and the remote supports server-side copy or
server-side move, and the source and destination have a compatible
hash, then this will track renames during `sync`
operations and perform renaming server-side.
After the sync, rclone matches up the source only and destination only
objects using the `--track-renames-strategy` specified and either
renames the destination object or transfers the source and deletes the
destination object. `--track-renames` is stateless like all of
rclone's syncs.
To use this flag the destination must support server-side copy or
server-side move, and to use a hash based `--track-renames-strategy`
(the default) the source and the destination must have a compatible
hash.
Files will be matched by size and hash - if both match then a rename
will be considered.
If the destination does not support server-side copy or move, rclone
will fall back to the default behaviour and log an error level message
@@ -1901,7 +1892,7 @@ Note also that `--track-renames` is incompatible with
### --track-renames-strategy (hash,modtime,leaf,size) ###
This option changes the file matching criteria for `--track-renames`.
This option changes the matching criteria for `--track-renames`.
The matching is controlled by a comma separated selection of these tokens:
@@ -1910,15 +1901,15 @@ The matching is controlled by a comma separated selection of these tokens:
- `leaf` - the name of the file not including its directory name
- `size` - the size of the file (this is always enabled)
The default option is `hash`.
Using `--track-renames-strategy modtime,leaf` would match files
So using `--track-renames-strategy modtime,leaf` would match files
based on modification time, the leaf of the file name and the size
only.
Using `--track-renames-strategy modtime` or `leaf` can enable
`--track-renames` support for encrypted destinations.
If nothing is specified, the default option is matching by `hash`es.
Note that the `hash` strategy is not supported with encrypted destinations.
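
The strategy tokens above effectively choose which attributes go into the key used to pair a source-only object with a destination-only object. Here is a minimal sketch of that idea; it is illustrative only, not rclone's implementation, and every name in it is invented:

```go
package main

import (
	"fmt"
	"strconv"
	"time"
)

// strategy mirrors the --track-renames-strategy tokens; size always participates.
type strategy struct {
	hash, modtime, leaf bool
}

// matchKey composes the attributes selected by the strategy into a single
// key; a source-only and a destination-only object with equal keys become
// candidates for a rename.
func matchKey(s strategy, size int64, sum string, modTime time.Time, leaf string) string {
	key := strconv.FormatInt(size, 10)
	if s.hash {
		key += "|" + sum
	}
	if s.modtime {
		key += "|" + modTime.UTC().Format(time.RFC3339Nano)
	}
	if s.leaf {
		key += "|" + leaf
	}
	return key
}

func main() {
	// With "modtime,leaf" the hash is never consulted, which is why this
	// strategy can work against destinations that share no hash with the source.
	s := strategy{modtime: true, leaf: true}
	when := time.Date(2022, 9, 5, 18, 0, 0, 0, time.UTC)
	fmt.Println(matchKey(s, 100, "", when, "report.txt") ==
		matchKey(s, 100, "", when, "report.txt")) // true: rename candidate
}
```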
### --delete-(before,during,after) ###

View File

@@ -29,12 +29,6 @@ var (
return errors.New("no config file set handler")
}
// Check if the config file has the named section
//
// This is a function pointer to decouple the config
// implementation from the fs
ConfigFileHasSection = func(section string) bool { return false }
// CountError counts an error. If any errors have been
// counted then rclone will exit with a non zero error code.
//

View File

@@ -117,9 +117,6 @@ func init() {
// Set the function pointers up in fs
fs.ConfigFileGet = FileGetFlag
fs.ConfigFileSet = SetValueAndSave
fs.ConfigFileHasSection = func(section string) bool {
return LoadedData().HasSection(section)
}
configPath = makeConfigPath()
cacheDir = makeCacheDir() // Has fallback to tempDir, so set that first
data = newDefaultStorage()
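
The hunks above touch both halves of the function-pointer indirection that the comment describes: `fs` declares a variable holding a stub, and the config package installs the real implementation from its `init`, so `fs` never has to import the config implementation. A self-contained sketch of the pattern, with names invented for illustration:

```go
package main

import "fmt"

// hasSection stands in for a low-level package's function variable: a safe
// stub default that a higher layer may replace later.
var hasSection = func(section string) bool { return false }

// wireConfig plays the role of the config package's init(), installing the
// real lookup once the configuration has been loaded.
func wireConfig(sections map[string]bool) {
	hasSection = func(section string) bool { return sections[section] }
}

func main() {
	fmt.Println(hasSection("remote")) // false: stub before wiring
	wireConfig(map[string]bool{"remote": true})
	fmt.Println(hasSection("remote")) // true: real lookup after wiring
}
```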

View File

@@ -29,7 +29,6 @@ type Features struct {
ReadMetadata bool // can read metadata from objects
WriteMetadata bool // can write metadata to objects
UserMetadata bool // can read/write general purpose metadata
FilterAware bool // can make use of filters if provided for listing
// Purge all files in the directory specified
//
@@ -321,7 +320,6 @@ func (ft *Features) Mask(ctx context.Context, f Fs) *Features {
// ft.IsLocal = ft.IsLocal && mask.IsLocal Don't propagate IsLocal
ft.SlowModTime = ft.SlowModTime && mask.SlowModTime
ft.SlowHash = ft.SlowHash && mask.SlowHash
ft.FilterAware = ft.FilterAware && mask.FilterAware
if mask.Purge == nil {
ft.Purge = nil

View File

@@ -83,7 +83,7 @@ func (m *March) makeListDir(ctx context.Context, f fs.Fs, includeAll bool) listD
if !(ci.UseListR && f.Features().ListR != nil) && // !--fast-list active and
!(ci.NoTraverse && fi.HaveFilesFrom()) { // !(--files-from and --no-traverse)
return func(dir string) (entries fs.DirEntries, err error) {
dirCtx := filter.SetUseFilter(m.Ctx, f.Features().FilterAware && !includeAll) // make filter-aware backends constrain List
dirCtx := filter.SetUseFilter(m.Ctx, !includeAll) // make filter-aware backends constrain List
return list.DirSorted(dirCtx, f, includeAll, dir)
}
}
@@ -100,7 +100,7 @@ func (m *March) makeListDir(ctx context.Context, f fs.Fs, includeAll bool) listD
mu.Lock()
defer mu.Unlock()
if !started {
dirCtx := filter.SetUseFilter(m.Ctx, f.Features().FilterAware && !includeAll) // make filter-aware backends constrain List
dirCtx := filter.SetUseFilter(m.Ctx, !includeAll) // make filter-aware backends constrain List
dirs, dirsErr = walk.NewDirTree(dirCtx, f, m.Dir, includeAll, ci.MaxDepth)
started = true
}

View File

@@ -26,9 +26,6 @@ import (
// up with drive letters.
func NewFs(ctx context.Context, path string) (Fs, error) {
Debugf(nil, "Creating backend with remote %q", path)
if ConfigFileHasSection(path) {
Logf(nil, "%q refers to a local folder, use %q to refer to your remote or %q to hide this warning", path, path+":", "./"+path)
}
fsInfo, configName, fsPath, config, err := ConfigFs(path)
if err != nil {
return nil, err

View File

@@ -64,7 +64,7 @@ type Func func(path string, entries fs.DirEntries, err error) error
func Walk(ctx context.Context, f fs.Fs, path string, includeAll bool, maxLevel int, fn Func) error {
ci := fs.GetConfig(ctx)
fi := filter.GetConfig(ctx)
ctx = filter.SetUseFilter(ctx, f.Features().FilterAware && !includeAll) // make filter-aware backends constrain List
ctx = filter.SetUseFilter(ctx, !includeAll) // make filter-aware backends constrain List
if ci.NoTraverse && fi.HaveFilesFrom() {
return walkR(ctx, f, path, includeAll, maxLevel, fn, fi.MakeListR(ctx, f.NewObject))
}
@@ -158,7 +158,7 @@ func ListR(ctx context.Context, f fs.Fs, path string, includeAll bool, maxLevel
fi.UsesDirectoryFilters() { // ...using any directory filters
return listRwalk(ctx, f, path, includeAll, maxLevel, listType, fn)
}
ctx = filter.SetUseFilter(ctx, f.Features().FilterAware && !includeAll) // make filter-aware backends constrain List
ctx = filter.SetUseFilter(ctx, !includeAll) // make filter-aware backends constrain List
return listR(ctx, f, path, includeAll, listType, fn, doListR, listType.Dirs() && f.Features().BucketBased)
}

go.mod
View File

@@ -2,8 +2,6 @@ module github.com/rclone/rclone
go 1.17
replace github.com/jlaffaye/ftp v0.0.0-20220630165035-11536801d1ff => github.com/ncw/ftp v0.0.0-20220914071703-a2f63f63c5ce
require (
bazil.org/fuse v0.0.0-20200524192727-fb710f7dfd05
github.com/Azure/azure-pipeline-go v0.2.3

go.sum
View File

@@ -378,6 +378,8 @@ github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZ
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU=
github.com/jlaffaye/ftp v0.0.0-20190624084859-c1312a7102bf/go.mod h1:lli8NYPQOFy3O++YmYbqVgOcQ1JPCwdOy+5zSjKJ9qY=
github.com/jlaffaye/ftp v0.0.0-20220630165035-11536801d1ff h1:tN6UCYCBFNrPwvKf4RP9cIhGo6GcZ/IQTN8nqD7eCok=
github.com/jlaffaye/ftp v0.0.0-20220630165035-11536801d1ff/go.mod h1:hhq4G4crv+nW2qXtNYcuzLeOudG92Ps37HEKeg2e3lE=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
@@ -456,8 +458,6 @@ github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjY
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/ncw/ftp v0.0.0-20220914071703-a2f63f63c5ce h1:nuGzYyYm5OdPVHFqRou5KbSq101NFayEbz5Vj1/HMgg=
github.com/ncw/ftp v0.0.0-20220914071703-a2f63f63c5ce/go.mod h1:hhq4G4crv+nW2qXtNYcuzLeOudG92Ps37HEKeg2e3lE=
github.com/ncw/go-acd v0.0.0-20201019170801-fe55f33415b1 h1:nAjWYc03awJAjsozNehdGZsm5LP7AhLOvjgbS8zN1tk=
github.com/ncw/go-acd v0.0.0-20201019170801-fe55f33415b1/go.mod h1:MLIrzg7gp/kzVBxRE1olT7CWYMCklcUWU+ekoxOD9x0=
github.com/ncw/swift/v2 v2.0.1 h1:q1IN8hNViXEv8Zvg3Xdis4a3c4IlIGezkYz09zQL5J0=