diff --git a/cmd/bisync/resolve.go b/cmd/bisync/resolve.go index d058978f2..1a3d4e716 100644 --- a/cmd/bisync/resolve.go +++ b/cmd/bisync/resolve.go @@ -4,8 +4,6 @@ import ( "context" "fmt" "math" - "mime" - "path" "strings" "time" @@ -13,6 +11,7 @@ import ( "github.com/rclone/rclone/fs" "github.com/rclone/rclone/fs/operations" "github.com/rclone/rclone/lib/terminal" + "github.com/rclone/rclone/lib/transform" ) // Prefer describes strategies for resolving sync conflicts @@ -130,6 +129,7 @@ type ( path2 namePair } ) + type namePair struct { oldName string newName string @@ -240,24 +240,7 @@ func SuffixName(ctx context.Context, remote, suffix string) string { } ci := fs.GetConfig(ctx) if ci.SuffixKeepExtension { - var ( - base = remote - exts = "" - first = true - ext = path.Ext(remote) - ) - for ext != "" { - // Look second and subsequent extensions in mime types. - // If they aren't found then don't keep it as an extension. - if !first && mime.TypeByExtension(ext) == "" { - break - } - base = base[:len(base)-len(ext)] - exts = ext + exts - first = false - ext = path.Ext(base) - } - return base + suffix + exts + return transform.SuffixKeepExtension(remote, suffix) } return remote + suffix } diff --git a/cmd/help.go b/cmd/help.go index f65ff6d53..c2f7c2b37 100644 --- a/cmd/help.go +++ b/cmd/help.go @@ -16,6 +16,7 @@ import ( "github.com/rclone/rclone/fs/log/logflags" "github.com/rclone/rclone/fs/rc/rcflags" "github.com/rclone/rclone/lib/atexit" + "github.com/rclone/rclone/lib/transform/transformflags" "github.com/spf13/cobra" "github.com/spf13/pflag" "golang.org/x/text/cases" @@ -137,6 +138,7 @@ func setupRootCommand(rootCmd *cobra.Command) { // Add global flags configflags.AddFlags(ci, pflag.CommandLine) filterflags.AddFlags(pflag.CommandLine) + transformflags.AddFlags(pflag.CommandLine) rcflags.AddFlags(pflag.CommandLine) logflags.AddFlags(pflag.CommandLine) @@ -191,7 +193,6 @@ func setupRootCommand(rootCmd *cobra.Command) { }) cobra.OnInitialize(initConfig) - } // 
Traverse the tree of commands running fn on each diff --git a/fs/march/march.go b/fs/march/march.go index b6da6c3fc..819f35cf3 100644 --- a/fs/march/march.go +++ b/fs/march/march.go @@ -15,6 +15,7 @@ import ( "github.com/rclone/rclone/fs/filter" "github.com/rclone/rclone/fs/list" "github.com/rclone/rclone/fs/walk" + "github.com/rclone/rclone/lib/transform" "golang.org/x/sync/errgroup" "golang.org/x/text/unicode/norm" ) @@ -86,6 +87,7 @@ func (m *March) srcKey(entry fs.DirEntry) string { return "" } name := path.Base(entry.Remote()) + name = transform.Path(name, fs.DirEntryType(entry) == "directory") for _, transform := range m.transforms { name = transform(name) } @@ -94,7 +96,14 @@ func (m *March) srcKey(entry fs.DirEntry) string { // dstKey turns a directory entry into a sort key using the defined transforms. func (m *March) dstKey(entry fs.DirEntry) string { - return m.srcKey(entry) // FIXME actually do something different + if entry == nil { + return "" + } + name := path.Base(entry.Remote()) + for _, transform := range m.transforms { + name = transform(name) + } + return name } // makeListDir makes constructs a listing function for the given fs @@ -454,7 +463,6 @@ func (m *March) processJob(job listDirJob) ([]listDirJob, error) { noDst: true, }) } - }, func(dst fs.DirEntry) { recurse := m.Callback.DstOnly(dst) if recurse && job.dstDepth > 0 { diff --git a/fs/operations/copy.go b/fs/operations/copy.go index 8228a14c7..967fd69f5 100644 --- a/fs/operations/copy.go +++ b/fs/operations/copy.go @@ -21,6 +21,7 @@ import ( "github.com/rclone/rclone/fs/hash" "github.com/rclone/rclone/lib/atexit" "github.com/rclone/rclone/lib/pacer" + "github.com/rclone/rclone/lib/transform" ) // State of the copy @@ -390,7 +391,7 @@ func Copy(ctx context.Context, f fs.Fs, dst fs.Object, remote string, src fs.Obj f: f, dstFeatures: f.Features(), dst: dst, - remote: remote, + remote: transform.Path(remote, false), src: src, ci: ci, tr: tr, @@ -399,7 +400,7 @@ func Copy(ctx 
context.Context, f fs.Fs, dst fs.Object, remote string, src fs.Obj } c.hashType, c.hashOption = CommonHash(ctx, f, src.Fs()) if c.dst != nil { - c.remote = c.dst.Remote() + c.remote = transform.Path(c.dst.Remote(), false) } // Are we using partials? // diff --git a/fs/operations/operations.go b/fs/operations/operations.go index 6d3ca4f76..ca1e8ee0a 100644 --- a/fs/operations/operations.go +++ b/fs/operations/operations.go @@ -39,6 +39,7 @@ import ( "github.com/rclone/rclone/lib/pacer" "github.com/rclone/rclone/lib/random" "github.com/rclone/rclone/lib/readers" + "github.com/rclone/rclone/lib/transform" "golang.org/x/sync/errgroup" "golang.org/x/text/unicode/norm" ) @@ -424,6 +425,8 @@ func MoveTransfer(ctx context.Context, fdst fs.Fs, dst fs.Object, remote string, // move - see Move for help func move(ctx context.Context, fdst fs.Fs, dst fs.Object, remote string, src fs.Object, isTransfer bool) (newDst fs.Object, err error) { + origRemote := remote // avoid double-transform on fallback to copy + remote = transform.Path(remote, false) ci := fs.GetConfig(ctx) var tr *accounting.Transfer if isTransfer { @@ -447,7 +450,7 @@ func move(ctx context.Context, fdst fs.Fs, dst fs.Object, remote string, src fs. if doMove := fdst.Features().Move; doMove != nil && (SameConfig(src.Fs(), fdst) || (SameRemoteType(src.Fs(), fdst) && (fdst.Features().ServerSideAcrossConfigs || ci.ServerSideAcrossConfigs))) { // Delete destination if it exists and is not the same file as src (could be same file while seemingly different if the remote is case insensitive) if dst != nil { - remote = dst.Remote() + remote = transform.Path(dst.Remote(), false) if !SameObject(src, dst) { err = DeleteFile(ctx, dst) if err != nil { @@ -488,7 +491,7 @@ func move(ctx context.Context, fdst fs.Fs, dst fs.Object, remote string, src fs. 
} } // Move not found or didn't work so copy dst <- src - newDst, err = Copy(ctx, fdst, dst, remote, src) + newDst, err = Copy(ctx, fdst, dst, origRemote, src) if err != nil { fs.Errorf(src, "Not deleting source as copy failed: %v", err) return newDst, err @@ -516,24 +519,7 @@ func SuffixName(ctx context.Context, remote string) string { return remote } if ci.SuffixKeepExtension { - var ( - base = remote - exts = "" - first = true - ext = path.Ext(remote) - ) - for ext != "" { - // Look second and subsequent extensions in mime types. - // If they aren't found then don't keep it as an extension. - if !first && mime.TypeByExtension(ext) == "" { - break - } - base = base[:len(base)-len(ext)] - exts = ext + exts - first = false - ext = path.Ext(base) - } - return base + ci.Suffix + exts + return transform.SuffixKeepExtension(remote, ci.Suffix) } return remote + ci.Suffix } diff --git a/fs/sync/sync.go b/fs/sync/sync.go index 0e015769e..008a25ed0 100644 --- a/fs/sync/sync.go +++ b/fs/sync/sync.go @@ -20,6 +20,7 @@ import ( "github.com/rclone/rclone/fs/march" "github.com/rclone/rclone/fs/operations" "github.com/rclone/rclone/lib/errcount" + "github.com/rclone/rclone/lib/transform" "golang.org/x/sync/errgroup" ) @@ -1254,8 +1255,8 @@ func (s *syncCopyMove) SrcOnly(src fs.DirEntry) (recurse bool) { s.logger(s.ctx, operations.MissingOnDst, src, nil, fs.ErrorIsDir) // Create the directory and make sure the Metadata/ModTime is correct - s.copyDirMetadata(s.ctx, s.fdst, nil, x.Remote(), x) - s.markDirModified(x.Remote()) + s.copyDirMetadata(s.ctx, s.fdst, nil, transform.Path(x.Remote(), true), x) + s.markDirModified(transform.Path(x.Remote(), true)) return true default: panic("Bad object in DirEntries") @@ -1288,6 +1289,8 @@ func (s *syncCopyMove) Match(ctx context.Context, dst, src fs.DirEntry) (recurse } case fs.Directory: // Do the same thing to the entire contents of the directory + srcX = fs.NewOverrideDirectory(srcX, transform.Path(src.Remote(), true)) + src = srcX 
s.markParentNotEmpty(src) dstX, ok := dst.(fs.Directory) if ok { @@ -1372,6 +1375,11 @@ func moveDir(ctx context.Context, fdst, fsrc fs.Fs, deleteEmptySrcDirs bool, cop return runSyncCopyMove(ctx, fdst, fsrc, fs.DeleteModeOff, true, deleteEmptySrcDirs, copyEmptySrcDirs) } +// Transform renames fdst in place +func Transform(ctx context.Context, fdst fs.Fs, deleteEmptySrcDirs bool, copyEmptySrcDirs bool) error { + return runSyncCopyMove(ctx, fdst, fdst, fs.DeleteModeOff, true, deleteEmptySrcDirs, copyEmptySrcDirs) +} + // MoveDir moves fsrc into fdst func MoveDir(ctx context.Context, fdst, fsrc fs.Fs, deleteEmptySrcDirs bool, copyEmptySrcDirs bool) error { fi := filter.GetConfig(ctx) diff --git a/fs/sync/sync_test.go b/fs/sync/sync_test.go index 4c6407190..a921baf35 100644 --- a/fs/sync/sync_test.go +++ b/fs/sync/sync_test.go @@ -27,6 +27,7 @@ import ( "github.com/rclone/rclone/fs/hash" "github.com/rclone/rclone/fs/operations" "github.com/rclone/rclone/fstest" + "github.com/rclone/rclone/lib/transform" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "golang.org/x/text/unicode/norm" @@ -2980,7 +2981,7 @@ func predictDstFromLogger(ctx context.Context) context.Context { if winner.Err != nil { errMsg = ";" + winner.Err.Error() } - operations.SyncFprintf(opt.JSON, "%s;%s;%v;%s%s\n", file.ModTime(ctx).Local().Format(timeFormat), checksum, file.Size(), file.Remote(), errMsg) + operations.SyncFprintf(opt.JSON, "%s;%s;%v;%s%s\n", file.ModTime(ctx).Local().Format(timeFormat), checksum, file.Size(), transform.Path(file.Remote(), false), errMsg) // TODO: should the transform be handled in the sync instead of here? 
} } return operations.WithSyncLogger(ctx, opt) diff --git a/fs/sync/sync_transform_test.go b/fs/sync/sync_transform_test.go new file mode 100644 index 000000000..892b040b1 --- /dev/null +++ b/fs/sync/sync_transform_test.go @@ -0,0 +1,457 @@ +// Test transform + +package sync + +import ( + "cmp" + "context" + "fmt" + "path/filepath" + "slices" + "strings" + "testing" + + _ "github.com/rclone/rclone/backend/all" + "github.com/rclone/rclone/fs" + "github.com/rclone/rclone/fs/filter" + "github.com/rclone/rclone/fs/operations" + "github.com/rclone/rclone/fs/walk" + "github.com/rclone/rclone/fstest" + "github.com/rclone/rclone/lib/transform" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/text/unicode/norm" +) + +var debug = `` + +func TestTransform(t *testing.T) { + type args struct { + TransformOpt []string + TransformBackOpt []string + Lossless bool // whether the TransformBackAlgo is always losslessly invertible + } + tests := []struct { + name string + args args + }{ + {name: "NFC", args: args{ + TransformOpt: []string{"nfc"}, + TransformBackOpt: []string{"nfd"}, + Lossless: false, + }}, + {name: "NFD", args: args{ + TransformOpt: []string{"nfd"}, + TransformBackOpt: []string{"nfc"}, + Lossless: false, + }}, + {name: "base64", args: args{ + TransformOpt: []string{"base64encode"}, + TransformBackOpt: []string{"base64encode"}, + Lossless: false, + }}, + {name: "prefix", args: args{ + TransformOpt: []string{"prefix=PREFIX"}, + TransformBackOpt: []string{"trimprefix=PREFIX"}, + Lossless: true, + }}, + {name: "suffix", args: args{ + TransformOpt: []string{"suffix=SUFFIX"}, + TransformBackOpt: []string{"trimsuffix=SUFFIX"}, + Lossless: true, + }}, + {name: "truncate", args: args{ + TransformOpt: []string{"truncate=10"}, + TransformBackOpt: []string{"truncate=10"}, + Lossless: false, + }}, + {name: "encoder", args: args{ + TransformOpt: []string{"encoder=Colon,SquareBracket"}, + TransformBackOpt: 
[]string{"decoder=Colon,SquareBracket"}, + Lossless: true, + }}, + {name: "ISO-8859-1", args: args{ + TransformOpt: []string{"ISO-8859-1"}, + TransformBackOpt: []string{"ISO-8859-1"}, + Lossless: false, + }}, + {name: "charmap", args: args{ + TransformOpt: []string{"all,charmap=ISO-8859-7"}, + TransformBackOpt: []string{"all,charmap=ISO-8859-7"}, + Lossless: false, + }}, + {name: "lowercase", args: args{ + TransformOpt: []string{"all,lowercase"}, + TransformBackOpt: []string{"all,lowercase"}, + Lossless: false, + }}, + {name: "ascii", args: args{ + TransformOpt: []string{"all,ascii"}, + TransformBackOpt: []string{"all,ascii"}, + Lossless: false, + }}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + r := fstest.NewRun(t) + defer r.Finalise() + + r.Mkdir(context.Background(), r.Flocal) + r.Mkdir(context.Background(), r.Fremote) + items := makeTestFiles(t, r, "dir1") + deleteDSStore(t, r) + r.CheckRemoteListing(t, items, nil) + r.CheckLocalListing(t, items, nil) + + err := transform.SetOptions(context.Background(), tt.args.TransformOpt...) + require.NoError(t, err) + + err = Sync(context.Background(), r.Fremote, r.Flocal, true) + assert.NoError(t, err) + compareNames(t, r, items) + + err = transform.SetOptions(context.Background(), tt.args.TransformBackOpt...) + require.NoError(t, err) + err = Sync(context.Background(), r.Fremote, r.Flocal, true) + assert.NoError(t, err) + compareNames(t, r, items) + + if tt.args.Lossless { + deleteDSStore(t, r) + r.CheckRemoteItems(t, items...) 
+ } + }) + } +} + +const alphabet = "abcdefg123456789" + +var extras = []string{"apple", "banana", "appleappleapplebanana", "splitbananasplit"} + +func makeTestFiles(t *testing.T, r *fstest.Run, dir string) []fstest.Item { + t.Helper() + n := 0 + // Create test files + items := []fstest.Item{} + for _, c := range alphabet { + var out strings.Builder + for i := rune(0); i < 7; i++ { + out.WriteRune(c + i) + } + fileName := filepath.Join(dir, fmt.Sprintf("%04d-%s.txt", n, out.String())) + fileName = strings.ToValidUTF8(fileName, "") + + if debug != "" { + fileName = debug + } + + item := r.WriteObject(context.Background(), fileName, fileName, t1) + r.WriteFile(fileName, fileName, t1) + items = append(items, item) + n++ + + if debug != "" { + break + } + } + + for _, extra := range extras { + item := r.WriteObject(context.Background(), extra, extra, t1) + r.WriteFile(extra, extra, t1) + items = append(items, item) + } + + return items +} + +func deleteDSStore(t *testing.T, r *fstest.Run) { + ctxDSStore, fi := filter.AddConfig(context.Background()) + err := fi.AddRule(`+ *.DS_Store`) + assert.NoError(t, err) + err = fi.AddRule(`- **`) + assert.NoError(t, err) + err = operations.Delete(ctxDSStore, r.Fremote) + assert.NoError(t, err) +} + +func compareNames(t *testing.T, r *fstest.Run, items []fstest.Item) { + var entries fs.DirEntries + + deleteDSStore(t, r) + err := walk.ListR(context.Background(), r.Fremote, "", true, -1, walk.ListObjects, func(e fs.DirEntries) error { + entries = append(entries, e...) 
+ return nil + }) + assert.NoError(t, err) + entries = slices.DeleteFunc(entries, func(E fs.DirEntry) bool { // remove those pesky .DS_Store files + if strings.Contains(E.Remote(), ".DS_Store") { + err := operations.DeleteFile(context.Background(), E.(fs.Object)) + assert.NoError(t, err) + return true + } + return false + }) + require.Equal(t, len(items), entries.Len()) + + // sort by CONVERTED name + slices.SortStableFunc(items, func(a, b fstest.Item) int { + aConv := transform.Path(a.Path, false) + bConv := transform.Path(b.Path, false) + return cmp.Compare(aConv, bConv) + }) + slices.SortStableFunc(entries, func(a, b fs.DirEntry) int { + return cmp.Compare(a.Remote(), b.Remote()) + }) + + for i, e := range entries { + expect := transform.Path(items[i].Path, false) + msg := fmt.Sprintf("expected %v, got %v", detectEncoding(expect), detectEncoding(e.Remote())) + assert.Equal(t, expect, e.Remote(), msg) + } +} + +func detectEncoding(s string) string { + if norm.NFC.IsNormalString(s) && norm.NFD.IsNormalString(s) { + return "BOTH" + } + if !norm.NFC.IsNormalString(s) && norm.NFD.IsNormalString(s) { + return "NFD" + } + if norm.NFC.IsNormalString(s) && !norm.NFD.IsNormalString(s) { + return "NFC" + } + return "OTHER" +} + +func TestTransformCopy(t *testing.T) { + ctx := context.Background() + r := fstest.NewRun(t) + err := transform.SetOptions(ctx, "all,suffix_keep_extension=_somesuffix") + require.NoError(t, err) + file1 := r.WriteFile("sub dir/hello world.txt", "hello world", t1) + + r.Mkdir(ctx, r.Fremote) + ctx = predictDstFromLogger(ctx) + err = Sync(ctx, r.Fremote, r.Flocal, true) + testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t) + require.NoError(t, err) + + r.CheckLocalItems(t, file1) + r.CheckRemoteItems(t, fstest.NewItem("sub dir_somesuffix/hello world_somesuffix.txt", "hello world", t1)) +} + +func TestDoubleTransform(t *testing.T) { + ctx := context.Background() + r := fstest.NewRun(t) + err := transform.SetOptions(ctx, 
"all,prefix=tac", "all,prefix=tic") + require.NoError(t, err) + file1 := r.WriteFile("toe/toe", "hello world", t1) + + r.Mkdir(ctx, r.Fremote) + ctx = predictDstFromLogger(ctx) + err = Sync(ctx, r.Fremote, r.Flocal, true) + testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t) + require.NoError(t, err) + + r.CheckLocalItems(t, file1) + r.CheckRemoteItems(t, fstest.NewItem("tictactoe/tictactoe", "hello world", t1)) +} + +func TestFileTag(t *testing.T) { + ctx := context.Background() + r := fstest.NewRun(t) + err := transform.SetOptions(ctx, "file,prefix=tac", "file,prefix=tic") + require.NoError(t, err) + file1 := r.WriteFile("toe/toe/toe", "hello world", t1) + + r.Mkdir(ctx, r.Fremote) + ctx = predictDstFromLogger(ctx) + err = Sync(ctx, r.Fremote, r.Flocal, true) + testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t) + require.NoError(t, err) + + r.CheckLocalItems(t, file1) + r.CheckRemoteItems(t, fstest.NewItem("toe/toe/tictactoe", "hello world", t1)) +} + +func TestNoTag(t *testing.T) { + ctx := context.Background() + r := fstest.NewRun(t) + err := transform.SetOptions(ctx, "prefix=tac", "prefix=tic") + require.NoError(t, err) + file1 := r.WriteFile("toe/toe/toe", "hello world", t1) + + r.Mkdir(ctx, r.Fremote) + ctx = predictDstFromLogger(ctx) + err = Sync(ctx, r.Fremote, r.Flocal, true) + testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t) + require.NoError(t, err) + + r.CheckLocalItems(t, file1) + r.CheckRemoteItems(t, fstest.NewItem("toe/toe/tictactoe", "hello world", t1)) +} + +func TestDirTag(t *testing.T) { + ctx := context.Background() + r := fstest.NewRun(t) + err := transform.SetOptions(ctx, "dir,prefix=tac", "dir,prefix=tic") + require.NoError(t, err) + r.WriteFile("toe/toe/toe.txt", "hello world", t1) + _, err = operations.MkdirModTime(ctx, r.Flocal, "empty_dir", t1) + require.NoError(t, err) + + r.Mkdir(ctx, r.Fremote) + ctx = predictDstFromLogger(ctx) + err = Sync(ctx, r.Fremote, r.Flocal, true) + 
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t) + require.NoError(t, err) + + r.CheckLocalListing(t, []fstest.Item{fstest.NewItem("toe/toe/toe.txt", "hello world", t1)}, []string{"empty_dir", "toe", "toe/toe"}) + r.CheckRemoteListing(t, []fstest.Item{fstest.NewItem("tictactoe/tictactoe/toe.txt", "hello world", t1)}, []string{"tictacempty_dir", "tictactoe", "tictactoe/tictactoe"}) +} + +func TestAllTag(t *testing.T) { + ctx := context.Background() + r := fstest.NewRun(t) + err := transform.SetOptions(ctx, "all,prefix=tac", "all,prefix=tic") + require.NoError(t, err) + r.WriteFile("toe/toe/toe.txt", "hello world", t1) + _, err = operations.MkdirModTime(ctx, r.Flocal, "empty_dir", t1) + require.NoError(t, err) + + r.Mkdir(ctx, r.Fremote) + ctx = predictDstFromLogger(ctx) + err = Sync(ctx, r.Fremote, r.Flocal, true) + testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t) + require.NoError(t, err) + + r.CheckLocalListing(t, []fstest.Item{fstest.NewItem("toe/toe/toe.txt", "hello world", t1)}, []string{"empty_dir", "toe", "toe/toe"}) + r.CheckRemoteListing(t, []fstest.Item{fstest.NewItem("tictactoe/tictactoe/tictactoe.txt", "hello world", t1)}, []string{"tictacempty_dir", "tictactoe", "tictactoe/tictactoe"}) + err = operations.Check(ctx, &operations.CheckOpt{Fsrc: r.Flocal, Fdst: r.Fremote}) // should not error even though dst has transformed names + assert.NoError(t, err) +} + +func TestRunTwice(t *testing.T) { + ctx := context.Background() + r := fstest.NewRun(t) + err := transform.SetOptions(ctx, "dir,prefix=tac", "dir,prefix=tic") + require.NoError(t, err) + file1 := r.WriteFile("toe/toe/toe.txt", "hello world", t1) + + r.Mkdir(ctx, r.Fremote) + ctx = predictDstFromLogger(ctx) + err = Sync(ctx, r.Fremote, r.Flocal, true) + testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t) + require.NoError(t, err) + + r.CheckLocalItems(t, file1) + r.CheckRemoteItems(t, fstest.NewItem("tictactoe/tictactoe/toe.txt", "hello 
world", t1)) + + // result should not change second time, since src is unchanged + ctx = predictDstFromLogger(ctx) + err = Sync(ctx, r.Fremote, r.Flocal, true) + testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t) + require.NoError(t, err) + + r.CheckLocalItems(t, file1) + r.CheckRemoteItems(t, fstest.NewItem("tictactoe/tictactoe/toe.txt", "hello world", t1)) +} + +func TestSyntax(t *testing.T) { + ctx := context.Background() + err := transform.SetOptions(ctx, "prefix") + assert.Error(t, err) // should error as required value is missing + + err = transform.SetOptions(ctx, "banana") + assert.Error(t, err) // should error as unrecognized option + + err = transform.SetOptions(ctx, "=123") + assert.Error(t, err) // should error as required key is missing + + err = transform.SetOptions(ctx, "prefix=123") + assert.NoError(t, err) // should not error +} + +func TestConflicting(t *testing.T) { + ctx := context.Background() + r := fstest.NewRun(t) + err := transform.SetOptions(ctx, "prefix=tac", "trimprefix=tac") + require.NoError(t, err) + file1 := r.WriteFile("toe/toe/toe", "hello world", t1) + + r.Mkdir(ctx, r.Fremote) + ctx = predictDstFromLogger(ctx) + err = Sync(ctx, r.Fremote, r.Flocal, true) + testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t) + require.NoError(t, err) + + // should result in no change as prefix and trimprefix cancel out + r.CheckLocalItems(t, file1) + r.CheckRemoteItems(t, fstest.NewItem("toe/toe/toe", "hello world", t1)) +} + +func TestMove(t *testing.T) { + ctx := context.Background() + r := fstest.NewRun(t) + err := transform.SetOptions(ctx, "all,prefix=tac", "all,prefix=tic") + require.NoError(t, err) + r.WriteFile("toe/toe/toe.txt", "hello world", t1) + _, err = operations.MkdirModTime(ctx, r.Flocal, "empty_dir", t1) + require.NoError(t, err) + + r.Mkdir(ctx, r.Fremote) + ctx = predictDstFromLogger(ctx) + err = MoveDir(ctx, r.Fremote, r.Flocal, true, true) + testLoggerVsLsf(ctx, r.Fremote, 
operations.GetLoggerOpt(ctx).JSON, t) + require.NoError(t, err) + + r.CheckLocalListing(t, []fstest.Item{}, []string{}) + r.CheckRemoteListing(t, []fstest.Item{fstest.NewItem("tictactoe/tictactoe/tictactoe.txt", "hello world", t1)}, []string{"tictacempty_dir", "tictactoe", "tictactoe/tictactoe"}) +} + +func TestBase64(t *testing.T) { + ctx := context.Background() + r := fstest.NewRun(t) + err := transform.SetOptions(ctx, "all,base64encode") + require.NoError(t, err) + file1 := r.WriteFile("toe/toe/toe.txt", "hello world", t1) + + r.Mkdir(ctx, r.Fremote) + ctx = predictDstFromLogger(ctx) + err = Sync(ctx, r.Fremote, r.Flocal, true) + testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t) + require.NoError(t, err) + + r.CheckLocalItems(t, file1) + r.CheckRemoteItems(t, fstest.NewItem("dG9l/dG9l/dG9lLnR4dA==", "hello world", t1)) + + // round trip + err = transform.SetOptions(ctx, "all,base64decode") + require.NoError(t, err) + ctx = predictDstFromLogger(ctx) + err = Sync(ctx, r.Flocal, r.Fremote, true) + testLoggerVsLsf(ctx, r.Flocal, operations.GetLoggerOpt(ctx).JSON, t) + require.NoError(t, err) + + r.CheckLocalItems(t, file1) + r.CheckRemoteItems(t, fstest.NewItem("dG9l/dG9l/dG9lLnR4dA==", "hello world", t1)) +} + +func TestError(t *testing.T) { + ctx := context.Background() + r := fstest.NewRun(t) + err := transform.SetOptions(ctx, "all,prefix=ta/c") // has illegal character + require.NoError(t, err) + file1 := r.WriteFile("toe/toe/toe", "hello world", t1) + + r.Mkdir(ctx, r.Fremote) + // ctx = predictDstFromLogger(ctx) + err = Sync(ctx, r.Fremote, r.Flocal, true) + // testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t) + assert.Error(t, err) + + r.CheckLocalListing(t, []fstest.Item{file1}, []string{"toe", "toe/toe"}) + r.CheckRemoteListing(t, []fstest.Item{}, []string{}) + err = transform.SetOptions(ctx, "") // empty string should be accepted without error + assert.NoError(t, err) +} diff --git a/lib/encoder/encoder.go b/lib/encoder/encoder.go index 
b1ff541a3..c8f41d76c 100644 --- a/lib/encoder/encoder.go +++ b/lib/encoder/encoder.go @@ -151,8 +151,8 @@ func init() { alias("Dot", EncodeDot) } -// validStrings returns all the valid MultiEncoder strings -func validStrings() string { +// ValidStrings returns all the valid MultiEncoder strings +func ValidStrings() string { var out []string for k := range nameToEncoding { out = append(out, k) @@ -192,7 +192,7 @@ func (mask *MultiEncoder) Set(in string) error { } else { i, err := strconv.ParseUint(part, 0, 0) if err != nil { - return fmt.Errorf("bad encoding %q: possible values are: %s", part, validStrings()) + return fmt.Errorf("bad encoding %q: possible values are: %s", part, ValidStrings()) } out |= MultiEncoder(i) } @@ -313,8 +313,7 @@ func (mask MultiEncoder) Encode(in string) string { } if mask.Has(EncodeAsterisk) { // * switch r { - case '*', - '*': + case '*', '*': return true } } @@ -346,64 +345,55 @@ func (mask MultiEncoder) Encode(in string) string { } if mask.Has(EncodeQuestion) { // ? 
switch r { - case '?', - '?': + case '?', '?': return true } } if mask.Has(EncodeColon) { // : switch r { - case ':', - ':': + case ':', ':': return true } } if mask.Has(EncodePipe) { // | switch r { - case '|', - '|': + case '|', '|': return true } } if mask.Has(EncodeDoubleQuote) { // " switch r { - case '"', - '"': + case '"', '"': return true } } if mask.Has(EncodeSingleQuote) { // ' switch r { - case '\'', - ''': + case '\'', ''': return true } } if mask.Has(EncodeBackQuote) { // ` switch r { - case '`', - '`': + case '`', '`': return true } } if mask.Has(EncodeDollar) { // $ switch r { - case '$', - '$': + case '$', '$': return true } } if mask.Has(EncodeSlash) { // / switch r { - case '/', - '/': + case '/', '/': return true } } if mask.Has(EncodeBackSlash) { // \ switch r { - case '\\', - '\': + case '\\', '\': return true } } @@ -416,15 +406,13 @@ func (mask MultiEncoder) Encode(in string) string { } if mask.Has(EncodeHash) { // # switch r { - case '#', - '#': + case '#', '#': return true } } if mask.Has(EncodePercent) { // % switch r { - case '%', - '%': + case '%', '%': return true } } @@ -1182,6 +1170,7 @@ func appendQuotedBytes(w io.Writer, s string) { _, _ = fmt.Fprintf(w, string(QuoteRune)+"%02X", b) } } + func appendUnquotedByte(w io.Writer, s string) bool { if len(s) < 2 { return false @@ -1202,12 +1191,15 @@ func (identity) Decode(in string) string { return in } func (i identity) FromStandardPath(s string) string { return FromStandardPath(i, s) } + func (i identity) FromStandardName(s string) string { return FromStandardName(i, s) } + func (i identity) ToStandardPath(s string) string { return ToStandardPath(i, s) } + func (i identity) ToStandardName(s string) string { return ToStandardName(i, s) } diff --git a/lib/transform/cmap.go b/lib/transform/cmap.go new file mode 100644 index 000000000..7f676e792 --- /dev/null +++ b/lib/transform/cmap.go @@ -0,0 +1,71 @@ +package transform + +import ( + "fmt" + "strings" + "sync" + + 
"github.com/rclone/rclone/fs" + "golang.org/x/text/encoding/charmap" +) + +var ( + cmaps = map[int]*charmap.Charmap{} + lock sync.Mutex +) + +type cmapChoices struct{} + +func (cmapChoices) Choices() []string { + choices := make([]string, 1) + i := 0 + for _, enc := range charmap.All { + c, ok := enc.(*charmap.Charmap) + if !ok { + continue + } + name := strings.ReplaceAll(c.String(), " ", "-") + if name == "" { + name = fmt.Sprintf("unknown-%d", i) + } + lock.Lock() + cmaps[i] = c + lock.Unlock() + choices = append(choices, name) + i++ + } + return choices +} + +func (cmapChoices) Type() string { + return "string" +} + +func charmapByID(cm fs.Enum[cmapChoices]) *charmap.Charmap { + lock.Lock() + c, ok := cmaps[int(cm)] + lock.Unlock() + if ok { + return c + } + return nil +} + +func encodeWithReplacement(s string, cmap *charmap.Charmap) string { + return strings.Map(func(r rune) rune { + b, ok := cmap.EncodeRune(r) + if !ok { + return '_' + } + return cmap.DecodeByte(b) + }, s) +} + +func toASCII(s string) string { + return strings.Map(func(r rune) rune { + if r <= 127 { + return r + } + return -1 + }, s) +} diff --git a/lib/transform/options.go b/lib/transform/options.go new file mode 100644 index 000000000..30243bcc7 --- /dev/null +++ b/lib/transform/options.go @@ -0,0 +1,240 @@ +package transform + +import ( + "context" + "errors" + "strings" + + "github.com/rclone/rclone/fs" +) + +func init() { + fs.RegisterGlobalOptions(fs.OptionsInfo{Name: "name_transform", Opt: &Opt.Flags, Options: OptionsInfo, Reload: Reload}) +} + +type transform struct { + key transformAlgo // for example, "prefix" + value string // for example, "some_prefix_" + tag tag // file, dir, or all +} + +// Options stores the parsed and unparsed transform options. +// their order must never be changed or sorted. 
+type Options struct { + Flags Flags // unparsed flag value like "file,prefix=ABC" + transforms []transform // parsed from NameTransform +} + +// Flags is a slice of unparsed values set from command line flags or env vars +type Flags struct { + NameTransform []string `config:"name_transform"` +} + +// Opt is the default options modified by the environment variables and command line flags +var Opt Options + +// tag controls which part of the file path is affected (file, dir, all) +type tag int + +// tag modes +const ( + file tag = iota // Only transform the leaf name of files (default) + dir // Only transform name of directories - these may appear anywhere in the path + all // Transform the entire path for files and directories +) + +// OptionsInfo describes the Options in use +var OptionsInfo = fs.Options{{ + Name: "name_transform", + Default: []string{}, + Help: "TODO", + Groups: "Filter", +}} + +// Reload the transform options from the flags +func Reload(ctx context.Context) (err error) { + return newOpt(Opt) +} + +// SetOptions sets the options from flags passed in. +// Any existing flags will be overwritten. +// s should be in the same format as cmd line flags, i.e. 
"all,prefix=XXX" +func SetOptions(ctx context.Context, s ...string) (err error) { + Opt = Options{Flags: Flags{NameTransform: s}} + return Reload(ctx) +} + +// overwrite Opt.transforms with values from Opt.Flags +func newOpt(opt Options) (err error) { + Opt.transforms = []transform{} + + for _, transform := range opt.Flags.NameTransform { + t, err := parse(transform) + if err != nil { + return err + } + Opt.transforms = append(Opt.transforms, t) + } + return nil +} + +// parse a single instance of --name-transform +func parse(s string) (t transform, err error) { + if s == "" { + return t, nil + } + s = t.parseTag(s) + err = t.parseKeyVal(s) + return t, err +} + +// parse the tag (file/dir/all), set the option accordingly, and return the trimmed string + // + // we don't worry about errors here because it will error anyway as an invalid key +func (t *transform) parseTag(s string) string { + if strings.HasPrefix(s, "file,") { + t.tag = file + return strings.TrimPrefix(s, "file,") + } + if strings.HasPrefix(s, "dir,") { + t.tag = dir + return strings.TrimPrefix(s, "dir,") + } + if strings.HasPrefix(s, "all,") { + t.tag = all + return strings.TrimPrefix(s, "all,") + } + return s +} + +// parse key and value (if any) by splitting on '=' sign + // (file/dir/all tag has already been trimmed) +func (t *transform) parseKeyVal(s string) (err error) { + if !strings.ContainsRune(s, '=') { + err = t.key.Set(s) + if err != nil { + return err + } + if t.requiresValue() { + fs.Debugf(nil, "received %v", s) + return errors.New("value is required for " + t.key.String()) + } + return nil + } + split := strings.Split(s, "=") + if len(split) != 2 { + return errors.New("too many values") + } + if split[0] == "" { + return errors.New("key cannot be blank") + } + err = t.key.Set(split[0]) + if err != nil { + return err + } + t.value = split[1] + return nil +} + +// returns true if this particular algorithm requires a value +func (t *transform) requiresValue() bool { + switch t.key { + case 
ConvFindReplace: + return true + case ConvPrefix: + return true + case ConvSuffix: + return true + case ConvSuffixKeepExtension: + return true + case ConvTrimPrefix: + return true + case ConvTrimSuffix: + return true + case ConvIndex: + return true + case ConvDate: + return true + case ConvTruncate: + return true + case ConvEncoder: + return true + case ConvDecoder: + return true + } + return false +} + +// transformAlgo describes conversion setting +type transformAlgo = fs.Enum[transformChoices] + +// Supported transform options +const ( + ConvNone transformAlgo = iota + ConvToNFC + ConvToNFD + ConvToNFKC + ConvToNFKD + ConvFindReplace + ConvPrefix + ConvSuffix + ConvSuffixKeepExtension + ConvTrimPrefix + ConvTrimSuffix + ConvIndex + ConvDate + ConvTruncate + ConvBase64Encode + ConvBase64Decode + ConvEncoder + ConvDecoder + ConvISO8859_1 + ConvWindows1252 + ConvMacintosh + ConvCharmap + ConvLowercase + ConvUppercase + ConvTitlecase + ConvASCII + ConvURL + ConvMapper +) + +type transformChoices struct{} + +func (transformChoices) Choices() []string { + return []string{ + ConvNone: "none", + ConvToNFC: "nfc", + ConvToNFD: "nfd", + ConvToNFKC: "nfkc", + ConvToNFKD: "nfkd", + ConvFindReplace: "replace", + ConvPrefix: "prefix", + ConvSuffix: "suffix", + ConvSuffixKeepExtension: "suffix_keep_extension", + ConvTrimPrefix: "trimprefix", + ConvTrimSuffix: "trimsuffix", + ConvIndex: "index", + ConvDate: "date", + ConvTruncate: "truncate", + ConvBase64Encode: "base64encode", + ConvBase64Decode: "base64decode", + ConvEncoder: "encoder", + ConvDecoder: "decoder", + ConvISO8859_1: "ISO-8859-1", + ConvWindows1252: "Windows-1252", + ConvMacintosh: "Macintosh", + ConvCharmap: "charmap", + ConvLowercase: "lowercase", + ConvUppercase: "uppercase", + ConvTitlecase: "titlecase", + ConvASCII: "ascii", + ConvURL: "url", + ConvMapper: "mapper", + } +} + +func (transformChoices) Type() string { + return "string" +} diff --git a/lib/transform/transform.go b/lib/transform/transform.go new 
file mode 100644 index 000000000..ef41449e5 --- /dev/null +++ b/lib/transform/transform.go @@ -0,0 +1,226 @@ +// Package transform holds functions for path name transformations +package transform + +import ( + "encoding/base64" + "errors" + "fmt" + "mime" + "os" + "path" + "strconv" + "strings" + "unicode/utf8" + + "github.com/rclone/rclone/fs" + "github.com/rclone/rclone/lib/encoder" + "golang.org/x/text/encoding/charmap" + "golang.org/x/text/unicode/norm" +) + +// Path transforms a path s according to the --name-transform options in use +// +// If no transforms are in use, s is returned unchanged +func Path(s string, isDir bool) string { + if !Transforming() { + return s + } + + var err error + old := s + for _, t := range Opt.transforms { + if isDir && t.tag == file { + continue + } + baseOnly := !isDir && t.tag == file + if t.tag == dir && !isDir { + s, err = transformDir(s, t) + } else { + s, err = transformPath(s, t, baseOnly) + } + if err != nil { + fs.Error(s, err.Error()) // TODO: return err instead of logging it? + } + } + if old != s { + fs.Debugf(old, "transformed to: %v", s) + } + return s +} + +// Transforming returns true when transforms are in use +func Transforming() bool { + return len(Opt.transforms) > 0 +} + +// transformPath transforms a path string according to the chosen TransformAlgo. +// Each path segment is transformed separately, to preserve path separators. +// If baseOnly is true, only the base will be transformed (useful for renaming while walking a dir tree recursively.) +// for example, "some/nested/path" -> "some/nested/CONVERTEDPATH" +// otherwise, the entire is path is transformed. +func transformPath(s string, t transform, baseOnly bool) (string, error) { + if s == "" || s == "/" || s == "\\" || s == "." 
{ + return "", nil + } + + if baseOnly { + transformedBase, err := transformPathSegment(path.Base(s), t) + if err := validateSegment(transformedBase); err != nil { + return "", err + } + return path.Join(path.Dir(s), transformedBase), err + } + + segments := strings.Split(s, string(os.PathSeparator)) + transformedSegments := make([]string, len(segments)) + for _, seg := range segments { + convSeg, err := transformPathSegment(seg, t) + if err != nil { + return "", err + } + if err := validateSegment(convSeg); err != nil { + return "", err + } + transformedSegments = append(transformedSegments, convSeg) + } + return path.Join(transformedSegments...), nil +} + +// transform all but the last path segment +func transformDir(s string, t transform) (string, error) { + dirPath, err := transformPath(path.Dir(s), t, false) + if err != nil { + return "", err + } + return path.Join(dirPath, path.Base(s)), nil +} + +// transformPathSegment transforms one path segment (or really any string) according to the chosen TransformAlgo. +// It assumes path separators have already been trimmed. 
+func transformPathSegment(s string, t transform) (string, error) { + switch t.key { + case ConvNone: + return s, nil + case ConvToNFC: + return norm.NFC.String(s), nil + case ConvToNFD: + return norm.NFD.String(s), nil + case ConvToNFKC: + return norm.NFKC.String(s), nil + case ConvToNFKD: + return norm.NFKD.String(s), nil + case ConvBase64Encode: + return base64.URLEncoding.EncodeToString([]byte(s)), nil // URLEncoding to avoid slashes + case ConvBase64Decode: + if s == ".DS_Store" { + return s, nil + } + b, err := base64.URLEncoding.DecodeString(s) + if err != nil { + fs.Errorf(s, "base64 error") + } + return string(b), err + case ConvFindReplace: + split := strings.Split(t.value, ":") + if len(split) != 2 { + return s, fmt.Errorf("wrong number of values: %v", t.value) + } + return strings.ReplaceAll(s, split[0], split[1]), nil + case ConvPrefix: + return t.value + s, nil + case ConvSuffix: + return s + t.value, nil + case ConvSuffixKeepExtension: + return SuffixKeepExtension(s, t.value), nil + case ConvTrimPrefix: + return strings.TrimPrefix(s, t.value), nil + case ConvTrimSuffix: + return strings.TrimSuffix(s, t.value), nil + case ConvTruncate: + max, err := strconv.Atoi(t.value) + if err != nil { + return s, err + } + if max <= 0 { + return s, nil + } + if utf8.RuneCountInString(s) <= max { + return s, nil + } + runes := []rune(s) + return string(runes[:max]), nil + case ConvEncoder: + var enc encoder.MultiEncoder + err := enc.Set(t.value) + if err != nil { + return s, err + } + return enc.Encode(s), nil + case ConvDecoder: + var enc encoder.MultiEncoder + err := enc.Set(t.value) + if err != nil { + return s, err + } + return enc.Decode(s), nil + case ConvISO8859_1: + return encodeWithReplacement(s, charmap.ISO8859_1), nil + case ConvWindows1252: + return encodeWithReplacement(s, charmap.Windows1252), nil + case ConvMacintosh: + return encodeWithReplacement(s, charmap.Macintosh), nil + case ConvCharmap: + var cmapType fs.Enum[cmapChoices] + err := 
cmapType.Set(t.value) + if err != nil { + return s, err + } + c := charmapByID(cmapType) + return encodeWithReplacement(s, c), nil + case ConvLowercase: + return strings.ToLower(s), nil + case ConvUppercase: + return strings.ToUpper(s), nil + case ConvTitlecase: + return strings.ToTitle(s), nil + case ConvASCII: + return toASCII(s), nil + default: + return "", errors.New("this option is not yet implemented") + } +} + +// SuffixKeepExtension adds a suffix while keeping extension +// +// i.e. file.txt becomes file_somesuffix.txt not file.txt_somesuffix +func SuffixKeepExtension(remote string, suffix string) string { + var ( + base = remote + exts = "" + first = true + ext = path.Ext(remote) + ) + for ext != "" { + // Look second and subsequent extensions in mime types. + // If they aren't found then don't keep it as an extension. + if !first && mime.TypeByExtension(ext) == "" { + break + } + base = base[:len(base)-len(ext)] + exts = ext + exts + first = false + ext = path.Ext(base) + } + return base + suffix + exts +} + +// forbid transformations that add/remove path separators +func validateSegment(s string) error { + if s == "" { + return errors.New("transform cannot render path segments empty") + } + if strings.ContainsRune(s, '/') { + return fmt.Errorf("transform cannot add path separators: %v", s) + } + return nil +} diff --git a/lib/transform/transform_test.go b/lib/transform/transform_test.go new file mode 100644 index 000000000..67c7d77ac --- /dev/null +++ b/lib/transform/transform_test.go @@ -0,0 +1,130 @@ +package transform + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// sync tests are in fs/sync/sync_transform_test.go to avoid import cycle issues + +func TestPath(t *testing.T) { + for _, test := range []struct { + path string + want string + }{ + {"", ""}, + {"toe/toe/toe", "tictactoe/tictactoe/tictactoe"}, + {"a/b/c", "tictaca/tictacb/tictacc"}, + } { + err := 
SetOptions(context.Background(), "all,prefix=tac", "all,prefix=tic") + require.NoError(t, err) + + got := Path(test.path, false) + assert.Equal(t, test.want, got) + } +} + +func TestFileTagOnFile(t *testing.T) { + for _, test := range []struct { + path string + want string + }{ + {"a/b/c.txt", "a/b/1c.txt"}, + } { + err := SetOptions(context.Background(), "file,prefix=1") + require.NoError(t, err) + + got := Path(test.path, false) + assert.Equal(t, test.want, got) + } +} + +func TestDirTagOnFile(t *testing.T) { + for _, test := range []struct { + path string + want string + }{ + {"a/b/c.txt", "1a/1b/c.txt"}, + } { + err := SetOptions(context.Background(), "dir,prefix=1") + require.NoError(t, err) + + got := Path(test.path, false) + assert.Equal(t, test.want, got) + } +} + +func TestAllTag(t *testing.T) { + for _, test := range []struct { + path string + want string + }{ + {"a/b/c.txt", "1a/1b/1c.txt"}, + } { + err := SetOptions(context.Background(), "all,prefix=1") + require.NoError(t, err) + + got := Path(test.path, false) + assert.Equal(t, test.want, got) + } +} + +func TestFileTagOnDir(t *testing.T) { + for _, test := range []struct { + path string + want string + }{ + {"a/b", "a/b"}, + } { + err := SetOptions(context.Background(), "file,prefix=1") + require.NoError(t, err) + + got := Path(test.path, true) + assert.Equal(t, test.want, got) + } +} + +func TestDirTagOnDir(t *testing.T) { + for _, test := range []struct { + path string + want string + }{ + {"a/b", "1a/1b"}, + } { + err := SetOptions(context.Background(), "dir,prefix=1") + require.NoError(t, err) + + got := Path(test.path, true) + assert.Equal(t, test.want, got) + } +} + +func TestVarious(t *testing.T) { + for _, test := range []struct { + path string + want string + flags []string + }{ + {"stories/The Quick Brown Fox!.txt", "STORIES/THE QUICK BROWN FOX!.TXT", []string{"all,uppercase"}}, + {"stories/The Quick Brown Fox!.txt", "stories/The Slow Brown Turtle!.txt", []string{"all,replace=Fox:Turtle", 
"all,replace=Quick:Slow"}}, + {"stories/The Quick Brown Fox!.txt", "c3Rvcmllcw==/VGhlIFF1aWNrIEJyb3duIEZveCEudHh0", []string{"all,base64encode"}}, + {"c3Rvcmllcw==/VGhlIFF1aWNrIEJyb3duIEZveCEudHh0", "stories/The Quick Brown Fox!.txt", []string{"all,base64decode"}}, + {"stories/The Quick Brown 🦊 Fox Went to the Café!.txt", "stories/The Quick Brown 🦊 Fox Went to the Café!.txt", []string{"all,nfc"}}, + {"stories/The Quick Brown 🦊 Fox Went to the Café!.txt", "stories/The Quick Brown 🦊 Fox Went to the Café!.txt", []string{"all,nfd"}}, + {"stories/The Quick Brown 🦊 Fox!.txt", "stories/The Quick Brown Fox!.txt", []string{"all,ascii"}}, + {"stories/The Quick Brown Fox!.txt", "stories/The Quick Brown Fox!", []string{"all,trimsuffix=.txt"}}, + {"stories/The Quick Brown Fox!.txt", "OLD_stories/OLD_The Quick Brown Fox!.txt", []string{"all,prefix=OLD_"}}, + {"stories/The Quick Brown 🦊 Fox Went to the Café!.txt", "stories/The Quick Brown _ Fox Went to the Caf_!.txt", []string{"all,charmap=ISO-8859-7"}}, + {"stories/The Quick Brown Fox: A Memoir [draft].txt", "stories/The Quick Brown Fox: A Memoir [draft].txt", []string{"all,encoder=Colon,SquareBracket"}}, + {"stories/The Quick Brown 🦊 Fox Went to the Café!.txt", "stories/The Quick Brown 🦊 Fox", []string{"all,truncate=21"}}, + } { + err := SetOptions(context.Background(), test.flags...) 
+ require.NoError(t, err) + + got := Path(test.path, false) + assert.Equal(t, test.want, got) + } +} diff --git a/lib/transform/transformflags/transformflags.go b/lib/transform/transformflags/transformflags.go new file mode 100644 index 000000000..c03c2159d --- /dev/null +++ b/lib/transform/transformflags/transformflags.go @@ -0,0 +1,14 @@ +// Package transformflags implements command line flags to set up a transform +package transformflags + +import ( + "github.com/rclone/rclone/fs/config/flags" + "github.com/rclone/rclone/lib/transform" + + "github.com/spf13/pflag" +) + +// AddFlags adds the transform flags to the command +func AddFlags(flagSet *pflag.FlagSet) { + flags.AddFlagsFromOptions(flagSet, "", transform.OptionsInfo) +}