Mirror of https://github.com/rclone/rclone.git (synced 2025-12-06 00:03:32 +00:00)

Compare commits: v1.70-stab ... pr-8538-tr (7 Commits)
| Author | SHA1 | Date |
|---|---|---|
|  | dbce609665 |  |
|  | 62e8512711 |  |
|  | 53bdd58085 |  |
|  | 7b9f8eca00 |  |
|  | 433ed18e91 |  |
|  | 34a20555ca |  |
|  | f20ee1488b |  |
@@ -15,6 +15,7 @@ import (
|
||||
_ "github.com/rclone/rclone/cmd/cleanup"
|
||||
_ "github.com/rclone/rclone/cmd/cmount"
|
||||
_ "github.com/rclone/rclone/cmd/config"
|
||||
_ "github.com/rclone/rclone/cmd/convmv"
|
||||
_ "github.com/rclone/rclone/cmd/copy"
|
||||
_ "github.com/rclone/rclone/cmd/copyto"
|
||||
_ "github.com/rclone/rclone/cmd/copyurl"
|
||||
|
||||
@@ -5,8 +5,6 @@ import (
|
||||
"os"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Names comprises a set of file names
|
||||
@@ -85,81 +83,3 @@ func (am AliasMap) Alias(name1 string) string {
|
||||
}
|
||||
return name1
|
||||
}
|
||||
|
||||
// ParseGlobs determines whether a string contains {brackets}
|
||||
// and returns the substring (including both brackets) for replacing
|
||||
// substring is first opening bracket to last closing bracket --
|
||||
// good for {{this}} but not {this}{this}
|
||||
func ParseGlobs(s string) (hasGlobs bool, substring string) {
|
||||
open := strings.Index(s, "{")
|
||||
close := strings.LastIndex(s, "}")
|
||||
if open >= 0 && close > open {
|
||||
return true, s[open : close+1]
|
||||
}
|
||||
return false, ""
|
||||
}
|
||||
|
||||
// TrimBrackets converts {{this}} to this
|
||||
func TrimBrackets(s string) string {
|
||||
return strings.Trim(s, "{}")
|
||||
}
|
||||
|
||||
// TimeFormat converts a user-supplied string to a Go time constant, if possible
|
||||
func TimeFormat(timeFormat string) string {
|
||||
switch timeFormat {
|
||||
case "Layout":
|
||||
timeFormat = time.Layout
|
||||
case "ANSIC":
|
||||
timeFormat = time.ANSIC
|
||||
case "UnixDate":
|
||||
timeFormat = time.UnixDate
|
||||
case "RubyDate":
|
||||
timeFormat = time.RubyDate
|
||||
case "RFC822":
|
||||
timeFormat = time.RFC822
|
||||
case "RFC822Z":
|
||||
timeFormat = time.RFC822Z
|
||||
case "RFC850":
|
||||
timeFormat = time.RFC850
|
||||
case "RFC1123":
|
||||
timeFormat = time.RFC1123
|
||||
case "RFC1123Z":
|
||||
timeFormat = time.RFC1123Z
|
||||
case "RFC3339":
|
||||
timeFormat = time.RFC3339
|
||||
case "RFC3339Nano":
|
||||
timeFormat = time.RFC3339Nano
|
||||
case "Kitchen":
|
||||
timeFormat = time.Kitchen
|
||||
case "Stamp":
|
||||
timeFormat = time.Stamp
|
||||
case "StampMilli":
|
||||
timeFormat = time.StampMilli
|
||||
case "StampMicro":
|
||||
timeFormat = time.StampMicro
|
||||
case "StampNano":
|
||||
timeFormat = time.StampNano
|
||||
case "DateTime":
|
||||
// timeFormat = time.DateTime // missing in go1.19
|
||||
timeFormat = "2006-01-02 15:04:05"
|
||||
case "DateOnly":
|
||||
// timeFormat = time.DateOnly // missing in go1.19
|
||||
timeFormat = "2006-01-02"
|
||||
case "TimeOnly":
|
||||
// timeFormat = time.TimeOnly // missing in go1.19
|
||||
timeFormat = "15:04:05"
|
||||
case "MacFriendlyTime", "macfriendlytime", "mac":
|
||||
timeFormat = "2006-01-02 0304PM" // not actually a Go constant -- but useful as macOS filenames can't have colons
|
||||
}
|
||||
return timeFormat
|
||||
}
|
||||
|
||||
// AppyTimeGlobs converts "myfile-{DateOnly}.txt" to "myfile-2006-01-02.txt"
|
||||
func AppyTimeGlobs(s string, t time.Time) string {
|
||||
hasGlobs, substring := ParseGlobs(s)
|
||||
if !hasGlobs {
|
||||
return s
|
||||
}
|
||||
timeString := t.Local().Format(TimeFormat(TrimBrackets(substring)))
|
||||
return strings.ReplaceAll(s, substring, timeString)
|
||||
}
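These glob helpers are removed from bilib in this hunk; per the bisync and operations changes further down, the equivalents are called from `lib/transform` (`transform.AppyTimeGlobs`, `transform.TimeFormat`). A minimal hedged sketch of the intended flow, assuming the lib/transform function keeps the signature shown above:

```go
package main

import (
	"fmt"
	"time"

	"github.com/rclone/rclone/lib/transform"
)

func main() {
	// ParseGlobs finds the "{...}" span, TrimBrackets strips the braces,
	// TimeFormat maps "DateOnly" to "2006-01-02", and AppyTimeGlobs
	// substitutes the formatted (local) time back into the name.
	t := time.Date(2001, 2, 3, 4, 5, 6, 0, time.UTC)
	fmt.Println(transform.AppyTimeGlobs("myfile-{DateOnly}.txt", t))
	// Prints something like "myfile-2001-02-03.txt" (the exact date can shift
	// by time zone, since the value is formatted with t.Local()).
}
```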
|
||||
|
||||
@@ -4,8 +4,6 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"math"
|
||||
"mime"
|
||||
"path"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@@ -13,6 +11,7 @@ import (
|
||||
"github.com/rclone/rclone/fs"
|
||||
"github.com/rclone/rclone/fs/operations"
|
||||
"github.com/rclone/rclone/lib/terminal"
|
||||
"github.com/rclone/rclone/lib/transform"
|
||||
)
|
||||
|
||||
// Prefer describes strategies for resolving sync conflicts
|
||||
@@ -97,8 +96,8 @@ func (b *bisyncRun) setResolveDefaults(ctx context.Context) error {
|
||||
}
|
||||
// replace glob variables, if any
|
||||
t := time.Now() // capture static time here so it is the same for all files throughout this run
|
||||
b.opt.ConflictSuffix1 = bilib.AppyTimeGlobs(b.opt.ConflictSuffix1, t)
|
||||
b.opt.ConflictSuffix2 = bilib.AppyTimeGlobs(b.opt.ConflictSuffix2, t)
|
||||
b.opt.ConflictSuffix1 = transform.AppyTimeGlobs(b.opt.ConflictSuffix1, t)
|
||||
b.opt.ConflictSuffix2 = transform.AppyTimeGlobs(b.opt.ConflictSuffix2, t)
|
||||
|
||||
// append dot (intentionally allow more than one)
|
||||
b.opt.ConflictSuffix1 = "." + b.opt.ConflictSuffix1
|
||||
@@ -130,6 +129,7 @@ type (
|
||||
path2 namePair
|
||||
}
|
||||
)
|
||||
|
||||
type namePair struct {
|
||||
oldName string
|
||||
newName string
|
||||
@@ -240,24 +240,7 @@ func SuffixName(ctx context.Context, remote, suffix string) string {
|
||||
}
|
||||
ci := fs.GetConfig(ctx)
|
||||
if ci.SuffixKeepExtension {
|
||||
var (
|
||||
base = remote
|
||||
exts = ""
|
||||
first = true
|
||||
ext = path.Ext(remote)
|
||||
)
|
||||
for ext != "" {
|
||||
// Look second and subsequent extensions in mime types.
|
||||
// If they aren't found then don't keep it as an extension.
|
||||
if !first && mime.TypeByExtension(ext) == "" {
|
||||
break
|
||||
}
|
||||
base = base[:len(base)-len(ext)]
|
||||
exts = ext + exts
|
||||
first = false
|
||||
ext = path.Ext(base)
|
||||
}
|
||||
return base + suffix + exts
|
||||
return transform.SuffixKeepExtension(remote, suffix)
|
||||
}
|
||||
return remote + suffix
|
||||
}
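The removed block above is the suffix-keep-extension loop, now delegated to `transform.SuffixKeepExtension` (the same delegation happens in fs/operations later in this diff). A hedged sketch of the behaviour, assuming the new helper keeps the semantics of the deleted loop:

```go
package main

import (
	"fmt"

	"github.com/rclone/rclone/lib/transform"
)

func main() {
	// The suffix is inserted before the extension rather than appended after it.
	fmt.Println(transform.SuffixKeepExtension("file.txt", "-conflict1"))
	// expected: "file-conflict1.txt"

	// For compound names like "file.tar.gz" the removed loop only kept the
	// extra extension when the local MIME database recognised it, so the
	// result there may be "file-conflict1.tar.gz" or "file.tar-conflict1.gz"
	// depending on the platform.
}
```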
|
||||
|
||||
cmd/convmv/convmv.go (new file, 108 lines)
@@ -0,0 +1,108 @@
|
||||
// Package convmv provides the convmv command.
|
||||
package convmv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"strings"
|
||||
|
||||
"github.com/rclone/rclone/cmd"
|
||||
"github.com/rclone/rclone/fs/config/flags"
|
||||
"github.com/rclone/rclone/fs/operations"
|
||||
"github.com/rclone/rclone/fs/sync"
|
||||
"github.com/rclone/rclone/lib/transform"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// Globals
|
||||
var (
|
||||
deleteEmptySrcDirs = false
|
||||
createEmptySrcDirs = false
|
||||
)
|
||||
|
||||
func init() {
|
||||
cmd.Root.AddCommand(commandDefinition)
|
||||
cmdFlags := commandDefinition.Flags()
|
||||
flags.BoolVarP(cmdFlags, &deleteEmptySrcDirs, "delete-empty-src-dirs", "", deleteEmptySrcDirs, "Delete empty source dirs after move", "")
|
||||
flags.BoolVarP(cmdFlags, &createEmptySrcDirs, "create-empty-src-dirs", "", createEmptySrcDirs, "Create empty source dirs on destination after move", "")
|
||||
}
|
||||
|
||||
var commandDefinition = &cobra.Command{
|
||||
Use: "convmv dest:path --name-transform XXX",
|
||||
Short: `Convert file and directory names in place.`,
|
||||
// Warning! "¡" will be replaced by backticks below
|
||||
Long: strings.ReplaceAll(`
|
||||
convmv supports advanced path name transformations for converting and renaming files and directories by applying prefixes, suffixes, and other alterations.
|
||||
|
||||
`+transform.SprintList()+`
|
||||
|
||||
Multiple transformations can be used in sequence, applied in the order they are specified on the command line.
|
||||
|
||||
The ¡--name-transform¡ flag is also available in ¡sync¡, ¡copy¡, and ¡move¡.
|
||||
|
||||
## Files vs Directories ##
|
||||
|
||||
By default ¡--name-transform¡ will only apply to file names. This means only the leaf file name will be transformed.
|
||||
However, some of the transforms would be better applied to the whole path or just to directories.
To choose which part of the file path is affected, some tags can be added to ¡--name-transform¡:
|
||||
|
||||
| Tag | Effect |
|
||||
|------|------|
|
||||
| ¡file¡ | Only transform the leaf name of files (DEFAULT) |
|
||||
| ¡dir¡ | Only transform name of directories - these may appear anywhere in the path |
|
||||
| ¡all¡ | Transform the entire path for files and directories |
|
||||
|
||||
This is used by adding the tag into the transform name like this: ¡--name-transform file,prefix=ABC¡ or ¡--name-transform dir,prefix=DEF¡.
|
||||
|
||||
For some conversions, using ¡all¡ is more likely to be useful, for example ¡--name-transform all,nfc¡.
|
||||
|
||||
Note that ¡--name-transform¡ may not add path separators ¡/¡ to the name. This will cause an error.
|
||||
|
||||
## Ordering and Conflicts ##
|
||||
|
||||
* Transformations will be applied in the order specified by the user.
|
||||
* If the ¡file¡ tag is in use (the default) then only the leaf name of files will be transformed.
|
||||
* If the ¡dir¡ tag is in use then directories anywhere in the path will be transformed.
* If the ¡all¡ tag is in use then directories and files anywhere in the path will be transformed.
|
||||
* Each transformation will be run one path segment at a time.
|
||||
* If a transformation adds a ¡/¡ or ends up with an empty path segment then that will be an error.
|
||||
* It is up to the user to put the transformations in a sensible order.
|
||||
* Conflicting transformations, such as ¡prefix¡ followed by ¡trimprefix¡ or ¡nfc¡ followed by ¡nfd¡, are possible.
|
||||
* Instead of enforcing mutual exclusivity, transformations are applied in sequence as specified by the
|
||||
user, allowing for intentional use cases (e.g., trimming one prefix before adding another).
|
||||
* Users should be aware that certain combinations may lead to unexpected results and should verify
|
||||
transformations using ¡--dry-run¡ before execution.
|
||||
|
||||
## Race Conditions and Non-Deterministic Behavior ##
|
||||
|
||||
Some transformations, such as ¡replace=old:new¡, may introduce conflicts where multiple source files map to the same destination name.
|
||||
This can lead to race conditions when performing concurrent transfers. It is up to the user to anticipate these.
|
||||
* If two files from the source are transformed into the same name at the destination, the final state may be non-deterministic.
|
||||
* Running rclone check after a sync using such transformations may erroneously report missing or differing files due to overwritten results.
|
||||
|
||||
* To minimize risks, users should:
|
||||
* Carefully review transformations that may introduce conflicts.
|
||||
* Use ¡--dry-run¡ to inspect changes before executing a sync (but keep in mind that it won't show the effect of non-deterministic transformations).
|
||||
* Avoid transformations that cause multiple distinct source files to map to the same destination name.
|
||||
* Consider disabling concurrency with ¡--transfers=1¡ if necessary.
|
||||
* Certain transformations (e.g. ¡prefix¡) will have a multiplying effect every time they are used. Avoid these when using ¡bisync¡.
|
||||
|
||||
`, "¡", "`"),
|
||||
Annotations: map[string]string{
|
||||
"versionIntroduced": "v1.70",
|
||||
"groups": "Filter,Listing,Important,Copy",
|
||||
},
|
||||
Run: func(command *cobra.Command, args []string) {
|
||||
cmd.CheckArgs(1, 1, command, args)
|
||||
fdst, srcFileName := cmd.NewFsFile(args[0])
|
||||
cmd.Run(false, true, command, func() error {
|
||||
if !transform.Transforming(context.Background()) {
|
||||
return errors.New("--name-transform must be set")
|
||||
}
|
||||
if srcFileName == "" {
|
||||
return sync.Transform(context.Background(), fdst, deleteEmptySrcDirs, createEmptySrcDirs)
|
||||
}
|
||||
return operations.TransformFile(context.Background(), fdst, srcFileName)
|
||||
})
|
||||
},
|
||||
}
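Since the `file`/`dir`/`all` tags are easiest to grasp with a concrete path, here is a minimal hedged sketch using the same `transform.SetOptions` and `transform.Path` calls the tests below exercise; the expected outputs follow the tag table and the tic-tac-toe tests rather than an actual run of this exact program:

```go
package main

import (
	"context"
	"fmt"

	"github.com/rclone/rclone/lib/transform"
)

func main() {
	ctx := context.Background()

	// "all" tag: the transform runs on every path segment, directories included.
	if err := transform.SetOptions(ctx, "all,prefix=ABC"); err != nil {
		panic(err)
	}
	// false = treat the path as a file, true = treat it as a directory.
	fmt.Println(transform.Path(ctx, "dir/sub/name.txt", false))
	// expected: "ABCdir/ABCsub/ABCname.txt"
	// With the default "file" tag the same call would leave the directories
	// alone and yield "dir/sub/ABCname.txt".
}
```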
|
||||
cmd/convmv/convmv_test.go (new file, 253 lines)
@@ -0,0 +1,253 @@
|
||||
// Package convmv provides the convmv command.
|
||||
package convmv
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"context"
|
||||
"fmt"
|
||||
"path"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
_ "github.com/rclone/rclone/backend/all" // import all backends
|
||||
"github.com/rclone/rclone/fs"
|
||||
"github.com/rclone/rclone/fs/filter"
|
||||
"github.com/rclone/rclone/fs/operations"
|
||||
"github.com/rclone/rclone/fs/sync"
|
||||
"github.com/rclone/rclone/fs/walk"
|
||||
"github.com/rclone/rclone/fstest"
|
||||
"github.com/rclone/rclone/lib/transform"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"golang.org/x/text/unicode/norm"
|
||||
)
|
||||
|
||||
// Some times used in the tests
|
||||
var (
|
||||
t1 = fstest.Time("2001-02-03T04:05:06.499999999Z")
|
||||
debug = ``
|
||||
)
|
||||
|
||||
// TestMain drives the tests
|
||||
func TestMain(m *testing.M) {
|
||||
fstest.TestMain(m)
|
||||
}
|
||||
|
||||
func TestTransform(t *testing.T) {
|
||||
type args struct {
|
||||
TransformOpt []string
|
||||
TransformBackOpt []string
|
||||
Lossless bool // whether the TransformBackAlgo is always losslessly invertible
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
}{
|
||||
{name: "NFC", args: args{
|
||||
TransformOpt: []string{"all,nfc"},
|
||||
TransformBackOpt: []string{"all,nfd"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "NFD", args: args{
|
||||
TransformOpt: []string{"all,nfd"},
|
||||
TransformBackOpt: []string{"all,nfc"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "base64", args: args{
|
||||
TransformOpt: []string{"all,base64encode"},
|
||||
TransformBackOpt: []string{"all,base64encode"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "prefix", args: args{
|
||||
TransformOpt: []string{"all,prefix=PREFIX"},
|
||||
TransformBackOpt: []string{"all,trimprefix=PREFIX"},
|
||||
Lossless: true,
|
||||
}},
|
||||
{name: "suffix", args: args{
|
||||
TransformOpt: []string{"all,suffix=SUFFIX"},
|
||||
TransformBackOpt: []string{"all,trimsuffix=SUFFIX"},
|
||||
Lossless: true,
|
||||
}},
|
||||
{name: "truncate", args: args{
|
||||
TransformOpt: []string{"all,truncate=10"},
|
||||
TransformBackOpt: []string{"all,truncate=10"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "encoder", args: args{
|
||||
TransformOpt: []string{"all,encoder=Colon,SquareBracket"},
|
||||
TransformBackOpt: []string{"all,decoder=Colon,SquareBracket"},
|
||||
Lossless: true,
|
||||
}},
|
||||
{name: "ISO-8859-1", args: args{
|
||||
TransformOpt: []string{"all,ISO-8859-1"},
|
||||
TransformBackOpt: []string{"all,ISO-8859-1"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "charmap", args: args{
|
||||
TransformOpt: []string{"all,charmap=ISO-8859-7"},
|
||||
TransformBackOpt: []string{"all,charmap=ISO-8859-7"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "lowercase", args: args{
|
||||
TransformOpt: []string{"all,lowercase"},
|
||||
TransformBackOpt: []string{"all,lowercase"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "ascii", args: args{
|
||||
TransformOpt: []string{"all,ascii"},
|
||||
TransformBackOpt: []string{"all,ascii"},
|
||||
Lossless: false,
|
||||
}},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
r := fstest.NewRun(t)
|
||||
defer r.Finalise()
|
||||
|
||||
ctx := context.Background()
|
||||
r.Mkdir(ctx, r.Flocal)
|
||||
r.Mkdir(ctx, r.Fremote)
|
||||
items := makeTestFiles(t, r, "dir1")
|
||||
err := r.Fremote.Mkdir(ctx, "empty/empty")
|
||||
require.NoError(t, err)
|
||||
err = r.Flocal.Mkdir(ctx, "empty/empty")
|
||||
require.NoError(t, err)
|
||||
deleteDSStore(t, r)
|
||||
r.CheckRemoteListing(t, items, []string{"dir1", "empty", "empty/empty"})
|
||||
r.CheckLocalListing(t, items, []string{"dir1", "empty", "empty/empty"})
|
||||
|
||||
err = transform.SetOptions(ctx, tt.args.TransformOpt...)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = sync.Transform(ctx, r.Fremote, true, true)
|
||||
assert.NoError(t, err)
|
||||
compareNames(ctx, t, r, items)
|
||||
|
||||
transformedItems := transformItems(ctx, t, items)
|
||||
r.CheckRemoteListing(t, transformedItems, []string{transform.Path(ctx, "dir1", true), transform.Path(ctx, "empty", true), transform.Path(ctx, "empty/empty", true)})
|
||||
err = transform.SetOptions(ctx, tt.args.TransformBackOpt...)
|
||||
require.NoError(t, err)
|
||||
err = sync.Transform(ctx, r.Fremote, true, true)
|
||||
assert.NoError(t, err)
|
||||
compareNames(ctx, t, r, transformedItems)
|
||||
|
||||
if tt.args.Lossless {
|
||||
deleteDSStore(t, r)
|
||||
r.CheckRemoteListing(t, items, []string{"dir1", "empty", "empty/empty"})
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// const alphabet = "ƀɀɠʀҠԀڀڠݠހ߀ကႠᄀᄠᅀᆀᇠሀሠበዠጠᎠᏀᐠᑀᑠᒀᒠᓀᓠᔀᔠᕀᕠᖀᖠᗀᗠᘀᘠᙀᚠᛀកᠠᡀᣀᦀ᧠ᨠᯀᰀᴀ⇠⋀⍀⍠⎀⎠⏀␀─┠╀╠▀■◀◠☀☠♀♠⚀⚠⛀⛠✀✠❀➀➠⠀⠠⡀⡠⢀⢠⣀⣠⤀⤠⥀⥠⦠⨠⩀⪀⪠⫠⬀⬠⭀ⰀⲀⲠⳀⴀⵀ⺠⻀㇀㐀㐠㑀㑠㒀㒠㓀㓠㔀㔠㕀㕠㖀㖠㗀㗠㘀㘠㙀㙠㚀㚠㛀㛠㜀㜠㝀㝠㞀㞠㟀㟠㠀㠠㡀㡠㢀㢠㣀㣠㤀㤠㥀㥠㦀㦠㧀㧠㨀㨠㩀㩠㪀㪠㫀㫠㬀㬠㭀㭠㮀㮠㯀㯠㰀㰠㱀㱠㲀㲠㳀㳠㴀㴠㵀㵠㶀㶠㷀㷠㸀㸠㹀㹠㺀㺠㻀㻠㼀㼠㽀㽠㾀㾠㿀㿠䀀䀠䁀䁠䂀䂠䃀䃠䄀䄠䅀䅠䆀䆠䇀䇠䈀䈠䉀䉠䊀䊠䋀䋠䌀䌠䍀䍠䎀䎠䏀䏠䐀䐠䑀䑠䒀䒠䓀䓠䔀䔠䕀䕠䖀䖠䗀䗠䘀䘠䙀䙠䚀䚠䛀䛠䜀䜠䝀䝠䞀䞠䟀䟠䠀䠠䡀䡠䢀䢠䣀䣠䤀䤠䥀䥠䦀䦠䧀䧠䨀䨠䩀䩠䪀䪠䫀䫠䬀䬠䭀䭠䮀䮠䯀䯠䰀䰠䱀䱠䲀䲠䳀䳠䴀䴠䵀䵠䶀䷀䷠一丠乀习亀亠什仠伀传佀你侀侠俀俠倀倠偀偠傀傠僀僠儀儠兀兠冀冠净几刀删剀剠劀加勀勠匀匠區占厀厠叀叠吀吠呀呠咀咠哀哠唀唠啀啠喀喠嗀嗠嘀嘠噀噠嚀嚠囀因圀圠址坠垀垠埀埠堀堠塀塠墀墠壀壠夀夠奀奠妀妠姀姠娀娠婀婠媀媠嫀嫠嬀嬠孀孠宀宠寀寠尀尠局屠岀岠峀峠崀崠嵀嵠嶀嶠巀巠帀帠幀幠庀庠廀廠开张彀彠往徠忀忠怀怠恀恠悀悠惀惠愀愠慀慠憀憠懀懠戀戠所扠技抠拀拠挀挠捀捠掀掠揀揠搀搠摀摠撀撠擀擠攀攠敀敠斀斠旀无昀映晀晠暀暠曀曠最朠杀杠枀枠柀柠栀栠桀桠梀梠检棠椀椠楀楠榀榠槀槠樀樠橀橠檀檠櫀櫠欀欠歀歠殀殠毀毠氀氠汀池沀沠泀泠洀洠浀浠涀涠淀淠渀渠湀湠満溠滀滠漀漠潀潠澀澠激濠瀀瀠灀灠炀炠烀烠焀焠煀煠熀熠燀燠爀爠牀牠犀犠狀狠猀猠獀獠玀玠珀珠琀琠瑀瑠璀璠瓀瓠甀甠畀畠疀疠痀痠瘀瘠癀癠皀皠盀盠眀眠着睠瞀瞠矀矠砀砠础硠碀碠磀磠礀礠祀祠禀禠秀秠稀稠穀穠窀窠竀章笀笠筀筠简箠節篠簀簠籀籠粀粠糀糠紀素絀絠綀綠緀締縀縠繀繠纀纠绀绠缀缠罀罠羀羠翀翠耀耠聀聠肀肠胀胠脀脠腀腠膀膠臀臠舀舠艀艠芀芠苀苠茀茠荀荠莀莠菀菠萀萠葀葠蒀蒠蓀蓠蔀蔠蕀蕠薀薠藀藠蘀蘠虀虠蚀蚠蛀蛠蜀蜠蝀蝠螀螠蟀蟠蠀蠠血衠袀袠裀裠褀褠襀襠覀覠觀觠言訠詀詠誀誠諀諠謀謠譀譠讀讠诀诠谀谠豀豠貀負賀賠贀贠赀赠趀趠跀跠踀踠蹀蹠躀躠軀軠輀輠轀轠辀辠迀迠退造遀遠邀邠郀郠鄀鄠酀酠醀醠釀釠鈀鈠鉀鉠銀銠鋀鋠錀錠鍀鍠鎀鎠鏀鏠鐀鐠鑀鑠钀钠铀铠销锠镀镠門閠闀闠阀阠陀陠隀隠雀雠需霠靀靠鞀鞠韀韠頀頠顀顠颀颠飀飠餀餠饀饠馀馠駀駠騀騠驀驠骀骠髀髠鬀鬠魀魠鮀鮠鯀鯠鰀鰠鱀鱠鲀鲠鳀鳠鴀鴠鵀鵠鶀鶠鷀鷠鸀鸠鹀鹠麀麠黀黠鼀鼠齀齠龀龠ꀀꀠꁀꁠꂀꂠꃀꃠꄀꄠꅀꅠꆀꆠꇀꇠꈀꈠꉀꉠꊀꊠꋀꋠꌀꌠꍀꍠꎀꎠꏀꏠꐀꐠꑀꑠ꒠ꔀꔠꕀꕠꖀꖠꗀꗠꙀꚠꛀ꜀꜠ꝀꞀꡀ測試_Русский___ě_áñ"
|
||||
const alphabet = "abcdefg123456789"
|
||||
|
||||
var extras = []string{"apple", "banana", "appleappleapplebanana", "splitbananasplit"}
|
||||
|
||||
func makeTestFiles(t *testing.T, r *fstest.Run, dir string) []fstest.Item {
|
||||
t.Helper()
|
||||
n := 0
|
||||
// Create test files
|
||||
items := []fstest.Item{}
|
||||
for _, c := range alphabet {
|
||||
var out strings.Builder
|
||||
for i := rune(0); i < 7; i++ {
|
||||
out.WriteRune(c + i)
|
||||
}
|
||||
fileName := path.Join(dir, fmt.Sprintf("%04d-%s.txt", n, out.String()))
|
||||
fileName = strings.ToValidUTF8(fileName, "")
|
||||
fileName = strings.NewReplacer(":", "", "<", "", ">", "", "?", "").Replace(fileName) // remove characters illegal on windows
|
||||
|
||||
if debug != "" {
|
||||
fileName = debug
|
||||
}
|
||||
|
||||
item := r.WriteObject(context.Background(), fileName, fileName, t1)
|
||||
r.WriteFile(fileName, fileName, t1)
|
||||
items = append(items, item)
|
||||
n++
|
||||
|
||||
if debug != "" {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
for _, extra := range extras {
|
||||
item := r.WriteObject(context.Background(), extra, extra, t1)
|
||||
r.WriteFile(extra, extra, t1)
|
||||
items = append(items, item)
|
||||
}
|
||||
|
||||
return items
|
||||
}
|
||||
|
||||
func deleteDSStore(t *testing.T, r *fstest.Run) {
|
||||
ctxDSStore, fi := filter.AddConfig(context.Background())
|
||||
err := fi.AddRule(`+ *.DS_Store`)
|
||||
assert.NoError(t, err)
|
||||
err = fi.AddRule(`- **`)
|
||||
assert.NoError(t, err)
|
||||
err = operations.Delete(ctxDSStore, r.Fremote)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
func compareNames(ctx context.Context, t *testing.T, r *fstest.Run, items []fstest.Item) {
|
||||
var entries fs.DirEntries
|
||||
|
||||
deleteDSStore(t, r)
|
||||
err := walk.ListR(context.Background(), r.Fremote, "", true, -1, walk.ListObjects, func(e fs.DirEntries) error {
|
||||
entries = append(entries, e...)
|
||||
return nil
|
||||
})
|
||||
assert.NoError(t, err)
|
||||
entries = slices.DeleteFunc(entries, func(E fs.DirEntry) bool { // remove those pesky .DS_Store files
|
||||
if strings.Contains(E.Remote(), ".DS_Store") {
|
||||
err := operations.DeleteFile(context.Background(), E.(fs.Object))
|
||||
assert.NoError(t, err)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
require.Equal(t, len(items), entries.Len())
|
||||
|
||||
// sort by CONVERTED name
|
||||
slices.SortStableFunc(items, func(a, b fstest.Item) int {
|
||||
aConv := transform.Path(ctx, a.Path, false)
|
||||
bConv := transform.Path(ctx, b.Path, false)
|
||||
return cmp.Compare(aConv, bConv)
|
||||
})
|
||||
slices.SortStableFunc(entries, func(a, b fs.DirEntry) int {
|
||||
return cmp.Compare(a.Remote(), b.Remote())
|
||||
})
|
||||
|
||||
for i, e := range entries {
|
||||
expect := transform.Path(ctx, items[i].Path, false)
|
||||
msg := fmt.Sprintf("expected %v, got %v", detectEncoding(expect), detectEncoding(e.Remote()))
|
||||
assert.Equal(t, expect, e.Remote(), msg)
|
||||
}
|
||||
}
|
||||
|
||||
func transformItems(ctx context.Context, t *testing.T, items []fstest.Item) []fstest.Item {
|
||||
transformedItems := []fstest.Item{}
|
||||
for _, item := range items {
|
||||
newPath := transform.Path(ctx, item.Path, false)
|
||||
newItem := item
|
||||
newItem.Path = newPath
|
||||
transformedItems = append(transformedItems, newItem)
|
||||
}
|
||||
return transformedItems
|
||||
}
|
||||
|
||||
func detectEncoding(s string) string {
|
||||
if norm.NFC.IsNormalString(s) && norm.NFD.IsNormalString(s) {
|
||||
return "BOTH"
|
||||
}
|
||||
if !norm.NFC.IsNormalString(s) && norm.NFD.IsNormalString(s) {
|
||||
return "NFD"
|
||||
}
|
||||
if norm.NFC.IsNormalString(s) && !norm.NFD.IsNormalString(s) {
|
||||
return "NFC"
|
||||
}
|
||||
return "OTHER"
|
||||
}
|
||||
@@ -191,7 +191,6 @@ func setupRootCommand(rootCmd *cobra.Command) {
|
||||
})
|
||||
|
||||
cobra.OnInitialize(initConfig)
|
||||
|
||||
}
|
||||
|
||||
// Traverse the tree of commands running fn on each
|
||||
|
||||
@@ -1557,6 +1557,32 @@ Setting `--max-buffer-memory` allows the buffer memory to be
|
||||
controlled so that it doesn't overwhelm the machine and allows
|
||||
`--transfers` to be set large.
|
||||
|
||||
### --max-connections=N ###
|
||||
|
||||
This sets the maximum number of concurrent calls to the backend API.
|
||||
It may not map 1:1 to TCP or HTTP connections depending on the backend
|
||||
in use and the use of HTTP1 vs HTTP2.
|
||||
|
||||
When downloading files, backends only limit the initial opening of the
|
||||
stream. The bulk data download is not counted as a connection. This
|
||||
means that the `--max-connections` flag won't limit the total number
|
||||
of downloads.
|
||||
|
||||
Note that it is possible to cause deadlocks with this setting so it
|
||||
should be used with care.
|
||||
|
||||
If you are doing a sync or copy then make sure `--max-connections` is
|
||||
one more than the sum of `--transfers` and `--checkers`.
|
||||
|
||||
If you use `--check-first` then `--max-connections` just needs to be
|
||||
one more than the maximum of `--checkers` and `--transfers`.
|
||||
|
||||
So for `--max-connections 3` you'd use `--checkers 2 --transfers 2
|
||||
--check-first` or `--checkers 1 --transfers 1`.
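Going the other way, with `--transfers 4 --checkers 8` you would need `--max-connections 13`, or `--max-connections 9` (the larger of the two plus one) if `--check-first` is in use.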
|
||||
|
||||
Setting this flag can be useful for backends which do multipart
|
||||
uploads to limit the number of simultaneous parts being transferred.
|
||||
|
||||
### --max-delete=N ###
|
||||
|
||||
This tells rclone not to delete more than N files. If that limit is
|
||||
@@ -1848,6 +1874,13 @@ If the backend has a `--backend-upload-concurrency` setting (eg
|
||||
number of transfers instead if it is larger than the value of
|
||||
`--multi-thread-streams` or `--multi-thread-streams` isn't set.
|
||||
|
||||
### --name-transform COMMAND[=XXXX] ###
|
||||
`--name-transform` introduces path name transformations for
|
||||
`rclone copy`, `rclone sync`, and `rclone move`. These transformations
|
||||
enable modifications to source and destination file names by applying
|
||||
prefixes, suffixes, and other alterations during transfer operations.
|
||||
For detailed docs and examples, see [`convmv`](/commands/rclone_convmv/).
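For example, `--name-transform "all,lowercase"` lower-cases every path segment as it is transferred, while `--name-transform "file,prefix=DRAFT-"` prefixes only the leaf file names (the `DRAFT-` value here is just an illustration).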
|
||||
|
||||
### --no-check-dest ###
|
||||
|
||||
The `--no-check-dest` can be used with `move` or `copy` and it causes
|
||||
|
||||
fs/config.go (32)
@@ -545,31 +545,16 @@ var ConfigOptionsInfo = Options{{
|
||||
Help: "Add partial-suffix to temporary file name when --inplace is not used",
|
||||
Groups: "Copy",
|
||||
}, {
|
||||
Name: "max_connections",
|
||||
Help: strings.ReplaceAll(`Maximum number of simultaneous connections, 0 for unlimited.
|
||||
|
||||
This sets the maximum number of connections made to the backend on a
|
||||
per backend basis. Connections in this case are calls to the backend
|
||||
API and may not map 1:1 to TCP or HTTP connections depending on the
|
||||
backend in use.
|
||||
|
||||
Note that it is possible to cause deadlocks with this setting so it
|
||||
should be used with care.
|
||||
|
||||
If you are doing a sync or copy then make sure |--max-connections| is
|
||||
one more than the sum of |--transfers| and |--checkers|.
|
||||
|
||||
If you use |--check-first| then |--max-connections| just needs to be
|
||||
one more than the maximum of |--checkers| and |--transfers|.
|
||||
|
||||
So for |--max-connections 3| you'd use |--checkers 2 --transfers 2
|
||||
--check-first| or |--checkers 1 --transfers 1|.
|
||||
|
||||
Setting this flag can be useful for backends which do multipart
|
||||
uploads or downloads to limit the number of total connections.
|
||||
`, "|", "`"),
|
||||
Name: "max_connections",
|
||||
Help: "Maximum number of simultaneous backend API connections, 0 for unlimited.",
|
||||
Default: 0,
|
||||
Advanced: true,
|
||||
Groups: "Networking",
|
||||
}, {
|
||||
Name: "name_transform",
|
||||
Default: []string{},
|
||||
Help: "Transform paths during the copy process.",
|
||||
Groups: "Copy",
|
||||
}}
|
||||
|
||||
// ConfigInfo is filesystem config options
|
||||
@@ -681,6 +666,7 @@ type ConfigInfo struct {
|
||||
PartialSuffix string `config:"partial_suffix"`
|
||||
MetadataMapper SpaceSepList `config:"metadata_mapper"`
|
||||
MaxConnections int `config:"max_connections"`
|
||||
NameTransform []string `config:"name_transform"`
|
||||
}
|
||||
|
||||
func init() {
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
"github.com/rclone/rclone/fs/filter"
|
||||
"github.com/rclone/rclone/fs/list"
|
||||
"github.com/rclone/rclone/fs/walk"
|
||||
"github.com/rclone/rclone/lib/transform"
|
||||
"golang.org/x/sync/errgroup"
|
||||
"golang.org/x/text/unicode/norm"
|
||||
)
|
||||
@@ -60,9 +61,9 @@ type Marcher interface {
|
||||
// Note: this will flag filter-aware backends on the source side
|
||||
func (m *March) init(ctx context.Context) {
|
||||
ci := fs.GetConfig(ctx)
|
||||
m.srcListDir = m.makeListDir(ctx, m.Fsrc, m.SrcIncludeAll)
|
||||
m.srcListDir = m.makeListDir(ctx, m.Fsrc, m.SrcIncludeAll, m.srcKey)
|
||||
if !m.NoTraverse {
|
||||
m.dstListDir = m.makeListDir(ctx, m.Fdst, m.DstIncludeAll)
|
||||
m.dstListDir = m.makeListDir(ctx, m.Fdst, m.DstIncludeAll, m.dstKey)
|
||||
}
|
||||
// Now create the matching transform
|
||||
// ..normalise the UTF8 first
|
||||
@@ -80,13 +81,26 @@ func (m *March) init(ctx context.Context) {
|
||||
}
|
||||
}
|
||||
|
||||
// key turns a directory entry into a sort key using the defined transforms.
|
||||
func (m *March) key(entry fs.DirEntry) string {
|
||||
// srcKey turns a directory entry into a sort key using the defined transforms.
|
||||
func (m *March) srcKey(entry fs.DirEntry) string {
|
||||
if entry == nil {
|
||||
return ""
|
||||
}
|
||||
name := path.Base(entry.Remote())
|
||||
for _, transform := range m.transforms {
|
||||
name = transform.Path(m.Ctx, name, fs.DirEntryType(entry) == "directory")
|
||||
return transforms(name, m.transforms)
|
||||
}
|
||||
|
||||
// dstKey turns a directory entry into a sort key using the defined transforms.
|
||||
func (m *March) dstKey(entry fs.DirEntry) string {
|
||||
if entry == nil {
|
||||
return ""
|
||||
}
|
||||
return transforms(path.Base(entry.Remote()), m.transforms)
|
||||
}
|
||||
|
||||
func transforms(name string, transforms []matchTransformFn) string {
|
||||
for _, transform := range transforms {
|
||||
name = transform(name)
|
||||
}
|
||||
return name
|
||||
@@ -95,14 +109,14 @@ func (m *March) key(entry fs.DirEntry) string {
|
||||
// makeListDir constructs a listing function for the given fs
|
||||
// and includeAll flags for marching through the file system.
|
||||
// Note: this will optionally flag filter-aware backends!
|
||||
func (m *March) makeListDir(ctx context.Context, f fs.Fs, includeAll bool) listDirFn {
|
||||
func (m *March) makeListDir(ctx context.Context, f fs.Fs, includeAll bool, keyFn list.KeyFn) listDirFn {
|
||||
ci := fs.GetConfig(ctx)
|
||||
fi := filter.GetConfig(ctx)
|
||||
if !(ci.UseListR && f.Features().ListR != nil) && // !--fast-list active and
|
||||
!(ci.NoTraverse && fi.HaveFilesFrom()) { // !(--files-from and --no-traverse)
|
||||
return func(dir string, callback fs.ListRCallback) (err error) {
|
||||
dirCtx := filter.SetUseFilter(m.Ctx, f.Features().FilterAware && !includeAll) // make filter-aware backends constrain List
|
||||
return list.DirSortedFn(dirCtx, f, includeAll, dir, callback, m.key)
|
||||
return list.DirSortedFn(dirCtx, f, includeAll, dir, callback, keyFn)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -137,7 +151,7 @@ func (m *March) makeListDir(ctx context.Context, f fs.Fs, includeAll bool) listD
|
||||
// in syncing as it will use the first entry for the sync
|
||||
// comparison.
|
||||
slices.SortStableFunc(entries, func(a, b fs.DirEntry) int {
|
||||
return cmp.Compare(m.key(a), m.key(b))
|
||||
return cmp.Compare(keyFn(a), keyFn(b))
|
||||
})
|
||||
return callback(entries)
|
||||
}
|
||||
@@ -290,11 +304,11 @@ func (m *March) matchListings(srcChan, dstChan <-chan fs.DirEntry, srcOnly, dstO
|
||||
// Reload src and dst if needed - we set them to nil if used
|
||||
if src == nil {
|
||||
src = <-srcChan
|
||||
srcName = m.key(src)
|
||||
srcName = m.srcKey(src)
|
||||
}
|
||||
if dst == nil {
|
||||
dst = <-dstChan
|
||||
dstName = m.key(dst)
|
||||
dstName = m.dstKey(dst)
|
||||
}
|
||||
if src == nil && dst == nil {
|
||||
break
|
||||
@@ -399,7 +413,7 @@ func (m *March) processJob(job listDirJob) ([]listDirJob, error) {
|
||||
if m.NoTraverse && !m.NoCheckDest {
|
||||
originalSrcChan := srcChan
|
||||
srcChan = make(chan fs.DirEntry, 100)
|
||||
ls, err := list.NewSorter(m.Ctx, m.Fdst, list.SortToChan(dstChan), m.key)
|
||||
ls, err := list.NewSorter(m.Ctx, m.Fdst, list.SortToChan(dstChan), m.dstKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -449,7 +463,6 @@ func (m *March) processJob(job listDirJob) ([]listDirJob, error) {
|
||||
noDst: true,
|
||||
})
|
||||
}
|
||||
|
||||
}, func(dst fs.DirEntry) {
|
||||
recurse := m.Callback.DstOnly(dst)
|
||||
if recurse && job.dstDepth > 0 {
|
||||
|
||||
@@ -491,7 +491,11 @@ func TestMatchListings(t *testing.T) {
|
||||
// Make a channel to send the source (0) or dest (1) using a list.Sorter
|
||||
makeChan := func(offset int) <-chan fs.DirEntry {
|
||||
out := make(chan fs.DirEntry)
|
||||
ls, err := list.NewSorter(ctx, nil, list.SortToChan(out), m.key)
|
||||
key := m.dstKey
|
||||
if offset == 0 {
|
||||
key = m.srcKey
|
||||
}
|
||||
ls, err := list.NewSorter(ctx, nil, list.SortToChan(out), key)
|
||||
require.NoError(t, err)
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
|
||||
@@ -21,6 +21,7 @@ import (
|
||||
"github.com/rclone/rclone/fs/hash"
|
||||
"github.com/rclone/rclone/lib/atexit"
|
||||
"github.com/rclone/rclone/lib/pacer"
|
||||
"github.com/rclone/rclone/lib/transform"
|
||||
)
|
||||
|
||||
// State of the copy
|
||||
@@ -390,7 +391,7 @@ func Copy(ctx context.Context, f fs.Fs, dst fs.Object, remote string, src fs.Obj
|
||||
f: f,
|
||||
dstFeatures: f.Features(),
|
||||
dst: dst,
|
||||
remote: remote,
|
||||
remote: transform.Path(ctx, remote, false),
|
||||
src: src,
|
||||
ci: ci,
|
||||
tr: tr,
|
||||
@@ -399,7 +400,7 @@ func Copy(ctx context.Context, f fs.Fs, dst fs.Object, remote string, src fs.Obj
|
||||
}
|
||||
c.hashType, c.hashOption = CommonHash(ctx, f, src.Fs())
|
||||
if c.dst != nil {
|
||||
c.remote = c.dst.Remote()
|
||||
c.remote = transform.Path(ctx, c.dst.Remote(), false)
|
||||
}
|
||||
// Are we using partials?
|
||||
//
|
||||
@@ -414,5 +415,5 @@ func Copy(ctx context.Context, f fs.Fs, dst fs.Object, remote string, src fs.Obj
|
||||
|
||||
// CopyFile moves a single file possibly to a new name
|
||||
func CopyFile(ctx context.Context, fdst fs.Fs, fsrc fs.Fs, dstFileName string, srcFileName string) (err error) {
|
||||
return moveOrCopyFile(ctx, fdst, fsrc, dstFileName, srcFileName, true)
|
||||
return moveOrCopyFile(ctx, fdst, fsrc, dstFileName, srcFileName, true, false)
|
||||
}
|
||||
|
||||
@@ -39,6 +39,7 @@ import (
|
||||
"github.com/rclone/rclone/lib/pacer"
|
||||
"github.com/rclone/rclone/lib/random"
|
||||
"github.com/rclone/rclone/lib/readers"
|
||||
"github.com/rclone/rclone/lib/transform"
|
||||
"golang.org/x/sync/errgroup"
|
||||
"golang.org/x/text/unicode/norm"
|
||||
)
|
||||
@@ -424,6 +425,8 @@ func MoveTransfer(ctx context.Context, fdst fs.Fs, dst fs.Object, remote string,
|
||||
|
||||
// move - see Move for help
|
||||
func move(ctx context.Context, fdst fs.Fs, dst fs.Object, remote string, src fs.Object, isTransfer bool) (newDst fs.Object, err error) {
|
||||
origRemote := remote // avoid double-transform on fallback to copy
|
||||
remote = transform.Path(ctx, remote, false)
|
||||
ci := fs.GetConfig(ctx)
|
||||
var tr *accounting.Transfer
|
||||
if isTransfer {
|
||||
@@ -447,12 +450,14 @@ func move(ctx context.Context, fdst fs.Fs, dst fs.Object, remote string, src fs.
|
||||
if doMove := fdst.Features().Move; doMove != nil && (SameConfig(src.Fs(), fdst) || (SameRemoteType(src.Fs(), fdst) && (fdst.Features().ServerSideAcrossConfigs || ci.ServerSideAcrossConfigs))) {
|
||||
// Delete destination if it exists and is not the same file as src (could be same file while seemingly different if the remote is case insensitive)
|
||||
if dst != nil {
|
||||
remote = dst.Remote()
|
||||
remote = transform.Path(ctx, dst.Remote(), false)
|
||||
if !SameObject(src, dst) {
|
||||
err = DeleteFile(ctx, dst)
|
||||
if err != nil {
|
||||
return newDst, err
|
||||
}
|
||||
} else if src.Remote() == remote {
|
||||
return newDst, nil
|
||||
} else if needsMoveCaseInsensitive(fdst, fdst, remote, src.Remote(), false) {
|
||||
doMove = func(ctx context.Context, src fs.Object, remote string) (fs.Object, error) {
|
||||
return MoveCaseInsensitive(ctx, fdst, fdst, remote, src.Remote(), false, src)
|
||||
@@ -488,7 +493,7 @@ func move(ctx context.Context, fdst fs.Fs, dst fs.Object, remote string, src fs.
|
||||
}
|
||||
}
|
||||
// Move not found or didn't work so copy dst <- src
|
||||
newDst, err = Copy(ctx, fdst, dst, remote, src)
|
||||
newDst, err = Copy(ctx, fdst, dst, origRemote, src)
|
||||
if err != nil {
|
||||
fs.Errorf(src, "Not deleting source as copy failed: %v", err)
|
||||
return newDst, err
|
||||
@@ -516,24 +521,7 @@ func SuffixName(ctx context.Context, remote string) string {
|
||||
return remote
|
||||
}
|
||||
if ci.SuffixKeepExtension {
|
||||
var (
|
||||
base = remote
|
||||
exts = ""
|
||||
first = true
|
||||
ext = path.Ext(remote)
|
||||
)
|
||||
for ext != "" {
|
||||
// Look second and subsequent extensions in mime types.
|
||||
// If they aren't found then don't keep it as an extension.
|
||||
if !first && mime.TypeByExtension(ext) == "" {
|
||||
break
|
||||
}
|
||||
base = base[:len(base)-len(ext)]
|
||||
exts = ext + exts
|
||||
first = false
|
||||
ext = path.Ext(base)
|
||||
}
|
||||
return base + ci.Suffix + exts
|
||||
return transform.SuffixKeepExtension(remote, ci.Suffix)
|
||||
}
|
||||
return remote + ci.Suffix
|
||||
}
|
||||
@@ -1994,12 +1982,12 @@ func MoveCaseInsensitive(ctx context.Context, fdst fs.Fs, fsrc fs.Fs, dstFileNam
|
||||
}
|
||||
|
||||
// moveOrCopyFile moves or copies a single file possibly to a new name
|
||||
func moveOrCopyFile(ctx context.Context, fdst fs.Fs, fsrc fs.Fs, dstFileName string, srcFileName string, cp bool) (err error) {
|
||||
func moveOrCopyFile(ctx context.Context, fdst fs.Fs, fsrc fs.Fs, dstFileName string, srcFileName string, cp bool, allowOverlap bool) (err error) {
|
||||
ci := fs.GetConfig(ctx)
|
||||
logger, usingLogger := GetLogger(ctx)
|
||||
dstFilePath := path.Join(fdst.Root(), dstFileName)
|
||||
srcFilePath := path.Join(fsrc.Root(), srcFileName)
|
||||
if fdst.Name() == fsrc.Name() && dstFilePath == srcFilePath {
|
||||
if fdst.Name() == fsrc.Name() && dstFilePath == srcFilePath && !allowOverlap {
|
||||
fs.Debugf(fdst, "don't need to copy/move %s, it is already at target location", dstFileName)
|
||||
if usingLogger {
|
||||
srcObj, _ := fsrc.NewObject(ctx, srcFileName)
|
||||
@@ -2106,7 +2094,14 @@ func moveOrCopyFile(ctx context.Context, fdst fs.Fs, fsrc fs.Fs, dstFileName str
|
||||
//
|
||||
// This is treated as a transfer.
|
||||
func MoveFile(ctx context.Context, fdst fs.Fs, fsrc fs.Fs, dstFileName string, srcFileName string) (err error) {
|
||||
return moveOrCopyFile(ctx, fdst, fsrc, dstFileName, srcFileName, false)
|
||||
return moveOrCopyFile(ctx, fdst, fsrc, dstFileName, srcFileName, false, false)
|
||||
}
|
||||
|
||||
// TransformFile transforms a file in place using --name-transform
|
||||
//
|
||||
// This is treated as a transfer.
|
||||
func TransformFile(ctx context.Context, fdst fs.Fs, srcFileName string) (err error) {
|
||||
return moveOrCopyFile(ctx, fdst, fdst, srcFileName, srcFileName, false, true)
|
||||
}
|
||||
|
||||
// SetTier changes tier of object in remote
|
||||
@@ -2211,50 +2206,10 @@ func (l *ListFormat) SetOutput(output []func(entry *ListJSONItem) string) {
|
||||
|
||||
// AddModTime adds file's Mod Time to output
|
||||
func (l *ListFormat) AddModTime(timeFormat string) {
|
||||
switch timeFormat {
|
||||
case "":
|
||||
if timeFormat == "" {
|
||||
timeFormat = "2006-01-02 15:04:05"
|
||||
case "Layout":
|
||||
timeFormat = time.Layout
|
||||
case "ANSIC":
|
||||
timeFormat = time.ANSIC
|
||||
case "UnixDate":
|
||||
timeFormat = time.UnixDate
|
||||
case "RubyDate":
|
||||
timeFormat = time.RubyDate
|
||||
case "RFC822":
|
||||
timeFormat = time.RFC822
|
||||
case "RFC822Z":
|
||||
timeFormat = time.RFC822Z
|
||||
case "RFC850":
|
||||
timeFormat = time.RFC850
|
||||
case "RFC1123":
|
||||
timeFormat = time.RFC1123
|
||||
case "RFC1123Z":
|
||||
timeFormat = time.RFC1123Z
|
||||
case "RFC3339":
|
||||
timeFormat = time.RFC3339
|
||||
case "RFC3339Nano":
|
||||
timeFormat = time.RFC3339Nano
|
||||
case "Kitchen":
|
||||
timeFormat = time.Kitchen
|
||||
case "Stamp":
|
||||
timeFormat = time.Stamp
|
||||
case "StampMilli":
|
||||
timeFormat = time.StampMilli
|
||||
case "StampMicro":
|
||||
timeFormat = time.StampMicro
|
||||
case "StampNano":
|
||||
timeFormat = time.StampNano
|
||||
case "DateTime":
|
||||
// timeFormat = time.DateTime // missing in go1.19
|
||||
timeFormat = "2006-01-02 15:04:05"
|
||||
case "DateOnly":
|
||||
// timeFormat = time.DateOnly // missing in go1.19
|
||||
timeFormat = "2006-01-02"
|
||||
case "TimeOnly":
|
||||
// timeFormat = time.TimeOnly // missing in go1.19
|
||||
timeFormat = "15:04:05"
|
||||
} else {
|
||||
timeFormat = transform.TimeFormat(timeFormat)
|
||||
}
|
||||
l.AppendOutput(func(entry *ListJSONItem) string {
|
||||
return entry.ModTime.When.Local().Format(timeFormat)
|
||||
|
||||
@@ -62,7 +62,7 @@ func rcList(ctx context.Context, in rc.Params) (out rc.Params, err error) {
|
||||
if rc.NotErrParamNotFound(err) {
|
||||
return nil, err
|
||||
}
|
||||
var list = []*ListJSONItem{}
|
||||
list := []*ListJSONItem{}
|
||||
err = ListJSON(ctx, f, remote, &opt, func(item *ListJSONItem) error {
|
||||
list = append(list, item)
|
||||
return nil
|
||||
@@ -193,7 +193,7 @@ func rcMoveOrCopyFile(ctx context.Context, in rc.Params, cp bool) (out rc.Params
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, moveOrCopyFile(ctx, dstFs, srcFs, dstRemote, srcRemote, cp)
|
||||
return nil, moveOrCopyFile(ctx, dstFs, srcFs, dstRemote, srcRemote, cp, false)
|
||||
}
|
||||
|
||||
func init() {
|
||||
@@ -289,7 +289,6 @@ func rcSingleCommand(ctx context.Context, in rc.Params, name string, noRemote bo
|
||||
|
||||
var request *http.Request
|
||||
request, err := in.GetHTTPRequest()
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -629,12 +628,12 @@ func rcBackend(ctx context.Context, in rc.Params) (out rc.Params, err error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var opt = map[string]string{}
|
||||
opt := map[string]string{}
|
||||
err = in.GetStructMissingOK("opt", &opt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var arg = []string{}
|
||||
arg := []string{}
|
||||
err = in.GetStructMissingOK("arg", &arg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -642,7 +641,6 @@ func rcBackend(ctx context.Context, in rc.Params) (out rc.Params, err error) {
|
||||
result, err := doCommand(ctx, command, arg, opt)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("command %q failed: %w", command, err)
|
||||
|
||||
}
|
||||
out = make(rc.Params)
|
||||
out["result"] = result
|
||||
@@ -685,7 +683,6 @@ func rcDu(ctx context.Context, in rc.Params) (out rc.Params, err error) {
|
||||
dir, err := in.GetString("dir")
|
||||
if rc.IsErrParamNotFound(err) {
|
||||
dir = config.GetCacheDir()
|
||||
|
||||
} else if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ import (
|
||||
"github.com/rclone/rclone/fs/march"
|
||||
"github.com/rclone/rclone/fs/operations"
|
||||
"github.com/rclone/rclone/lib/errcount"
|
||||
"github.com/rclone/rclone/lib/transform"
|
||||
"golang.org/x/sync/errgroup"
|
||||
)
|
||||
|
||||
@@ -95,6 +96,7 @@ type syncCopyMove struct {
|
||||
setDirModTimes []setDirModTime // directories that need their modtime set
|
||||
setDirModTimesMaxLevel int // max level of the directories to set
|
||||
modifiedDirs map[string]struct{} // dirs with changed contents (if s.setDirModTimeAfter)
|
||||
allowOverlap bool // whether we allow src and dst to overlap (i.e. for convmv)
|
||||
}
|
||||
|
||||
// For keeping track of delayed modtime sets
|
||||
@@ -126,8 +128,8 @@ func (strategy trackRenamesStrategy) leaf() bool {
|
||||
return (strategy & trackRenamesStrategyLeaf) != 0
|
||||
}
|
||||
|
||||
func newSyncCopyMove(ctx context.Context, fdst, fsrc fs.Fs, deleteMode fs.DeleteMode, DoMove bool, deleteEmptySrcDirs bool, copyEmptySrcDirs bool) (*syncCopyMove, error) {
|
||||
if (deleteMode != fs.DeleteModeOff || DoMove) && operations.OverlappingFilterCheck(ctx, fdst, fsrc) {
|
||||
func newSyncCopyMove(ctx context.Context, fdst, fsrc fs.Fs, deleteMode fs.DeleteMode, DoMove bool, deleteEmptySrcDirs bool, copyEmptySrcDirs bool, allowOverlap bool) (*syncCopyMove, error) {
|
||||
if (deleteMode != fs.DeleteModeOff || DoMove) && operations.OverlappingFilterCheck(ctx, fdst, fsrc) && !allowOverlap {
|
||||
return nil, fserrors.FatalError(fs.ErrorOverlapping)
|
||||
}
|
||||
ci := fs.GetConfig(ctx)
|
||||
@@ -161,6 +163,7 @@ func newSyncCopyMove(ctx context.Context, fdst, fsrc fs.Fs, deleteMode fs.Delete
|
||||
setDirModTime: (!ci.NoUpdateDirModTime && fsrc.Features().CanHaveEmptyDirectories) && (fdst.Features().WriteDirSetModTime || fdst.Features().MkdirMetadata != nil || fdst.Features().DirSetModTime != nil),
|
||||
setDirModTimeAfter: !ci.NoUpdateDirModTime && (!copyEmptySrcDirs || fsrc.Features().CanHaveEmptyDirectories && fdst.Features().DirModTimeUpdatesOnWrite),
|
||||
modifiedDirs: make(map[string]struct{}),
|
||||
allowOverlap: allowOverlap,
|
||||
}
|
||||
|
||||
s.logger, s.usingLogger = operations.GetLogger(ctx)
|
||||
@@ -922,7 +925,7 @@ func (s *syncCopyMove) tryRename(src fs.Object) bool {
|
||||
//
|
||||
// dir is the start directory, "" for root
|
||||
func (s *syncCopyMove) run() error {
|
||||
if operations.Same(s.fdst, s.fsrc) {
|
||||
if operations.Same(s.fdst, s.fsrc) && !s.allowOverlap {
|
||||
fs.Errorf(s.fdst, "Nothing to do as source and destination are the same")
|
||||
return nil
|
||||
}
|
||||
@@ -1122,6 +1125,9 @@ func (s *syncCopyMove) copyDirMetadata(ctx context.Context, f fs.Fs, dst fs.Dire
|
||||
newDst, err = operations.SetDirModTime(ctx, f, dst, dir, src.ModTime(ctx))
|
||||
}
|
||||
}
|
||||
if transform.Transforming(ctx) && newDst != nil && src.Remote() != newDst.Remote() {
|
||||
s.markParentNotEmpty(src)
|
||||
}
|
||||
// If we need to set modtime after and we created a dir, then save it for later
|
||||
if s.setDirModTime && s.setDirModTimeAfter && err == nil {
|
||||
if newDst != nil {
|
||||
@@ -1254,8 +1260,8 @@ func (s *syncCopyMove) SrcOnly(src fs.DirEntry) (recurse bool) {
|
||||
s.logger(s.ctx, operations.MissingOnDst, src, nil, fs.ErrorIsDir)
|
||||
|
||||
// Create the directory and make sure the Metadata/ModTime is correct
|
||||
s.copyDirMetadata(s.ctx, s.fdst, nil, x.Remote(), x)
|
||||
s.markDirModified(x.Remote())
|
||||
s.copyDirMetadata(s.ctx, s.fdst, nil, transform.Path(s.ctx, x.Remote(), true), x)
|
||||
s.markDirModified(transform.Path(s.ctx, x.Remote(), true))
|
||||
return true
|
||||
default:
|
||||
panic("Bad object in DirEntries")
|
||||
@@ -1288,7 +1294,11 @@ func (s *syncCopyMove) Match(ctx context.Context, dst, src fs.DirEntry) (recurse
|
||||
}
|
||||
case fs.Directory:
|
||||
// Do the same thing to the entire contents of the directory
|
||||
s.markParentNotEmpty(src)
|
||||
srcX = fs.NewOverrideDirectory(srcX, transform.Path(ctx, src.Remote(), true))
|
||||
src = srcX
|
||||
if !transform.Transforming(ctx) || src.Remote() != dst.Remote() {
|
||||
s.markParentNotEmpty(src)
|
||||
}
|
||||
dstX, ok := dst.(fs.Directory)
|
||||
if ok {
|
||||
s.logger(s.ctx, operations.Match, src, dst, fs.ErrorIsDir)
|
||||
@@ -1327,7 +1337,7 @@ func (s *syncCopyMove) Match(ctx context.Context, dst, src fs.DirEntry) (recurse
|
||||
// If DoMove is true then files will be moved instead of copied.
|
||||
//
|
||||
// dir is the start directory, "" for root
|
||||
func runSyncCopyMove(ctx context.Context, fdst, fsrc fs.Fs, deleteMode fs.DeleteMode, DoMove bool, deleteEmptySrcDirs bool, copyEmptySrcDirs bool) error {
|
||||
func runSyncCopyMove(ctx context.Context, fdst, fsrc fs.Fs, deleteMode fs.DeleteMode, DoMove bool, deleteEmptySrcDirs bool, copyEmptySrcDirs bool, allowOverlap bool) error {
|
||||
ci := fs.GetConfig(ctx)
|
||||
if deleteMode != fs.DeleteModeOff && DoMove {
|
||||
return fserrors.FatalError(errors.New("can't delete and move at the same time"))
|
||||
@@ -1338,7 +1348,7 @@ func runSyncCopyMove(ctx context.Context, fdst, fsrc fs.Fs, deleteMode fs.Delete
|
||||
return fserrors.FatalError(errors.New("can't use --delete-before with --track-renames"))
|
||||
}
|
||||
// only delete stuff during this pass
|
||||
do, err := newSyncCopyMove(ctx, fdst, fsrc, fs.DeleteModeOnly, false, deleteEmptySrcDirs, copyEmptySrcDirs)
|
||||
do, err := newSyncCopyMove(ctx, fdst, fsrc, fs.DeleteModeOnly, false, deleteEmptySrcDirs, copyEmptySrcDirs, allowOverlap)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -1349,7 +1359,7 @@ func runSyncCopyMove(ctx context.Context, fdst, fsrc fs.Fs, deleteMode fs.Delete
|
||||
// Next pass does a copy only
|
||||
deleteMode = fs.DeleteModeOff
|
||||
}
|
||||
do, err := newSyncCopyMove(ctx, fdst, fsrc, deleteMode, DoMove, deleteEmptySrcDirs, copyEmptySrcDirs)
|
||||
do, err := newSyncCopyMove(ctx, fdst, fsrc, deleteMode, DoMove, deleteEmptySrcDirs, copyEmptySrcDirs, allowOverlap)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -1359,17 +1369,22 @@ func runSyncCopyMove(ctx context.Context, fdst, fsrc fs.Fs, deleteMode fs.Delete
|
||||
// Sync fsrc into fdst
|
||||
func Sync(ctx context.Context, fdst, fsrc fs.Fs, copyEmptySrcDirs bool) error {
|
||||
ci := fs.GetConfig(ctx)
|
||||
return runSyncCopyMove(ctx, fdst, fsrc, ci.DeleteMode, false, false, copyEmptySrcDirs)
|
||||
return runSyncCopyMove(ctx, fdst, fsrc, ci.DeleteMode, false, false, copyEmptySrcDirs, false)
|
||||
}
|
||||
|
||||
// CopyDir copies fsrc into fdst
|
||||
func CopyDir(ctx context.Context, fdst, fsrc fs.Fs, copyEmptySrcDirs bool) error {
|
||||
return runSyncCopyMove(ctx, fdst, fsrc, fs.DeleteModeOff, false, false, copyEmptySrcDirs)
|
||||
return runSyncCopyMove(ctx, fdst, fsrc, fs.DeleteModeOff, false, false, copyEmptySrcDirs, false)
|
||||
}
|
||||
|
||||
// moveDir moves fsrc into fdst
|
||||
func moveDir(ctx context.Context, fdst, fsrc fs.Fs, deleteEmptySrcDirs bool, copyEmptySrcDirs bool) error {
|
||||
return runSyncCopyMove(ctx, fdst, fsrc, fs.DeleteModeOff, true, deleteEmptySrcDirs, copyEmptySrcDirs)
|
||||
return runSyncCopyMove(ctx, fdst, fsrc, fs.DeleteModeOff, true, deleteEmptySrcDirs, copyEmptySrcDirs, false)
|
||||
}
|
||||
|
||||
// Transform renames fdst in place
|
||||
func Transform(ctx context.Context, fdst fs.Fs, deleteEmptySrcDirs bool, copyEmptySrcDirs bool) error {
|
||||
return runSyncCopyMove(ctx, fdst, fdst, fs.DeleteModeOff, true, deleteEmptySrcDirs, copyEmptySrcDirs, true)
|
||||
}
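For completeness, a hedged sketch of how this in-place path is driven; it mirrors the Run function in cmd/convmv/convmv.go earlier in this diff, and the fs.Fs is assumed to come from the usual cmd helpers:

```go
package sketch

import (
	"context"
	"errors"

	"github.com/rclone/rclone/fs"
	"github.com/rclone/rclone/fs/sync"
	"github.com/rclone/rclone/lib/transform"
)

// renameInPlace applies the configured name transforms to fdst by "moving"
// it onto itself, the same call chain convmv's Run function uses.
func renameInPlace(ctx context.Context, fdst fs.Fs) error {
	if err := transform.SetOptions(ctx, "all,nfc"); err != nil {
		return err
	}
	if !transform.Transforming(ctx) {
		return errors.New("--name-transform must be set")
	}
	// The two booleans are deleteEmptySrcDirs and copyEmptySrcDirs, matching
	// sync.Transform's signature above (convmv exposes them as flags).
	return sync.Transform(ctx, fdst, true, true)
}
```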
|
||||
|
||||
// MoveDir moves fsrc into fdst
|
||||
|
||||
@@ -27,6 +27,7 @@ import (
|
||||
"github.com/rclone/rclone/fs/hash"
|
||||
"github.com/rclone/rclone/fs/operations"
|
||||
"github.com/rclone/rclone/fstest"
|
||||
"github.com/rclone/rclone/lib/transform"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"golang.org/x/text/unicode/norm"
|
||||
@@ -2980,7 +2981,7 @@ func predictDstFromLogger(ctx context.Context) context.Context {
|
||||
if winner.Err != nil {
|
||||
errMsg = ";" + winner.Err.Error()
|
||||
}
|
||||
operations.SyncFprintf(opt.JSON, "%s;%s;%v;%s%s\n", file.ModTime(ctx).Local().Format(timeFormat), checksum, file.Size(), file.Remote(), errMsg)
|
||||
operations.SyncFprintf(opt.JSON, "%s;%s;%v;%s%s\n", file.ModTime(ctx).Local().Format(timeFormat), checksum, file.Size(), transform.Path(ctx, file.Remote(), false), errMsg) // TODO: should the transform be handled in the sync instead of here?
|
||||
}
|
||||
}
|
||||
return operations.WithSyncLogger(ctx, opt)
|
||||
|
||||
fs/sync/sync_transform_test.go (new file, 483 lines)
@@ -0,0 +1,483 @@
|
||||
// Test transform
|
||||
|
||||
package sync
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"context"
|
||||
"fmt"
|
||||
"path"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
_ "github.com/rclone/rclone/backend/all"
|
||||
"github.com/rclone/rclone/fs"
|
||||
"github.com/rclone/rclone/fs/filter"
|
||||
"github.com/rclone/rclone/fs/operations"
|
||||
"github.com/rclone/rclone/fs/walk"
|
||||
"github.com/rclone/rclone/fstest"
|
||||
"github.com/rclone/rclone/lib/transform"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"golang.org/x/text/unicode/norm"
|
||||
)
|
||||
|
||||
var debug = ``
|
||||
|
||||
func TestTransform(t *testing.T) {
|
||||
type args struct {
|
||||
TransformOpt []string
|
||||
TransformBackOpt []string
|
||||
Lossless bool // whether the TransformBackAlgo is always losslessly invertible
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
}{
|
||||
{name: "NFC", args: args{
|
||||
TransformOpt: []string{"nfc"},
|
||||
TransformBackOpt: []string{"nfd"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "NFD", args: args{
|
||||
TransformOpt: []string{"nfd"},
|
||||
TransformBackOpt: []string{"nfc"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "base64", args: args{
|
||||
TransformOpt: []string{"base64encode"},
|
||||
TransformBackOpt: []string{"base64encode"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "prefix", args: args{
|
||||
TransformOpt: []string{"prefix=PREFIX"},
|
||||
TransformBackOpt: []string{"trimprefix=PREFIX"},
|
||||
Lossless: true,
|
||||
}},
|
||||
{name: "suffix", args: args{
|
||||
TransformOpt: []string{"suffix=SUFFIX"},
|
||||
TransformBackOpt: []string{"trimsuffix=SUFFIX"},
|
||||
Lossless: true,
|
||||
}},
|
||||
{name: "truncate", args: args{
|
||||
TransformOpt: []string{"truncate=10"},
|
||||
TransformBackOpt: []string{"truncate=10"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "encoder", args: args{
|
||||
TransformOpt: []string{"encoder=Colon,SquareBracket"},
|
||||
TransformBackOpt: []string{"decoder=Colon,SquareBracket"},
|
||||
Lossless: true,
|
||||
}},
|
||||
{name: "ISO-8859-1", args: args{
|
||||
TransformOpt: []string{"ISO-8859-1"},
|
||||
TransformBackOpt: []string{"ISO-8859-1"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "charmap", args: args{
|
||||
TransformOpt: []string{"all,charmap=ISO-8859-7"},
|
||||
TransformBackOpt: []string{"all,charmap=ISO-8859-7"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "lowercase", args: args{
|
||||
TransformOpt: []string{"all,lowercase"},
|
||||
TransformBackOpt: []string{"all,lowercase"},
|
||||
Lossless: false,
|
||||
}},
|
||||
{name: "ascii", args: args{
|
||||
TransformOpt: []string{"all,ascii"},
|
||||
TransformBackOpt: []string{"all,ascii"},
|
||||
Lossless: false,
|
||||
}},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
r := fstest.NewRun(t)
|
||||
defer r.Finalise()
|
||||
|
||||
ctx := context.Background()
|
||||
r.Mkdir(ctx, r.Flocal)
|
||||
r.Mkdir(ctx, r.Fremote)
|
||||
items := makeTestFiles(t, r, "dir1")
|
||||
deleteDSStore(t, r)
|
||||
r.CheckRemoteListing(t, items, nil)
|
||||
r.CheckLocalListing(t, items, nil)
|
err := transform.SetOptions(ctx, tt.args.TransformOpt...)
require.NoError(t, err)

err = Sync(ctx, r.Fremote, r.Flocal, true)
assert.NoError(t, err)
compareNames(ctx, t, r, items)

err = transform.SetOptions(ctx, tt.args.TransformBackOpt...)
require.NoError(t, err)
err = Sync(ctx, r.Fremote, r.Flocal, true)
assert.NoError(t, err)
compareNames(ctx, t, r, items)

if tt.args.Lossless {
deleteDSStore(t, r)
r.CheckRemoteItems(t, items...)
}
})
}
}

const alphabet = "abcdefg123456789"

var extras = []string{"apple", "banana", "appleappleapplebanana", "splitbananasplit"}

func makeTestFiles(t *testing.T, r *fstest.Run, dir string) []fstest.Item {
t.Helper()
n := 0
// Create test files
items := []fstest.Item{}
for _, c := range alphabet {
var out strings.Builder
for i := rune(0); i < 7; i++ {
out.WriteRune(c + i)
}
fileName := path.Join(dir, fmt.Sprintf("%04d-%s.txt", n, out.String()))
fileName = strings.ToValidUTF8(fileName, "")
fileName = strings.NewReplacer(":", "", "<", "", ">", "", "?", "").Replace(fileName) // remove characters illegal on windows

if debug != "" {
fileName = debug
}

item := r.WriteObject(context.Background(), fileName, fileName, t1)
r.WriteFile(fileName, fileName, t1)
items = append(items, item)
n++

if debug != "" {
break
}
}

for _, extra := range extras {
item := r.WriteObject(context.Background(), extra, extra, t1)
r.WriteFile(extra, extra, t1)
items = append(items, item)
}

return items
}

func deleteDSStore(t *testing.T, r *fstest.Run) {
ctxDSStore, fi := filter.AddConfig(context.Background())
err := fi.AddRule(`+ *.DS_Store`)
assert.NoError(t, err)
err = fi.AddRule(`- **`)
assert.NoError(t, err)
err = operations.Delete(ctxDSStore, r.Fremote)
assert.NoError(t, err)
}

func compareNames(ctx context.Context, t *testing.T, r *fstest.Run, items []fstest.Item) {
var entries fs.DirEntries

deleteDSStore(t, r)
err := walk.ListR(context.Background(), r.Fremote, "", true, -1, walk.ListObjects, func(e fs.DirEntries) error {
entries = append(entries, e...)
return nil
})
assert.NoError(t, err)
entries = slices.DeleteFunc(entries, func(E fs.DirEntry) bool { // remove those pesky .DS_Store files
if strings.Contains(E.Remote(), ".DS_Store") {
err := operations.DeleteFile(context.Background(), E.(fs.Object))
assert.NoError(t, err)
return true
}
return false
})
require.Equal(t, len(items), entries.Len())

// sort by CONVERTED name
slices.SortStableFunc(items, func(a, b fstest.Item) int {
aConv := transform.Path(ctx, a.Path, false)
bConv := transform.Path(ctx, b.Path, false)
return cmp.Compare(aConv, bConv)
})
slices.SortStableFunc(entries, func(a, b fs.DirEntry) int {
return cmp.Compare(a.Remote(), b.Remote())
})

for i, e := range entries {
expect := transform.Path(ctx, items[i].Path, false)
msg := fmt.Sprintf("expected %v, got %v", detectEncoding(expect), detectEncoding(e.Remote()))
assert.Equal(t, expect, e.Remote(), msg)
}
}

func detectEncoding(s string) string {
if norm.NFC.IsNormalString(s) && norm.NFD.IsNormalString(s) {
return "BOTH"
}
if !norm.NFC.IsNormalString(s) && norm.NFD.IsNormalString(s) {
return "NFD"
}
if norm.NFC.IsNormalString(s) && !norm.NFD.IsNormalString(s) {
return "NFC"
}
return "OTHER"
}

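compareNames uses detectEncoding above to report whether a mismatch is only a Unicode normalization difference. The same classification can be reproduced standalone with golang.org/x/text/unicode/norm; a minimal sketch, not part of this diff:

package main

import (
	"fmt"

	"golang.org/x/text/unicode/norm"
)

func main() {
	nfc := norm.NFC.String("Café") // precomposed é (single rune)
	nfd := norm.NFD.String("Café") // 'e' followed by a combining accent
	// Mirrors detectEncoding: NFC-only strings report "NFC", NFD-only strings report "NFD".
	fmt.Println(norm.NFC.IsNormalString(nfc), norm.NFD.IsNormalString(nfc)) // true false
	fmt.Println(norm.NFC.IsNormalString(nfd), norm.NFD.IsNormalString(nfd)) // false true
}
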
func TestTransformCopy(t *testing.T) {
ctx := context.Background()
r := fstest.NewRun(t)
err := transform.SetOptions(ctx, "all,suffix_keep_extension=_somesuffix")
require.NoError(t, err)
file1 := r.WriteFile("sub dir/hello world.txt", "hello world", t1)

r.Mkdir(ctx, r.Fremote)
ctx = predictDstFromLogger(ctx)
err = Sync(ctx, r.Fremote, r.Flocal, true)
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

r.CheckLocalItems(t, file1)
r.CheckRemoteItems(t, fstest.NewItem("sub dir_somesuffix/hello world_somesuffix.txt", "hello world", t1))
}

func TestDoubleTransform(t *testing.T) {
ctx := context.Background()
r := fstest.NewRun(t)
err := transform.SetOptions(ctx, "all,prefix=tac", "all,prefix=tic")
require.NoError(t, err)
file1 := r.WriteFile("toe/toe", "hello world", t1)

r.Mkdir(ctx, r.Fremote)
ctx = predictDstFromLogger(ctx)
err = Sync(ctx, r.Fremote, r.Flocal, true)
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

r.CheckLocalItems(t, file1)
r.CheckRemoteItems(t, fstest.NewItem("tictactoe/tictactoe", "hello world", t1))
}

func TestFileTag(t *testing.T) {
ctx := context.Background()
r := fstest.NewRun(t)
err := transform.SetOptions(ctx, "file,prefix=tac", "file,prefix=tic")
require.NoError(t, err)
file1 := r.WriteFile("toe/toe/toe", "hello world", t1)

r.Mkdir(ctx, r.Fremote)
ctx = predictDstFromLogger(ctx)
err = Sync(ctx, r.Fremote, r.Flocal, true)
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

r.CheckLocalItems(t, file1)
r.CheckRemoteItems(t, fstest.NewItem("toe/toe/tictactoe", "hello world", t1))
}

func TestNoTag(t *testing.T) {
ctx := context.Background()
r := fstest.NewRun(t)
err := transform.SetOptions(ctx, "prefix=tac", "prefix=tic")
require.NoError(t, err)
file1 := r.WriteFile("toe/toe/toe", "hello world", t1)

r.Mkdir(ctx, r.Fremote)
ctx = predictDstFromLogger(ctx)
err = Sync(ctx, r.Fremote, r.Flocal, true)
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

r.CheckLocalItems(t, file1)
r.CheckRemoteItems(t, fstest.NewItem("toe/toe/tictactoe", "hello world", t1))
}

func TestDirTag(t *testing.T) {
ctx := context.Background()
r := fstest.NewRun(t)
err := transform.SetOptions(ctx, "dir,prefix=tac", "dir,prefix=tic")
require.NoError(t, err)
r.WriteFile("toe/toe/toe.txt", "hello world", t1)
_, err = operations.MkdirModTime(ctx, r.Flocal, "empty_dir", t1)
require.NoError(t, err)

r.Mkdir(ctx, r.Fremote)
ctx = predictDstFromLogger(ctx)
err = Sync(ctx, r.Fremote, r.Flocal, true)
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

r.CheckLocalListing(t, []fstest.Item{fstest.NewItem("toe/toe/toe.txt", "hello world", t1)}, []string{"empty_dir", "toe", "toe/toe"})
r.CheckRemoteListing(t, []fstest.Item{fstest.NewItem("tictactoe/tictactoe/toe.txt", "hello world", t1)}, []string{"tictacempty_dir", "tictactoe", "tictactoe/tictactoe"})
}

func TestAllTag(t *testing.T) {
ctx := context.Background()
r := fstest.NewRun(t)
err := transform.SetOptions(ctx, "all,prefix=tac", "all,prefix=tic")
require.NoError(t, err)
r.WriteFile("toe/toe/toe.txt", "hello world", t1)
_, err = operations.MkdirModTime(ctx, r.Flocal, "empty_dir", t1)
require.NoError(t, err)

r.Mkdir(ctx, r.Fremote)
ctx = predictDstFromLogger(ctx)
err = Sync(ctx, r.Fremote, r.Flocal, true)
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

r.CheckLocalListing(t, []fstest.Item{fstest.NewItem("toe/toe/toe.txt", "hello world", t1)}, []string{"empty_dir", "toe", "toe/toe"})
r.CheckRemoteListing(t, []fstest.Item{fstest.NewItem("tictactoe/tictactoe/tictactoe.txt", "hello world", t1)}, []string{"tictacempty_dir", "tictactoe", "tictactoe/tictactoe"})
err = operations.Check(ctx, &operations.CheckOpt{Fsrc: r.Flocal, Fdst: r.Fremote}) // should not error even though dst has transformed names
assert.NoError(t, err)
}

func TestRunTwice(t *testing.T) {
ctx := context.Background()
r := fstest.NewRun(t)
err := transform.SetOptions(ctx, "dir,prefix=tac", "dir,prefix=tic")
require.NoError(t, err)
file1 := r.WriteFile("toe/toe/toe.txt", "hello world", t1)

r.Mkdir(ctx, r.Fremote)
ctx = predictDstFromLogger(ctx)
err = Sync(ctx, r.Fremote, r.Flocal, true)
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

r.CheckLocalItems(t, file1)
r.CheckRemoteItems(t, fstest.NewItem("tictactoe/tictactoe/toe.txt", "hello world", t1))

// result should not change second time, since src is unchanged
ctx = predictDstFromLogger(ctx)
err = Sync(ctx, r.Fremote, r.Flocal, true)
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

r.CheckLocalItems(t, file1)
r.CheckRemoteItems(t, fstest.NewItem("tictactoe/tictactoe/toe.txt", "hello world", t1))
}

func TestSyntax(t *testing.T) {
ctx := context.Background()
err := transform.SetOptions(ctx, "prefix")
assert.Error(t, err) // should error as required value is missing

err = transform.SetOptions(ctx, "banana")
assert.Error(t, err) // should error as unrecognized option

err = transform.SetOptions(ctx, "=123")
assert.Error(t, err) // should error as required key is missing

err = transform.SetOptions(ctx, "prefix=123")
assert.NoError(t, err) // should not error
}

func TestConflicting(t *testing.T) {
ctx := context.Background()
r := fstest.NewRun(t)
err := transform.SetOptions(ctx, "prefix=tac", "trimprefix=tac")
require.NoError(t, err)
file1 := r.WriteFile("toe/toe/toe", "hello world", t1)

r.Mkdir(ctx, r.Fremote)
ctx = predictDstFromLogger(ctx)
err = Sync(ctx, r.Fremote, r.Flocal, true)
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

// should result in no change as prefix and trimprefix cancel out
r.CheckLocalItems(t, file1)
r.CheckRemoteItems(t, fstest.NewItem("toe/toe/toe", "hello world", t1))
}

func TestMove(t *testing.T) {
ctx := context.Background()
r := fstest.NewRun(t)
err := transform.SetOptions(ctx, "all,prefix=tac", "all,prefix=tic")
require.NoError(t, err)
r.WriteFile("toe/toe/toe.txt", "hello world", t1)
_, err = operations.MkdirModTime(ctx, r.Flocal, "empty_dir", t1)
require.NoError(t, err)

r.Mkdir(ctx, r.Fremote)
ctx = predictDstFromLogger(ctx)
err = MoveDir(ctx, r.Fremote, r.Flocal, true, true)
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

r.CheckLocalListing(t, []fstest.Item{}, []string{})
r.CheckRemoteListing(t, []fstest.Item{fstest.NewItem("tictactoe/tictactoe/tictactoe.txt", "hello world", t1)}, []string{"tictacempty_dir", "tictactoe", "tictactoe/tictactoe"})
}

func TestTransformFile(t *testing.T) {
ctx := context.Background()
r := fstest.NewRun(t)
err := transform.SetOptions(ctx, "all,prefix=tac", "all,prefix=tic")
require.NoError(t, err)
r.WriteFile("toe/toe/toe.txt", "hello world", t1)
_, err = operations.MkdirModTime(ctx, r.Flocal, "empty_dir", t1)
require.NoError(t, err)

r.Mkdir(ctx, r.Fremote)
ctx = predictDstFromLogger(ctx)
err = MoveDir(ctx, r.Fremote, r.Flocal, true, true)
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

r.CheckLocalListing(t, []fstest.Item{}, []string{})
r.CheckRemoteListing(t, []fstest.Item{fstest.NewItem("tictactoe/tictactoe/tictactoe.txt", "hello world", t1)}, []string{"tictacempty_dir", "tictactoe", "tictactoe/tictactoe"})

err = transform.SetOptions(ctx, "all,trimprefix=tic", "all,trimprefix=tac")
require.NoError(t, err)
err = operations.TransformFile(ctx, r.Fremote, "tictactoe/tictactoe/tictactoe.txt")
require.NoError(t, err)
r.CheckLocalListing(t, []fstest.Item{}, []string{})
r.CheckRemoteListing(t, []fstest.Item{fstest.NewItem("toe/toe/toe.txt", "hello world", t1)}, []string{"tictacempty_dir", "tictactoe", "tictactoe/tictactoe", "toe", "toe/toe"})
}

func TestBase64(t *testing.T) {
ctx := context.Background()
r := fstest.NewRun(t)
err := transform.SetOptions(ctx, "all,base64encode")
require.NoError(t, err)
file1 := r.WriteFile("toe/toe/toe.txt", "hello world", t1)

r.Mkdir(ctx, r.Fremote)
ctx = predictDstFromLogger(ctx)
err = Sync(ctx, r.Fremote, r.Flocal, true)
testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

r.CheckLocalItems(t, file1)
r.CheckRemoteItems(t, fstest.NewItem("dG9l/dG9l/dG9lLnR4dA==", "hello world", t1))

// round trip
err = transform.SetOptions(ctx, "all,base64decode")
require.NoError(t, err)
ctx = predictDstFromLogger(ctx)
err = Sync(ctx, r.Flocal, r.Fremote, true)
testLoggerVsLsf(ctx, r.Flocal, operations.GetLoggerOpt(ctx).JSON, t)
require.NoError(t, err)

r.CheckLocalItems(t, file1)
r.CheckRemoteItems(t, fstest.NewItem("dG9l/dG9l/dG9lLnR4dA==", "hello world", t1))
}

func TestError(t *testing.T) {
ctx := context.Background()
r := fstest.NewRun(t)
err := transform.SetOptions(ctx, "all,prefix=ta/c") // has illegal character
require.NoError(t, err)
file1 := r.WriteFile("toe/toe/toe", "hello world", t1)

r.Mkdir(ctx, r.Fremote)
// ctx = predictDstFromLogger(ctx)
err = Sync(ctx, r.Fremote, r.Flocal, true)
// testLoggerVsLsf(ctx, r.Fremote, operations.GetLoggerOpt(ctx).JSON, t)
assert.Error(t, err)

r.CheckLocalListing(t, []fstest.Item{file1}, []string{"toe", "toe/toe"})
r.CheckRemoteListing(t, []fstest.Item{file1}, []string{"toe", "toe/toe"})
}
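Every test above follows the same shape: install the --name-transform options on the context, sync, then check the (transformed) destination names. A minimal sketch of that pattern from outside the sync package, assuming fdst and fsrc are already-configured fs.Fs values (the helper name and setup are illustrative, not part of the diff):

// syncWithTransform applies a name transform to ctx and syncs fsrc to fdst.
// Assumes imports of "context", "github.com/rclone/rclone/fs",
// "github.com/rclone/rclone/fs/sync" and "github.com/rclone/rclone/lib/transform".
func syncWithTransform(ctx context.Context, fdst, fsrc fs.Fs) error {
	if err := transform.SetOptions(ctx, "all,prefix=tic"); err != nil {
		return err
	}
	// Destination names are computed via transform.Path during the sync.
	return sync.Sync(ctx, fdst, fsrc, true)
}
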
@@ -151,8 +151,8 @@ func init() {
alias("Dot", EncodeDot)
}

// validStrings returns all the valid MultiEncoder strings
func validStrings() string {
// ValidStrings returns all the valid MultiEncoder strings
func ValidStrings() string {
var out []string
for k := range nameToEncoding {
out = append(out, k)
@@ -192,7 +192,7 @@ func (mask *MultiEncoder) Set(in string) error {
} else {
i, err := strconv.ParseUint(part, 0, 0)
if err != nil {
return fmt.Errorf("bad encoding %q: possible values are: %s", part, validStrings())
return fmt.Errorf("bad encoding %q: possible values are: %s", part, ValidStrings())
}
out |= MultiEncoder(i)
}
@@ -313,8 +313,7 @@ func (mask MultiEncoder) Encode(in string) string {
}
if mask.Has(EncodeAsterisk) { // *
switch r {
case '*',
'＊':
case '*', '＊':
return true
}
}
@@ -346,64 +345,55 @@ func (mask MultiEncoder) Encode(in string) string {
}
if mask.Has(EncodeQuestion) { // ?
switch r {
case '?',
'？':
case '?', '？':
return true
}
}
if mask.Has(EncodeColon) { // :
switch r {
case ':',
'：':
case ':', '：':
return true
}
}
if mask.Has(EncodePipe) { // |
switch r {
case '|',
'｜':
case '|', '｜':
return true
}
}
if mask.Has(EncodeDoubleQuote) { // "
switch r {
case '"',
'＂':
case '"', '＂':
return true
}
}
if mask.Has(EncodeSingleQuote) { // '
switch r {
case '\'',
'＇':
case '\'', '＇':
return true
}
}
if mask.Has(EncodeBackQuote) { // `
switch r {
case '`',
'｀':
case '`', '｀':
return true
}
}
if mask.Has(EncodeDollar) { // $
switch r {
case '$',
'＄':
case '$', '＄':
return true
}
}
if mask.Has(EncodeSlash) { // /
switch r {
case '/',
'／':
case '/', '／':
return true
}
}
if mask.Has(EncodeBackSlash) { // \
switch r {
case '\\',
'＼':
case '\\', '＼':
return true
}
}
@@ -416,15 +406,13 @@ func (mask MultiEncoder) Encode(in string) string {
}
if mask.Has(EncodeHash) { // #
switch r {
case '#',
'＃':
case '#', '＃':
return true
}
}
if mask.Has(EncodePercent) { // %
switch r {
case '%',
'％':
case '%', '％':
return true
}
}
}
@@ -1182,6 +1170,7 @@ func appendQuotedBytes(w io.Writer, s string) {
_, _ = fmt.Fprintf(w, string(QuoteRune)+"%02X", b)
}
}

func appendUnquotedByte(w io.Writer, s string) bool {
if len(s) < 2 {
return false
@@ -1202,12 +1191,15 @@ func (identity) Decode(in string) string { return in }
func (i identity) FromStandardPath(s string) string {
return FromStandardPath(i, s)
}

func (i identity) FromStandardName(s string) string {
return FromStandardName(i, s)
}

func (i identity) ToStandardPath(s string) string {
return ToStandardPath(i, s)
}

func (i identity) ToStandardName(s string) string {
return ToStandardName(i, s)
}

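Exporting ValidStrings is what lets the transform help text enumerate the encoding masks, and the encoder=/decoder= transforms drive the same MultiEncoder API shown in these hunks. An illustrative snippet, not part of the diff (assumes imports of "fmt", "log" and "github.com/rclone/rclone/lib/encoder"):

var enc encoder.MultiEncoder
if err := enc.Set("Colon,SquareBracket"); err != nil {
	// Unknown mask names are rejected; encoder.ValidStrings() lists the valid ones.
	log.Fatal(err)
}
// Encode replaces the masked characters with their fullwidth equivalents;
// Decode reverses the mapping.
fmt.Println(enc.Encode("Fox: A Memoir [draft].txt"))
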
lib/transform/cmap.go (new file, 71 lines)
@@ -0,0 +1,71 @@
package transform

import (
"fmt"
"strings"
"sync"

"github.com/rclone/rclone/fs"
"golang.org/x/text/encoding/charmap"
)

var (
cmaps = map[int]*charmap.Charmap{}
lock sync.Mutex
)

type cmapChoices struct{}

func (cmapChoices) Choices() []string {
choices := make([]string, 1)
i := 0
for _, enc := range charmap.All {
c, ok := enc.(*charmap.Charmap)
if !ok {
continue
}
name := strings.ReplaceAll(c.String(), " ", "-")
if name == "" {
name = fmt.Sprintf("unknown-%d", i)
}
lock.Lock()
cmaps[i] = c
lock.Unlock()
choices = append(choices, name)
i++
}
return choices
}

func (cmapChoices) Type() string {
return "string"
}

func charmapByID(cm fs.Enum[cmapChoices]) *charmap.Charmap {
lock.Lock()
c, ok := cmaps[int(cm)]
lock.Unlock()
if ok {
return c
}
return nil
}

func encodeWithReplacement(s string, cmap *charmap.Charmap) string {
return strings.Map(func(r rune) rune {
b, ok := cmap.EncodeRune(r)
if !ok {
return '_'
}
return cmap.DecodeByte(b)
}, s)
}

func toASCII(s string) string {
return strings.Map(func(r rune) rune {
if r <= 127 {
return r
}
return -1
}, s)
}
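charmapByID and encodeWithReplacement are unexported, but the idea is straightforward to reproduce directly on top of golang.org/x/text: map each rune through the chosen charmap and substitute '_' for anything it cannot represent. A self-contained sketch, not part of the diff:

package main

import (
	"fmt"
	"strings"

	"golang.org/x/text/encoding/charmap"
)

// latin1WithReplacement mirrors encodeWithReplacement above for ISO-8859-1:
// unmappable runes become '_'.
func latin1WithReplacement(s string) string {
	return strings.Map(func(r rune) rune {
		b, ok := charmap.ISO8859_1.EncodeRune(r)
		if !ok {
			return '_'
		}
		return charmap.ISO8859_1.DecodeByte(b)
	}, s)
}

func main() {
	fmt.Println(latin1WithReplacement("Café 🦊")) // "Café _" -- é maps, the emoji does not
}
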
lib/transform/help.go (new file, 136 lines)
@@ -0,0 +1,136 @@
package transform

import (
"context"
"fmt"
"strings"

"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/lib/encoder"
)

type commands struct {
command string
description string
}

type example struct {
path string
flags []string
}

var commandList = []commands{
{command: "--name-transform prefix=XXXX", description: "Prepends XXXX to the file name."},
{command: "--name-transform suffix=XXXX", description: "Appends XXXX to the file name after the extension."},
{command: "--name-transform suffix_keep_extension=XXXX", description: "Appends XXXX to the file name while preserving the original file extension."},
{command: "--name-transform trimprefix=XXXX", description: "Removes XXXX if it appears at the start of the file name."},
{command: "--name-transform trimsuffix=XXXX", description: "Removes XXXX if it appears at the end of the file name."},
{command: "--name-transform regex=/pattern/replacement/", description: "Applies a regex-based transformation."},
{command: "--name-transform replace=old:new", description: "Replaces occurrences of old with new in the file name."},
{command: "--name-transform date={YYYYMMDD}", description: "Appends or prefixes the specified date format."},
{command: "--name-transform truncate=N", description: "Truncates the file name to a maximum of N characters."},
{command: "--name-transform base64encode", description: "Encodes the file name in Base64."},
{command: "--name-transform base64decode", description: "Decodes a Base64-encoded file name."},
{command: "--name-transform encoder=ENCODING", description: "Converts the file name to the specified encoding (e.g., ISO-8859-1, Windows-1252, Macintosh)."},
{command: "--name-transform decoder=ENCODING", description: "Decodes the file name from the specified encoding."},
{command: "--name-transform charmap=MAP", description: "Applies a character mapping transformation."},
{command: "--name-transform lowercase", description: "Converts the file name to lowercase."},
{command: "--name-transform uppercase", description: "Converts the file name to UPPERCASE."},
{command: "--name-transform titlecase", description: "Converts the file name to Title Case."},
{command: "--name-transform ascii", description: "Strips non-ASCII characters."},
{command: "--name-transform url", description: "URL-encodes the file name."},
{command: "--name-transform nfc", description: "Converts the file name to NFC Unicode normalization form."},
{command: "--name-transform nfd", description: "Converts the file name to NFD Unicode normalization form."},
{command: "--name-transform nfkc", description: "Converts the file name to NFKC Unicode normalization form."},
{command: "--name-transform nfkd", description: "Converts the file name to NFKD Unicode normalization form."},
{command: "--name-transform command=/path/to/my/programfile names.", description: "Executes an external program to transform"},
|
||||
}

var examples = []example{
{"stories/The Quick Brown Fox!.txt", []string{"all,uppercase"}},
{"stories/The Quick Brown Fox!.txt", []string{"all,replace=Fox:Turtle", "all,replace=Quick:Slow"}},
{"stories/The Quick Brown Fox!.txt", []string{"all,base64encode"}},
{"c3Rvcmllcw==/VGhlIFF1aWNrIEJyb3duIEZveCEudHh0", []string{"all,base64decode"}},
{"stories/The Quick Brown 🦊 Fox Went to the Café!.txt", []string{"all,nfc"}},
{"stories/The Quick Brown 🦊 Fox Went to the Café!.txt", []string{"all,nfd"}},
{"stories/The Quick Brown 🦊 Fox!.txt", []string{"all,ascii"}},
{"stories/The Quick Brown Fox!.txt", []string{"all,trimsuffix=.txt"}},
{"stories/The Quick Brown Fox!.txt", []string{"all,prefix=OLD_"}},
{"stories/The Quick Brown 🦊 Fox Went to the Café!.txt", []string{"all,charmap=ISO-8859-7"}},
{"stories/The Quick Brown Fox: A Memoir [draft].txt", []string{"all,encoder=Colon,SquareBracket"}},
{"stories/The Quick Brown 🦊 Fox Went to the Café!.txt", []string{"all,truncate=21"}},
{"stories/The Quick Brown Fox!.txt", []string{"all,command=echo"}},
{"stories/The Quick Brown Fox!", []string{"date=-{YYYYMMDD}"}},
{"stories/The Quick Brown Fox!", []string{"date=-{macfriendlytime}"}},
{"stories/The Quick Brown Fox!.txt", []string{"all,regex=[\\.\\w]/ab"}},
}

func (e example) command() string {
s := fmt.Sprintf(`rclone convmv %q`, e.path)
for _, f := range e.flags {
s += fmt.Sprintf(" --name-transform %q", f)
}
return s
}

func (e example) output() string {
ctx := context.Background()
err := SetOptions(ctx, e.flags...)
if err != nil {
fs.Errorf(nil, "error generating help text: %v", err)
}
return Path(ctx, e.path, false)
}

// go run ./ convmv --help
func sprintExamples() string {
s := "Examples: \n\n"
for _, e := range examples {
s += fmt.Sprintf("```\n%s\n", e.command())
s += fmt.Sprintf("// Output: %s\n```\n\n", e.output())
}
return s
}

func commandTable() string {
s := `| Command | Description |
|------|------|`
for _, c := range commandList {
s += fmt.Sprintf("\n| `%s` | %s |", c.command, c.description)
}
s += "\n\n\n"
return s
}

// SprintList returns the example help text as a string
func SprintList() string {
var algos transformAlgo
var charmaps fs.Enum[cmapChoices]
s := commandTable()
s += fmt.Sprintln("Conversion modes: \n```")
for _, v := range algos.Choices() {
s += fmt.Sprintln(v + " ")
}
s += fmt.Sprintln("```")

s += fmt.Sprintln("Char maps: \n```")
for _, v := range charmaps.Choices() {
s += fmt.Sprintln(v + " ")
}
s += fmt.Sprintln("```")

s += fmt.Sprintln("Encoding masks: \n```")
for _, v := range strings.Split(encoder.ValidStrings(), ",") {
s += fmt.Sprintln(v + " ")
}
s += fmt.Sprintln("```")

s += sprintExamples()

return s
}

// PrintList prints the example help text to stdout
func PrintList() {
fmt.Println(SprintList())
}
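sprintExamples generates its sample outputs by actually running each transform through output(). The same result can be reproduced directly; the expected value below is taken from the TestVarious table later in this diff (assumes imports of "context", "fmt", "log" and "github.com/rclone/rclone/lib/transform"):

ctx := context.Background()
if err := transform.SetOptions(ctx, "all,uppercase"); err != nil {
	log.Fatal(err)
}
// Matches the "all,uppercase" row in TestVarious.
fmt.Println(transform.Path(ctx, "stories/The Quick Brown Fox!.txt", false))
// Output: STORIES/THE QUICK BROWN FOX!.TXT
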
lib/transform/options.go (new file, 248 lines)
@@ -0,0 +1,248 @@
package transform

import (
"context"
"errors"
"slices"
"strings"
"sync"

"github.com/rclone/rclone/fs"
)

type transform struct {
key transformAlgo // for example, "prefix"
value string // for example, "some_prefix_"
tag tag // file, dir, or all
}

// tag controls which part of the file path is affected (file, dir, all)
type tag int

// tag modes
const (
file tag = iota // Only transform the leaf name of files (default)
dir // Only transform name of directories - these may appear anywhere in the path
all // Transform the entire path for files and directories
)

// Transforming returns true when transforms are in use
func Transforming(ctx context.Context) bool {
ci := fs.GetConfig(ctx)
return len(ci.NameTransform) > 0
}

// SetOptions sets the options in ctx from flags passed in.
// Any existing flags will be overwritten.
// s should be in the same format as cmd line flags, i.e. "all,prefix=XXX"
func SetOptions(ctx context.Context, s ...string) (err error) {
ci := fs.GetConfig(ctx)
ci.NameTransform = s
_, err = getOptions(ctx)
return err
}

// cache to minimize re-parsing
var (
cachedNameTransform []string
cachedOpt []transform
cacheLock sync.Mutex
)

// getOptions gets the options from the flags passed in.
func getOptions(ctx context.Context) (opt []transform, err error) {
if !Transforming(ctx) {
return opt, nil
}

ci := fs.GetConfig(ctx)

// return cached opt if available
if cachedNameTransform != nil && slices.Equal(ci.NameTransform, cachedNameTransform) {
return cachedOpt, nil
}

for _, transform := range ci.NameTransform {
t, err := parse(transform)
if err != nil {
return opt, err
}
opt = append(opt, t)
}
updateCache(ci.NameTransform, opt)
return opt, nil
}

func updateCache(nt []string, o []transform) {
cacheLock.Lock()
cachedNameTransform = slices.Clone(nt)
cachedOpt = o
cacheLock.Unlock()
}

// parse a single instance of --name-transform
func parse(s string) (t transform, err error) {
if s == "" {
return t, nil
}
s = t.parseTag(s)
err = t.parseKeyVal(s)
return t, err
}

// parse the tag (file/dir/all), set the option accordingly, and return the trimmed string
//
// we don't worry about errors here because it will error anyway as an invalid key
func (t *transform) parseTag(s string) string {
if strings.HasPrefix(s, "file,") {
t.tag = file
return strings.TrimPrefix(s, "file,")
}
if strings.HasPrefix(s, "dir,") {
t.tag = dir
return strings.TrimPrefix(s, "dir,")
}
if strings.HasPrefix(s, "all,") {
t.tag = all
return strings.TrimPrefix(s, "all,")
}
return s
}

// parse key and value (if any) by splitting on '=' sign
// (file/dir/all tag has already been trimmed)
func (t *transform) parseKeyVal(s string) (err error) {
if !strings.ContainsRune(s, '=') {
err = t.key.Set(s)
if err != nil {
return err
}
if t.requiresValue() {
fs.Debugf(nil, "received %v", s)
return errors.New("value is required for " + t.key.String())
}
return nil
}
split := strings.Split(s, "=")
if len(split) != 2 {
return errors.New("too many values")
}
if split[0] == "" {
return errors.New("key cannot be blank")
}
err = t.key.Set(split[0])
if err != nil {
return err
}
t.value = split[1]
return nil
}

// returns true if this particular algorithm requires a value
func (t *transform) requiresValue() bool {
switch t.key {
case ConvFindReplace:
return true
case ConvPrefix:
return true
case ConvSuffix:
return true
case ConvSuffixKeepExtension:
return true
case ConvTrimPrefix:
return true
case ConvTrimSuffix:
return true
case ConvIndex:
return true
case ConvDate:
return true
case ConvTruncate:
return true
case ConvEncoder:
return true
case ConvDecoder:
return true
case ConvRegex:
return true
case ConvCommand:
return true
}
return false
}

// transformAlgo describes conversion setting
type transformAlgo = fs.Enum[transformChoices]

// Supported transform options
const (
ConvNone transformAlgo = iota
ConvToNFC
ConvToNFD
ConvToNFKC
ConvToNFKD
ConvFindReplace
ConvPrefix
ConvSuffix
ConvSuffixKeepExtension
ConvTrimPrefix
ConvTrimSuffix
ConvIndex
ConvDate
ConvTruncate
ConvBase64Encode
ConvBase64Decode
ConvEncoder
ConvDecoder
ConvISO8859_1
ConvWindows1252
ConvMacintosh
ConvCharmap
ConvLowercase
ConvUppercase
ConvTitlecase
ConvASCII
ConvURL
ConvRegex
ConvCommand
)

type transformChoices struct{}

func (transformChoices) Choices() []string {
return []string{
ConvNone: "none",
ConvToNFC: "nfc",
ConvToNFD: "nfd",
ConvToNFKC: "nfkc",
ConvToNFKD: "nfkd",
ConvFindReplace: "replace",
ConvPrefix: "prefix",
ConvSuffix: "suffix",
ConvSuffixKeepExtension: "suffix_keep_extension",
ConvTrimPrefix: "trimprefix",
ConvTrimSuffix: "trimsuffix",
ConvIndex: "index",
ConvDate: "date",
ConvTruncate: "truncate",
ConvBase64Encode: "base64encode",
ConvBase64Decode: "base64decode",
ConvEncoder: "encoder",
ConvDecoder: "decoder",
ConvISO8859_1: "ISO-8859-1",
ConvWindows1252: "Windows-1252",
ConvMacintosh: "Macintosh",
ConvCharmap: "charmap",
ConvLowercase: "lowercase",
ConvUppercase: "uppercase",
ConvTitlecase: "titlecase",
ConvASCII: "ascii",
ConvURL: "url",
ConvRegex: "regex",
ConvCommand: "command",
}
}

func (transformChoices) Type() string {
return "string"
}
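Each --name-transform value is parsed in two steps: parseTag strips an optional file,/dir,/all, prefix, then parseKeyVal splits the rest on '=' into key and value. The sketch below restates that decomposition with a standalone helper (parseFlag is a hypothetical name, not part of the diff; assumes the "strings" import):

// parseFlag illustrates how one --name-transform value decomposes.
// parseFlag("dir,prefix=tac") returns ("dir", "prefix", "tac");
// parseFlag("uppercase") returns ("file", "uppercase", "").
func parseFlag(s string) (tag, key, value string) {
	tag = "file" // default when no tag is given
	for _, t := range []string{"file,", "dir,", "all,"} {
		if strings.HasPrefix(s, t) {
			tag = strings.TrimSuffix(t, ",")
			s = strings.TrimPrefix(s, t)
			break
		}
	}
	key, value, _ = strings.Cut(s, "=")
	return tag, key, value
}
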
lib/transform/transform.go (new file, 335 lines)
@@ -0,0 +1,335 @@
// Package transform holds functions for path name transformations
package transform

import (
"bytes"
"context"
"encoding/base64"
"errors"
"fmt"
"mime"
"net/url"
"os/exec"
"path"
"regexp"
"strconv"
"strings"
"time"
"unicode/utf8"

"github.com/rclone/rclone/fs"
"github.com/rclone/rclone/lib/encoder"
"golang.org/x/text/encoding/charmap"
"golang.org/x/text/unicode/norm"
)

// Path transforms a path s according to the --name-transform options in use
//
// If no transforms are in use, s is returned unchanged
func Path(ctx context.Context, s string, isDir bool) string {
if !Transforming(ctx) {
return s
}

old := s
opt, err := getOptions(ctx)
if err != nil {
err = fs.CountError(ctx, err)
fs.Errorf(s, "Failed to parse transform flags: %v", err)
}
for _, t := range opt {
if isDir && t.tag == file {
continue
}
baseOnly := !isDir && t.tag == file
if t.tag == dir && !isDir {
s, err = transformDir(s, t)
} else {
s, err = transformPath(s, t, baseOnly)
}
if err != nil {
err = fs.CountError(ctx, err)
fs.Errorf(s, "Failed to transform: %v", err)
}
}
if old != s {
fs.Debugf(old, "transformed to: %v", s)
}
if strings.Count(old, "/") != strings.Count(s, "/") {
err = fs.CountError(ctx, fmt.Errorf("number of path segments must match: %v (%v), %v (%v)", old, strings.Count(old, "/"), s, strings.Count(s, "/")))
fs.Errorf(old, "%v", err)
return old
}
return s
}

// transformPath transforms a path string according to the chosen TransformAlgo.
// Each path segment is transformed separately, to preserve path separators.
// If baseOnly is true, only the base will be transformed (useful for renaming while walking a dir tree recursively.)
// for example, "some/nested/path" -> "some/nested/CONVERTEDPATH"
// otherwise, the entire path is transformed.
func transformPath(s string, t transform, baseOnly bool) (string, error) {
if s == "" || s == "/" || s == "\\" || s == "." {
return "", nil
}

if baseOnly {
transformedBase, err := transformPathSegment(path.Base(s), t)
if err := validateSegment(transformedBase); err != nil {
return "", err
}
return path.Join(path.Dir(s), transformedBase), err
}

segments := strings.Split(s, "/")
transformedSegments := make([]string, len(segments))
for _, seg := range segments {
convSeg, err := transformPathSegment(seg, t)
if err != nil {
return "", err
}
if err := validateSegment(convSeg); err != nil {
return "", err
}
transformedSegments = append(transformedSegments, convSeg)
}
return path.Join(transformedSegments...), nil
}

// transform all but the last path segment
func transformDir(s string, t transform) (string, error) {
dirPath, err := transformPath(path.Dir(s), t, false)
if err != nil {
return "", err
}
return path.Join(dirPath, path.Base(s)), nil
}

// transformPathSegment transforms one path segment (or really any string) according to the chosen TransformAlgo.
// It assumes path separators have already been trimmed.
func transformPathSegment(s string, t transform) (string, error) {
switch t.key {
case ConvNone:
return s, nil
case ConvToNFC:
return norm.NFC.String(s), nil
case ConvToNFD:
return norm.NFD.String(s), nil
case ConvToNFKC:
return norm.NFKC.String(s), nil
case ConvToNFKD:
return norm.NFKD.String(s), nil
case ConvBase64Encode:
return base64.URLEncoding.EncodeToString([]byte(s)), nil // URLEncoding to avoid slashes
case ConvBase64Decode:
if s == ".DS_Store" {
return s, nil
}
b, err := base64.URLEncoding.DecodeString(s)
if err != nil {
fs.Errorf(s, "base64 error")
}
return string(b), err
case ConvFindReplace:
split := strings.Split(t.value, ":")
if len(split) != 2 {
return s, fmt.Errorf("wrong number of values: %v", t.value)
}
return strings.ReplaceAll(s, split[0], split[1]), nil
case ConvPrefix:
return t.value + s, nil
case ConvSuffix:
return s + t.value, nil
case ConvSuffixKeepExtension:
return SuffixKeepExtension(s, t.value), nil
case ConvTrimPrefix:
return strings.TrimPrefix(s, t.value), nil
case ConvTrimSuffix:
return strings.TrimSuffix(s, t.value), nil
case ConvTruncate:
max, err := strconv.Atoi(t.value)
if err != nil {
return s, err
}
if max <= 0 {
return s, nil
}
if utf8.RuneCountInString(s) <= max {
return s, nil
}
runes := []rune(s)
return string(runes[:max]), nil
case ConvEncoder:
var enc encoder.MultiEncoder
err := enc.Set(t.value)
if err != nil {
return s, err
}
return enc.Encode(s), nil
case ConvDecoder:
var enc encoder.MultiEncoder
err := enc.Set(t.value)
if err != nil {
return s, err
}
return enc.Decode(s), nil
case ConvISO8859_1:
return encodeWithReplacement(s, charmap.ISO8859_1), nil
case ConvWindows1252:
return encodeWithReplacement(s, charmap.Windows1252), nil
case ConvMacintosh:
return encodeWithReplacement(s, charmap.Macintosh), nil
case ConvCharmap:
var cmapType fs.Enum[cmapChoices]
err := cmapType.Set(t.value)
if err != nil {
return s, err
}
c := charmapByID(cmapType)
return encodeWithReplacement(s, c), nil
case ConvLowercase:
return strings.ToLower(s), nil
case ConvUppercase:
return strings.ToUpper(s), nil
case ConvTitlecase:
return strings.ToTitle(s), nil
case ConvASCII:
return toASCII(s), nil
case ConvURL:
return url.QueryEscape(s), nil
case ConvDate:
return s + AppyTimeGlobs(t.value, time.Now()), nil
case ConvRegex:
split := strings.Split(t.value, "/")
if len(split) != 2 {
return s, fmt.Errorf("regex syntax error: %v", t.value)
}
re := regexp.MustCompile(split[0])
return re.ReplaceAllString(s, split[1]), nil
case ConvCommand:
return mapper(s, t.value)
default:
return "", errors.New("this option is not yet implemented")
}
}

// SuffixKeepExtension adds a suffix while keeping extension
//
// i.e. file.txt becomes file_somesuffix.txt not file.txt_somesuffix
func SuffixKeepExtension(remote string, suffix string) string {
var (
base = remote
exts = ""
first = true
ext = path.Ext(remote)
)
for ext != "" {
// Look up second and subsequent extensions in mime types.
// If they aren't found then don't keep it as an extension.
if !first && mime.TypeByExtension(ext) == "" {
break
}
base = base[:len(base)-len(ext)]
exts = ext + exts
first = false
ext = path.Ext(base)
}
return base + suffix + exts
}

// forbid transformations that add/remove path separators
func validateSegment(s string) error {
if strings.TrimSpace(s) == "" {
return errors.New("transform cannot render path segments empty")
}
if strings.ContainsRune(s, '/') {
return fmt.Errorf("transform cannot add path separators: %v", s)
}
return nil
}

// ParseGlobs determines whether a string contains {brackets}
// and returns the substring (including both brackets) for replacing
// substring is first opening bracket to last closing bracket --
// good for {{this}} but not {this}{this}
func ParseGlobs(s string) (hasGlobs bool, substring string) {
open := strings.Index(s, "{")
close := strings.LastIndex(s, "}")
if open >= 0 && close > open {
return true, s[open : close+1]
}
return false, ""
}

// TrimBrackets converts {{this}} to this
func TrimBrackets(s string) string {
return strings.Trim(s, "{}")
}

// TimeFormat converts a user-supplied string to a Go time constant, if possible
func TimeFormat(timeFormat string) string {
switch timeFormat {
case "Layout":
timeFormat = time.Layout
case "ANSIC":
timeFormat = time.ANSIC
case "UnixDate":
timeFormat = time.UnixDate
case "RubyDate":
timeFormat = time.RubyDate
case "RFC822":
timeFormat = time.RFC822
case "RFC822Z":
timeFormat = time.RFC822Z
case "RFC850":
timeFormat = time.RFC850
case "RFC1123":
timeFormat = time.RFC1123
case "RFC1123Z":
timeFormat = time.RFC1123Z
case "RFC3339":
timeFormat = time.RFC3339
case "RFC3339Nano":
timeFormat = time.RFC3339Nano
case "Kitchen":
timeFormat = time.Kitchen
case "Stamp":
timeFormat = time.Stamp
case "StampMilli":
timeFormat = time.StampMilli
case "StampMicro":
timeFormat = time.StampMicro
case "StampNano":
timeFormat = time.StampNano
case "DateTime":
timeFormat = time.DateTime
case "DateOnly":
timeFormat = time.DateOnly
case "TimeOnly":
timeFormat = time.TimeOnly
case "MacFriendlyTime", "macfriendlytime", "mac":
timeFormat = "2006-01-02 0304PM" // not actually a Go constant -- but useful as macOS filenames can't have colons
case "YYYYMMDD":
timeFormat = "20060102"
}
return timeFormat
}

// AppyTimeGlobs converts "myfile-{DateOnly}.txt" to "myfile-2006-01-02.txt"
func AppyTimeGlobs(s string, t time.Time) string {
hasGlobs, substring := ParseGlobs(s)
if !hasGlobs {
return s
}
timeString := t.Local().Format(TimeFormat(TrimBrackets(substring)))
return strings.ReplaceAll(s, substring, timeString)
}

func mapper(s string, command string) (string, error) {
out, err := exec.Command(command, s).CombinedOutput()
if err != nil {
out = bytes.TrimSpace(out)
return s, fmt.Errorf("%s: error running command %q: %v", out, command+" "+s, err)
}
return string(bytes.TrimSpace(out)), nil
}
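SuffixKeepExtension and AppyTimeGlobs are the exported helpers most likely to be reused outside this package. A small self-contained sketch of what they return, based on the doc comments above:

package main

import (
	"fmt"
	"time"

	"github.com/rclone/rclone/lib/transform"
)

func main() {
	// Keeps the extension while inserting the suffix: "file.txt" -> "file_somesuffix.txt"
	fmt.Println(transform.SuffixKeepExtension("file.txt", "_somesuffix"))

	// Expands the {DateOnly} glob using the supplied time:
	// "myfile-{DateOnly}.txt" -> "myfile-2006-01-02.txt"
	ts := time.Date(2006, 1, 2, 12, 0, 0, 0, time.Local)
	fmt.Println(transform.AppyTimeGlobs("myfile-{DateOnly}.txt", ts))
}
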
lib/transform/transform_test.go (new file, 142 lines)
@@ -0,0 +1,142 @@
package transform

import (
"context"
"testing"
"time"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

// sync tests are in fs/sync/sync_transform_test.go to avoid import cycle issues

func newOptions(s ...string) (context.Context, error) {
ctx := context.Background()
err := SetOptions(ctx, s...)
return ctx, err
}

func TestPath(t *testing.T) {
for _, test := range []struct {
path string
want string
}{
{"", ""},
{"toe/toe/toe", "tictactoe/tictactoe/tictactoe"},
{"a/b/c", "tictaca/tictacb/tictacc"},
} {
ctx, err := newOptions("all,prefix=tac", "all,prefix=tic")
require.NoError(t, err)

got := Path(ctx, test.path, false)
assert.Equal(t, test.want, got)
}
}

func TestFileTagOnFile(t *testing.T) {
for _, test := range []struct {
path string
want string
}{
{"a/b/c.txt", "a/b/1c.txt"},
} {
ctx, err := newOptions("file,prefix=1")
require.NoError(t, err)

got := Path(ctx, test.path, false)
assert.Equal(t, test.want, got)
}
}

func TestDirTagOnFile(t *testing.T) {
for _, test := range []struct {
path string
want string
}{
{"a/b/c.txt", "1a/1b/c.txt"},
} {
ctx, err := newOptions("dir,prefix=1")
require.NoError(t, err)

got := Path(ctx, test.path, false)
assert.Equal(t, test.want, got)
}
}

func TestAllTag(t *testing.T) {
for _, test := range []struct {
path string
want string
}{
{"a/b/c.txt", "1a/1b/1c.txt"},
} {
ctx, err := newOptions("all,prefix=1")
require.NoError(t, err)

got := Path(ctx, test.path, false)
assert.Equal(t, test.want, got)
}
}

func TestFileTagOnDir(t *testing.T) {
for _, test := range []struct {
path string
want string
}{
{"a/b", "a/b"},
} {
ctx, err := newOptions("file,prefix=1")
require.NoError(t, err)

got := Path(ctx, test.path, true)
assert.Equal(t, test.want, got)
}
}

func TestDirTagOnDir(t *testing.T) {
for _, test := range []struct {
path string
want string
}{
{"a/b", "1a/1b"},
} {
ctx, err := newOptions("dir,prefix=1")
require.NoError(t, err)

got := Path(ctx, test.path, true)
assert.Equal(t, test.want, got)
}
}

func TestVarious(t *testing.T) {
for _, test := range []struct {
path string
want string
flags []string
}{
{"stories/The Quick Brown Fox!.txt", "STORIES/THE QUICK BROWN FOX!.TXT", []string{"all,uppercase"}},
{"stories/The Quick Brown Fox!.txt", "stories/The Slow Brown Turtle!.txt", []string{"all,replace=Fox:Turtle", "all,replace=Quick:Slow"}},
{"stories/The Quick Brown Fox!.txt", "c3Rvcmllcw==/VGhlIFF1aWNrIEJyb3duIEZveCEudHh0", []string{"all,base64encode"}},
{"c3Rvcmllcw==/VGhlIFF1aWNrIEJyb3duIEZveCEudHh0", "stories/The Quick Brown Fox!.txt", []string{"all,base64decode"}},
{"stories/The Quick Brown 🦊 Fox Went to the Café!.txt", "stories/The Quick Brown 🦊 Fox Went to the Café!.txt", []string{"all,nfc"}},
{"stories/The Quick Brown 🦊 Fox Went to the Café!.txt", "stories/The Quick Brown 🦊 Fox Went to the Café!.txt", []string{"all,nfd"}},
{"stories/The Quick Brown 🦊 Fox!.txt", "stories/The Quick Brown Fox!.txt", []string{"all,ascii"}},
{"stories/The Quick Brown 🦊 Fox!.txt", "stories/The+Quick+Brown+%F0%9F%A6%8A+Fox%21.txt", []string{"all,url"}},
{"stories/The Quick Brown Fox!.txt", "stories/The Quick Brown Fox!", []string{"all,trimsuffix=.txt"}},
{"stories/The Quick Brown Fox!.txt", "OLD_stories/OLD_The Quick Brown Fox!.txt", []string{"all,prefix=OLD_"}},
{"stories/The Quick Brown 🦊 Fox Went to the Café!.txt", "stories/The Quick Brown _ Fox Went to the Caf_!.txt", []string{"all,charmap=ISO-8859-7"}},
{"stories/The Quick Brown Fox: A Memoir [draft].txt", "stories/The Quick Brown Fox: A Memoir [draft].txt", []string{"all,encoder=Colon,SquareBracket"}},
|
||||
{"stories/The Quick Brown 🦊 Fox Went to the Café!.txt", "stories/The Quick Brown 🦊 Fox", []string{"all,truncate=21"}},
|
||||
{"stories/The Quick Brown Fox!.txt", "stories/The Quick Brown Fox!.txt", []string{"all,command=echo"}},
|
||||
{"stories/The Quick Brown Fox!.txt", "stories/The Quick Brown Fox!.txt-" + time.Now().Local().Format("20060102"), []string{"date=-{YYYYMMDD}"}},
|
||||
{"stories/The Quick Brown Fox!.txt", "stories/The Quick Brown Fox!.txt-" + time.Now().Local().Format("2006-01-02 0304PM"), []string{"date=-{macfriendlytime}"}},
|
||||
{"stories/The Quick Brown Fox!.txt", "ababababababab/ababab ababababab ababababab ababab!abababab", []string{"all,regex=[\\.\\w]/ab"}},
|
||||
} {
|
||||
ctx, err := newOptions(test.flags...)
|
||||
require.NoError(t, err)
|
||||
|
||||
got := Path(ctx, test.path, false)
|
||||
assert.Equal(t, test.want, got)
|
||||
}
|
||||
}
|
||||