From 059ad47336b57b76233657d2a17c5c338bddd9d9 Mon Sep 17 00:00:00 2001 From: albertony <12441419+albertony@users.noreply.github.com> Date: Fri, 31 Oct 2025 21:58:24 +0100 Subject: [PATCH] docs: change syntax highlighting for command examples from sh to console --- CONTRIBUTING.md | 44 +++--- RELEASE.md | 22 +-- cmd/backend/backend.go | 8 +- cmd/copyto/copyto.go | 2 +- cmd/cryptcheck/cryptcheck.go | 4 +- cmd/cryptdecode/cryptdecode.go | 2 +- cmd/dedupe/dedupe.go | 10 +- cmd/genautocomplete/genautocomplete_bash.go | 6 +- cmd/genautocomplete/genautocomplete_fish.go | 4 +- .../genautocomplete_powershell.go | 2 +- cmd/genautocomplete/genautocomplete_zsh.go | 4 +- cmd/gitannex/gitannex.md | 25 ++-- cmd/hashsum/hashsum.go | 4 +- cmd/link/link.go | 2 +- cmd/ls/ls.go | 2 +- cmd/lsd/lsd.go | 4 +- cmd/lsf/lsf.go | 16 +-- cmd/lsl/lsl.go | 2 +- cmd/mountlib/mount.md | 20 +-- cmd/mountlib/rc.go | 4 +- cmd/moveto/moveto.go | 2 +- cmd/obscure/obscure.go | 2 +- cmd/rcat/rcat.go | 2 +- cmd/serve/docker/docker.md | 2 +- cmd/serve/restic/restic.go | 6 +- cmd/serve/s3/serve_s3.md | 4 +- cmd/serve/serve.go | 2 +- cmd/serve/sftp/sftp.go | 2 +- cmd/serve/webdav/webdav.go | 4 +- cmd/settier/settier.go | 6 +- cmd/test/test.go | 2 +- cmd/version/version.go | 6 +- docs/content/alias.md | 8 +- docs/content/azureblob.md | 26 ++-- docs/content/azurefiles.md | 16 +-- docs/content/b2.md | 26 ++-- docs/content/bisync.md | 40 +++--- docs/content/box.md | 8 +- docs/content/cache.md | 8 +- docs/content/cloudinary.md | 8 +- docs/content/combine.md | 10 +- docs/content/crypt.md | 10 +- docs/content/docker.md | 56 ++++---- docs/content/docs.md | 132 +++++++++--------- docs/content/doi.md | 2 +- docs/content/downloads.md | 4 +- docs/content/drive.md | 12 +- docs/content/dropbox.md | 8 +- docs/content/faq.md | 22 +-- docs/content/fichier.md | 8 +- docs/content/filefabric.md | 10 +- docs/content/filelu.md | 30 ++-- docs/content/filescom.md | 10 +- docs/content/filtering.md | 24 ++-- docs/content/ftp.md | 14 +- docs/content/gofile.md | 8 +- docs/content/googlecloudstorage.md | 20 +-- docs/content/googlephotos.md | 12 +- docs/content/gui.md | 2 +- docs/content/hasher.md | 13 +- docs/content/hdfs.md | 12 +- docs/content/hidrive.md | 12 +- docs/content/http.md | 12 +- docs/content/iclouddrive.md | 2 +- docs/content/imagekit.md | 8 +- docs/content/install.md | 52 +++---- docs/content/internetarchive.md | 10 +- docs/content/jottacloud.md | 8 +- docs/content/koofr.md | 12 +- docs/content/linkbox.md | 2 +- docs/content/local.md | 28 ++-- docs/content/mailru.md | 10 +- docs/content/mega.md | 8 +- docs/content/memory.md | 2 +- docs/content/netstorage.md | 14 +- docs/content/onedrive.md | 8 +- docs/content/opendrive.md | 8 +- docs/content/oracleobjectstorage/_index.md | 16 +-- .../oracleobjectstorage/tutorial_mount.md | 29 ++-- docs/content/overview.md | 6 +- docs/content/pcloud.md | 8 +- docs/content/pikpak.md | 2 +- docs/content/premiumizeme.md | 8 +- docs/content/protondrive.md | 8 +- docs/content/putio.md | 8 +- docs/content/qingstor.md | 10 +- docs/content/quatrix.md | 10 +- docs/content/rc.md | 54 +++---- docs/content/release_signing.md | 16 +-- docs/content/remote_setup.md | 6 +- docs/content/s3.md | 50 +++---- docs/content/seafile.md | 26 ++-- docs/content/sftp.md | 24 ++-- docs/content/sharefile.md | 8 +- docs/content/sia.md | 8 +- docs/content/smb.md | 2 +- docs/content/storj.md | 71 +++++++--- docs/content/sugarsync.md | 8 +- docs/content/swift.md | 10 +- docs/content/ulozto.md | 8 +- docs/content/union.md | 8 +-
docs/content/uptobox.md | 8 +- docs/content/webdav.md | 8 +- docs/content/yandex.md | 10 +- docs/content/zoho.md | 10 +- lib/http/auth.go | 2 +- lib/http/server.go | 2 +- lib/transform/gen_help.go | 2 +- lib/transform/transform.md | 96 ++++++------- vfs/vfs.md | 8 +- 110 files changed, 782 insertions(+), 750 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4a75f8975..46fe8881b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -38,7 +38,7 @@ and [email](https://docs.github.com/en/github/setting-up-and-managing-your-githu Next open your terminal, change directory to your preferred folder and initialise your local rclone project: -```sh +```console git clone https://github.com/rclone/rclone.git cd rclone git remote rename origin upstream @@ -53,13 +53,13 @@ executed from the rclone folder created above. Now [install Go](https://golang.org/doc/install) and verify your installation: -```sh +```console go version ``` Great, you can now compile and execute your own version of rclone: -```sh +```console go build ./rclone version ``` @@ -68,7 +68,7 @@ go build more accurate version number in the executable as well as enable you to specify more build options.) Finally make a branch to add your new feature -```sh +```console git checkout -b my-new-feature ``` @@ -80,7 +80,7 @@ and a quick view on the rclone [code organisation](#code-organisation). When ready - test the affected functionality and run the unit tests for the code you changed -```sh +```console cd folder/with/changed/files go test -v ``` @@ -99,7 +99,7 @@ Make sure you When you are done with that push your changes to GitHub: -```sh +```console git push -u origin my-new-feature ``` @@ -119,7 +119,7 @@ or [squash your commits](#squashing-your-commits). Follow the guideline for [commit messages](#commit-messages) and then: -```sh +```console git checkout my-new-feature # To switch to your branch git status # To see the new and changed files git add FILENAME # To select FILENAME for the commit @@ -130,7 +130,7 @@ git log # To verify the commit. Use q to quit the log You can modify the message or changes in the latest commit using: -```sh +```console git commit --amend ``` @@ -145,7 +145,7 @@ pushed to GitHub. Your previously pushed commits are replaced by: -```sh +```console git push --force origin my-new-feature ``` @@ -154,7 +154,7 @@ git push --force origin my-new-feature To base your changes on the latest version of the [rclone master](https://github.com/rclone/rclone/tree/master) (upstream): -```sh +```console git checkout master git fetch upstream git merge --ff-only @@ -170,7 +170,7 @@ If you rebase commits that have been pushed to GitHub, then you will have to To combine your commits into one commit: -```sh +```console git log # To count the commits to squash, e.g. the last 2 git reset --soft HEAD~2 # To undo the 2 latest commits git status # To check everything is as expected @@ -178,13 +178,13 @@ git status # To check everything is as expected If everything is fine, then make the new combined commit: -```sh +```console git commit # To commit the undone commits as one ``` otherwise, you may roll back using: -```sh +```console git reflog # To check that HEAD{1} is your previous state git reset --soft 'HEAD@{1}' # To roll back to your previous state ``` @@ -219,13 +219,13 @@ to check an error return). rclone's tests are run from the go testing framework, so at the top level you can run this to run all the tests. -```sh +```console go test -v ./... 
``` You can also use `make`, if supported by your platform -```sh +```console make quicktest ``` @@ -246,7 +246,7 @@ need to make a remote called `TestDrive`. You can then run the unit tests in the drive directory. These tests are skipped if `TestDrive:` isn't defined. -```sh +```console cd backend/drive go test -v ``` @@ -255,7 +255,7 @@ You can then run the integration tests which test all of rclone's operations. Normally these get run against the local file system, but they can be run against any of the remotes. -```sh +```console cd fs/sync go test -v -remote TestDrive: go test -v -remote TestDrive: -fast-list @@ -268,7 +268,7 @@ If you want to use the integration test framework to run these tests altogether with an HTML report and test retries then from the project root: -```sh +```console go install github.com/rclone/rclone/fstest/test_all test_all -backends drive ``` @@ -278,14 +278,14 @@ test_all -backends drive If you want to run all the integration tests against all the remotes, then change into the project root and run -```sh +```console make check make test ``` The commands may require some extra go packages which you can install with -```sh +```console make build_dep ``` @@ -478,7 +478,7 @@ To add a dependency `github.com/ncw/new_dependency` see the instructions below. These will fetch the dependency and add it to `go.mod` and `go.sum`. -```sh +```console go get github.com/ncw/new_dependency ``` @@ -492,7 +492,7 @@ and `go.sum` in the same commit as your other changes. If you need to update a dependency then run -```sh +```console go get golang.org/x/crypto ``` diff --git a/RELEASE.md b/RELEASE.md index 9ca71989d..4bc50f6f3 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -60,7 +60,7 @@ If `make updatedirect` added a `toolchain` directive then remove it. We don't want to force a toolchain on our users. Linux packagers are often using a version of Go that is a few versions out of date. -```sh +```console go list -m -f '{{if not (or .Main .Indirect)}}{{.Path}}{{end}}' all > /tmp/potential-upgrades go get -d $(cat /tmp/potential-upgrades) go mod tidy -go=1.22 -compat=1.22 @@ -70,7 +70,7 @@ If the `go mod tidy` fails use the output from it to remove the package which can't be upgraded from `/tmp/potential-upgrades` when done -```sh +```console git co go.mod go.sum ``` @@ -102,7 +102,7 @@ The above procedure will not upgrade major versions, so v2 to v3. However this tool can show which major versions might need to be upgraded: -```sh +```console go run github.com/icholy/gomajor@latest list -major ``` @@ -112,7 +112,7 @@ Expect API breakage when updating major versions. At some point after the release run -```sh +```console bin/tidy-beta v1.55 ``` @@ -159,7 +159,7 @@ which is a private repo containing artwork from sponsors. Create an update website branch based off the last release -```sh +```console git co -b update-website ``` @@ -167,19 +167,19 @@ If the branch already exists, double check there are no commits that need saving Now reset the branch to the last release -```sh +```console git reset --hard v1.64.0 ``` Create the changes, check them in, test with `make serve` then -```sh +```console make upload_test_website ``` Check out and when happy -```sh +```console make upload_website ``` @@ -189,14 +189,14 @@ Cherry pick any changes back to master and the stable branch if it is active. To do a basic build of rclone's docker image to debug builds locally: -```sh +```console docker buildx build --load -t rclone/rclone:testing --progress=plain . 
docker run --rm rclone/rclone:testing version ``` To test the multipatform build -```sh +```console docker buildx build -t rclone/rclone:testing --progress=plain --platform linux/amd64,linux/386,linux/arm64,linux/arm/v7,linux/arm/v6 . ``` @@ -204,6 +204,6 @@ To make a full build then set the tags correctly and add `--push` Note that you can't only build one architecture - you need to build them all. -```sh +```console docker buildx build --platform linux/amd64,linux/386,linux/arm64,linux/arm/v7,linux/arm/v6 -t rclone/rclone:1.54.1 -t rclone/rclone:1.54 -t rclone/rclone:1 -t rclone/rclone:latest --push . ``` diff --git a/cmd/backend/backend.go b/cmd/backend/backend.go index 1780dd36b..4c5591051 100644 --- a/cmd/backend/backend.go +++ b/cmd/backend/backend.go @@ -37,7 +37,7 @@ see the backend docs for definitions. You can discover what commands a backend implements by using -` + "```sh" + ` +` + "```console" + ` rclone backend help remote: rclone backend help ` + "```" + ` @@ -46,19 +46,19 @@ You can also discover information about the backend using (see [operations/fsinfo](/rc/#operations-fsinfo) in the remote control docs for more info). -` + "```sh" + ` +` + "```console" + ` rclone backend features remote: ` + "```" + ` Pass options to the backend command with -o. This should be key=value or key, e.g.: -` + "```sh" + ` +` + "```console" + ` rclone backend stats remote:path stats -o format=json -o long ` + "```" + ` Pass arguments to the backend by placing them on the end of the line -` + "```sh" + ` +` + "```console" + ` rclone backend cleanup remote:path file1 file2 file3 ` + "```" + ` diff --git a/cmd/copyto/copyto.go b/cmd/copyto/copyto.go index 85a533d00..ea38819e3 100644 --- a/cmd/copyto/copyto.go +++ b/cmd/copyto/copyto.go @@ -35,7 +35,7 @@ name. If the source is a directory then it acts exactly like the So -` + "```sh" + ` +` + "```console" + ` rclone copyto src dst ` + "```" + ` diff --git a/cmd/cryptcheck/cryptcheck.go b/cmd/cryptcheck/cryptcheck.go index 13360d512..164bd28c2 100644 --- a/cmd/cryptcheck/cryptcheck.go +++ b/cmd/cryptcheck/cryptcheck.go @@ -37,14 +37,14 @@ checksum of the file it has just encrypted. Use it like this -` + "```sh" + ` +` + "```console" + ` rclone cryptcheck /path/to/files encryptedremote:path ` + "```" + ` You can use it like this also, but that will involve downloading all the files in ` + "`remote:path`" + `. -` + "```sh" + ` +` + "```console" + ` rclone cryptcheck remote:path encryptedremote:path ` + "```" + ` diff --git a/cmd/cryptdecode/cryptdecode.go b/cmd/cryptdecode/cryptdecode.go index 56acf8fe6..133f01ec7 100644 --- a/cmd/cryptdecode/cryptdecode.go +++ b/cmd/cryptdecode/cryptdecode.go @@ -34,7 +34,7 @@ If you supply the ` + "`--reverse`" + ` flag, it will return encrypted file name use it like this -` + "```sh" + ` +` + "```console" + ` rclone cryptdecode encryptedremote: encryptedfilename1 encryptedfilename2 rclone cryptdecode --reverse encryptedremote: filename1 filename2 ` + "```" + ` diff --git a/cmd/dedupe/dedupe.go b/cmd/dedupe/dedupe.go index 9905004c3..6ab1546fa 100644 --- a/cmd/dedupe/dedupe.go +++ b/cmd/dedupe/dedupe.go @@ -68,7 +68,7 @@ Here is an example run. 
Before - with duplicates -` + "```sh" + ` +` + "```console" + ` $ rclone lsl drive:dupes 6048320 2016-03-05 16:23:16.798000000 one.txt 6048320 2016-03-05 16:23:11.775000000 one.txt @@ -81,7 +81,7 @@ $ rclone lsl drive:dupes Now the ` + "`dedupe`" + ` session -` + "```sh" + ` +` + "```console" + ` $ rclone dedupe drive:dupes 2016/03/05 16:24:37 Google drive root 'dupes': Looking for duplicates using interactive mode. one.txt: Found 4 files with duplicate names @@ -111,7 +111,7 @@ two-3.txt: renamed from: two.txt The result being -` + "```sh" + ` +` + "```console" + ` $ rclone lsl drive:dupes 6048320 2016-03-05 16:23:16.798000000 one.txt 564374 2016-03-05 16:22:52.118000000 two-1.txt @@ -135,13 +135,13 @@ or by using an extra parameter with the same value For example, to rename all the identically named photos in your Google Photos directory, do -` + "```sh" + ` +` + "```console" + ` rclone dedupe --dedupe-mode rename "drive:Google Photos" ` + "```" + ` Or -` + "```sh" + ` +` + "```console" + ` rclone dedupe rename "drive:Google Photos" ` + "```", Annotations: map[string]string{ diff --git a/cmd/genautocomplete/genautocomplete_bash.go b/cmd/genautocomplete/genautocomplete_bash.go index 28f9a1b59..7d1d26cd3 100644 --- a/cmd/genautocomplete/genautocomplete_bash.go +++ b/cmd/genautocomplete/genautocomplete_bash.go @@ -20,13 +20,13 @@ var bashCommandDefinition = &cobra.Command{ By default, when run without any arguments, -` + "```sh" + ` +` + "```console" + ` rclone completion bash ` + "```" + ` the generated script will be written to -` + "```sh" + ` +` + "```console" + ` /etc/bash_completion.d/rclone ` + "```" + ` @@ -43,7 +43,7 @@ can logout and login again to use the autocompletion script. Alternatively, you can source the script directly -` + "```sh" + ` +` + "```console" + ` . /path/to/my_bash_completion_scripts/rclone ` + "```" + ` diff --git a/cmd/genautocomplete/genautocomplete_fish.go b/cmd/genautocomplete/genautocomplete_fish.go index 20191b759..19d27a366 100644 --- a/cmd/genautocomplete/genautocomplete_fish.go +++ b/cmd/genautocomplete/genautocomplete_fish.go @@ -21,14 +21,14 @@ var fishCommandDefinition = &cobra.Command{ This writes to /etc/fish/completions/rclone.fish by default so will probably need to be run with sudo or as root, e.g. -` + "```sh" + ` +` + "```console" + ` sudo rclone completion fish ` + "```" + ` Logout and login again to use the autocompletion scripts, or source them directly -` + "```sh" + ` +` + "```console" + ` . /etc/fish/completions/rclone.fish ` + "```" + ` diff --git a/cmd/genautocomplete/genautocomplete_powershell.go b/cmd/genautocomplete/genautocomplete_powershell.go index 7ce27657f..19b254b39 100644 --- a/cmd/genautocomplete/genautocomplete_powershell.go +++ b/cmd/genautocomplete/genautocomplete_powershell.go @@ -20,7 +20,7 @@ var powershellCommandDefinition = &cobra.Command{ To load completions in your current shell session: -` + "```sh" + ` +` + "```console" + ` rclone completion powershell | Out-String | Invoke-Expression ` + "```" + ` diff --git a/cmd/genautocomplete/genautocomplete_zsh.go b/cmd/genautocomplete/genautocomplete_zsh.go index e60589083..3df4dc738 100644 --- a/cmd/genautocomplete/genautocomplete_zsh.go +++ b/cmd/genautocomplete/genautocomplete_zsh.go @@ -21,14 +21,14 @@ var zshCommandDefinition = &cobra.Command{ This writes to /usr/share/zsh/vendor-completions/_rclone by default so will probably need to be run with sudo or as root, e.g. 
-` + "```sh" + ` +` + "```console" + ` sudo rclone completion zsh ` + "```" + ` Logout and login again to use the autocompletion scripts, or source them directly -` + "```sh" + ` +` + "```console" + ` autoload -U compinit && compinit ` + "```" + ` diff --git a/cmd/gitannex/gitannex.md b/cmd/gitannex/gitannex.md index 6e3fd33e3..5612c40ac 100644 --- a/cmd/gitannex/gitannex.md +++ b/cmd/gitannex/gitannex.md @@ -11,11 +11,14 @@ users. name. This symlink helps git-annex tell rclone it wants to run the "gitannex" subcommand. - ```sh - # Create the helper symlink in "$HOME/bin". + Create the helper symlink in "$HOME/bin": + + ```console ln -s "$(realpath rclone)" "$HOME/bin/git-annex-remote-rclone-builtin" - # Verify the new symlink is on your PATH. + Verify the new symlink is on your PATH: + + ```console which git-annex-remote-rclone-builtin ``` @@ -27,11 +30,15 @@ users. Start by asking git-annex to describe the remote's available configuration parameters. - ```sh - # If you skipped step 1: - git annex initremote MyRemote type=rclone --whatelse + If you skipped step 1: - # If you created a symlink in step 1: + ```console + git annex initremote MyRemote type=rclone --whatelse + ``` + + If you created a symlink in step 1: + + ```console git annex initremote MyRemote type=external externaltype=rclone-builtin --whatelse ``` @@ -47,7 +54,7 @@ users. be one configured in your rclone.conf file, which can be located with `rclone config file`. - ```sh + ```console git annex initremote MyRemote \ type=external \ externaltype=rclone-builtin \ @@ -61,7 +68,7 @@ users. remote**. This command is very new and has not been tested on many rclone backends. Caveat emptor! - ```sh + ```console git annex testremote MyRemote ``` diff --git a/cmd/hashsum/hashsum.go b/cmd/hashsum/hashsum.go index 4d024f9d8..f0caca9cb 100644 --- a/cmd/hashsum/hashsum.go +++ b/cmd/hashsum/hashsum.go @@ -103,13 +103,13 @@ as a relative path). Run without a hash to see the list of all supported hashes, e.g. -` + "```sh" + ` +` + "```console" + ` $ rclone hashsum ` + hash.HelpString(0) + "```" + ` Then -` + "```sh" + ` +` + "```console" + ` rclone hashsum MD5 remote:path ` + "```" + ` diff --git a/cmd/link/link.go b/cmd/link/link.go index aa65d11b8..bb9f9bb9f 100644 --- a/cmd/link/link.go +++ b/cmd/link/link.go @@ -29,7 +29,7 @@ var commandDefinition = &cobra.Command{ Short: `Generate public link to file/folder.`, Long: `Create, retrieve or remove a public link to the given file or folder. -` + "```sh" + ` +` + "```console" + ` rclone link remote:path/to/file rclone link remote:path/to/folder/ rclone link --unlink remote:path/to/folder/ diff --git a/cmd/ls/ls.go b/cmd/ls/ls.go index b850762de..915d87de6 100644 --- a/cmd/ls/ls.go +++ b/cmd/ls/ls.go @@ -23,7 +23,7 @@ readable format with size and path. Recurses by default. E.g. -` + "```sh" + ` +` + "```console" + ` $ rclone ls swift:bucket 60295 bevajer5jef 90613 canole diff --git a/cmd/lsd/lsd.go b/cmd/lsd/lsd.go index 355a4a6f2..6a8c23189 100644 --- a/cmd/lsd/lsd.go +++ b/cmd/lsd/lsd.go @@ -34,7 +34,7 @@ not), the modification time (if known, the current time if not), the number of objects in the directory (if known, -1 if not) and the name of the directory, E.g. 
-` + "```sh" + ` +` + "```console" + ` $ rclone lsd swift: 494000 2018-04-26 08:43:20 10000 10000files 65 2018-04-26 08:43:20 1 1File @@ -42,7 +42,7 @@ $ rclone lsd swift: Or -` + "```sh" + ` +` + "```console" + ` $ rclone lsd drive:test -1 2016-10-17 17:41:53 -1 1000files -1 2017-01-03 14:40:54 -1 2500files diff --git a/cmd/lsf/lsf.go b/cmd/lsf/lsf.go index 38e79d62a..03a7d9f0c 100644 --- a/cmd/lsf/lsf.go +++ b/cmd/lsf/lsf.go @@ -54,7 +54,7 @@ one per line. The directories will have a / suffix. E.g. -` + "```sh" + ` +` + "```console" + ` $ rclone lsf swift:bucket bevajer5jef canole @@ -85,7 +85,7 @@ So if you wanted the path, size and modification time, you would use E.g. -` + "```sh" + ` +` + "```console" + ` $ rclone lsf --format "tsp" swift:bucket 2016-06-25 18:55:41;60295;bevajer5jef 2016-06-25 18:55:43;90613;canole @@ -103,13 +103,13 @@ type. For example, to emulate the md5sum command you can use -` + "```sh" + ` +` + "```console" + ` rclone lsf -R --hash MD5 --format hp --separator " " --files-only . ` + "```" + ` E.g. -` + "```sh" + ` +` + "```console" + ` $ rclone lsf -R --hash MD5 --format hp --separator " " --files-only swift:bucket 7908e352297f0f530b84a756f188baa3 bevajer5jef cd65ac234e6fea5925974a51cdd865cc canole @@ -126,7 +126,7 @@ putting it last is a good strategy. E.g. -` + "```sh" + ` +` + "```console" + ` $ rclone lsf --separator "," --format "tshp" swift:bucket 2016-06-25 18:55:41,60295,7908e352297f0f530b84a756f188baa3,bevajer5jef 2016-06-25 18:55:43,90613,cd65ac234e6fea5925974a51cdd865cc,canole @@ -140,7 +140,7 @@ if they contain, E.g. -` + "```sh" + ` +` + "```console" + ` $ rclone lsf --csv --files-only --format ps remote:path test.log,22355 test.sh,449 @@ -153,7 +153,7 @@ to pass to an rclone copy with the ` + "`--files-from-raw`" + ` flag. For example, to find all the files modified within one day and copy those only (without traversing the whole directory structure): -` + "```sh" + ` +` + "```console" + ` rclone lsf --absolute --files-only --max-age 1d /path/to/local > new_files rclone copy --files-from-raw new_files /path/to/local remote:path ` + "```" + ` @@ -162,7 +162,7 @@ The default time format is ` + "`'2006-01-02 15:04:05'`" + `. [Other formats](https://pkg.go.dev/time#pkg-constants) can be specified with the ` + "`--time-format`" + ` flag. Examples: -` + "```sh" + ` +` + "```console" + ` rclone lsf remote:path --format pt --time-format 'Jan 2, 2006 at 3:04pm (MST)' rclone lsf remote:path --format pt --time-format '2006-01-02 15:04:05.000000000' rclone lsf remote:path --format pt --time-format '2006-01-02T15:04:05.999999999Z07:00' diff --git a/cmd/lsl/lsl.go b/cmd/lsl/lsl.go index cabcef28b..3fe4875f0 100644 --- a/cmd/lsl/lsl.go +++ b/cmd/lsl/lsl.go @@ -23,7 +23,7 @@ readable format with modification time, size and path. Recurses by default. E.g. 
-` + "```sh" + ` +` + "```console" + ` $ rclone lsl swift:bucket 60295 2016-06-25 18:55:41.062626927 bevajer5jef 90613 2016-06-25 18:55:43.302607074 canole diff --git a/cmd/mountlib/mount.md b/cmd/mountlib/mount.md index 789adb30b..8d640d5c6 100644 --- a/cmd/mountlib/mount.md +++ b/cmd/mountlib/mount.md @@ -16,7 +16,7 @@ mount, waits until success or timeout and exits with appropriate code On Linux/macOS/FreeBSD start the mount like this, where `/path/to/local/mount` is an **empty** **existing** directory: -```sh +```console rclone @ remote:path/to/files /path/to/local/mount ``` @@ -32,7 +32,7 @@ and is not supported when [mounting as a network drive](#mounting-modes-on-windo and the last example will mount as network share `\\cloud\remote` and map it to an automatically assigned drive: -```sh +```console rclone @ remote:path/to/files * rclone @ remote:path/to/files X: rclone @ remote:path/to/files C:\path\parent\mount @@ -44,7 +44,7 @@ a SIGINT or SIGTERM signal, the mount should be automatically stopped. When running in background mode the user will have to stop the mount manually: -```sh +```console # Linux fusermount -u /path/to/local/mount #... or on some systems @@ -96,7 +96,7 @@ directory or drive. Using the special value `*` will tell rclone to automatically assign the next available drive letter, starting with Z: and moving backward. Examples: -```sh +```console rclone @ remote:path/to/files * rclone @ remote:path/to/files X: rclone @ remote:path/to/files C:\path\parent\mount @@ -111,7 +111,7 @@ to your @ command. Mounting to a directory path is not supported in this mode, it is a limitation Windows imposes on junctions, so the remote must always be mounted to a drive letter. -```sh +```console rclone @ remote:path/to/files X: --network-mode ``` @@ -129,7 +129,7 @@ volume label for the mapped drive, shown in Windows Explorer etc, while the comp If you specify a full network share UNC path with `--volname`, this will implicitly set the `--network-mode` option, so the following two examples have same result: -```sh +```console rclone @ remote:path/to/files X: --network-mode rclone @ remote:path/to/files X: --volname \\server\share ``` @@ -140,7 +140,7 @@ mountpoint, and instead use the UNC path specified as the volume name, as if it specified with the `--volname` option. This will also implicitly set the `--network-mode` option. This means the following two examples have same result: -```sh +```console rclone @ remote:path/to/files \\cloud\remote rclone @ remote:path/to/files * --volname \\cloud\remote ``` @@ -296,7 +296,7 @@ from the website, rclone will locate the macFUSE libraries without any further i If however, macFUSE is installed using the [macports](https://www.macports.org/) package manager, the following addition steps are required. -```sh +```console sudo mkdir /usr/local/lib cd /usr/local/lib sudo ln -s /opt/local/lib/libfuse.2.dylib @@ -424,7 +424,7 @@ rclone will detect it and translate command-line arguments appropriately. 
Now you can run classic mounts like this: -```sh +```console mount sftp1:subdir /mnt/data -t rclone -o vfs_cache_mode=writes,sftp_key_file=/path/to/pem ``` @@ -456,7 +456,7 @@ WantedBy=multi-user.target or add in `/etc/fstab` a line like -```sh +```console sftp1:subdir /mnt/data rclone rw,noauto,nofail,_netdev,x-systemd.automount,args2env,vfs_cache_mode=writes,config=/etc/rclone.conf,cache_dir=/var/cache/rclone 0 0 ``` diff --git a/cmd/mountlib/rc.go b/cmd/mountlib/rc.go index 67bba5efb..65af12d71 100644 --- a/cmd/mountlib/rc.go +++ b/cmd/mountlib/rc.go @@ -65,7 +65,7 @@ This takes the following parameters: Example: -` + "```sh" + ` +` + "```console" + ` rclone rc mount/mount fs=mydrive: mountPoint=/home//mountPoint rclone rc mount/mount fs=mydrive: mountPoint=/home//mountPoint mountType=mount rclone rc mount/mount fs=TestDrive: mountPoint=/mnt/tmp vfsOpt='{"CacheMode": 2}' mountOpt='{"AllowOther": true}' @@ -74,7 +74,7 @@ rclone rc mount/mount fs=TestDrive: mountPoint=/mnt/tmp vfsOpt='{"CacheMode": 2} The vfsOpt are as described in options/get and can be seen in the the "vfs" section when running and the mountOpt can be seen in the "mount" section: -` + "```sh" + ` +` + "```console" + ` rclone rc options/get ` + "```" + ` `, diff --git a/cmd/moveto/moveto.go b/cmd/moveto/moveto.go index dd68efa0a..b7a4d2254 100644 --- a/cmd/moveto/moveto.go +++ b/cmd/moveto/moveto.go @@ -35,7 +35,7 @@ like the [move](/commands/rclone_move/) command. So -` + "```sh" + ` +` + "```console" + ` rclone moveto src dst ` + "```" + ` diff --git a/cmd/obscure/obscure.go b/cmd/obscure/obscure.go index 07754f2de..531192b0e 100644 --- a/cmd/obscure/obscure.go +++ b/cmd/obscure/obscure.go @@ -33,7 +33,7 @@ This command can also accept a password through STDIN instead of an argument by passing a hyphen as an argument. This will use the first line of STDIN as the password not including the trailing newline. -` + "```sh" + ` +` + "```console" + ` echo "secretpassword" | rclone obscure - ` + "```" + ` diff --git a/cmd/rcat/rcat.go b/cmd/rcat/rcat.go index 29ecbc407..8cf963b42 100644 --- a/cmd/rcat/rcat.go +++ b/cmd/rcat/rcat.go @@ -28,7 +28,7 @@ var commandDefinition = &cobra.Command{ Short: `Copies standard input to file on remote.`, Long: `Reads from standard input (stdin) and copies it to a single remote file. -` + "```sh" + ` +` + "```console" + ` echo "hello world" | rclone rcat remote:path/to/file ffmpeg - | rclone rcat remote:path/to/file ` + "```" + ` diff --git a/cmd/serve/docker/docker.md b/cmd/serve/docker/docker.md index c16032f8a..c081d128f 100644 --- a/cmd/serve/docker/docker.md +++ b/cmd/serve/docker/docker.md @@ -9,7 +9,7 @@ Docker plugins can run as a managed plugin under control of the docker daemon or as an independent native service. For testing, you can just run it directly from the command line, for example: -```sh +```console sudo rclone serve docker --base-dir /tmp/rclone-volumes --socket-addr localhost:8787 -vv ``` diff --git a/cmd/serve/restic/restic.go b/cmd/serve/restic/restic.go index e295989c0..24af0db69 100644 --- a/cmd/serve/restic/restic.go +++ b/cmd/serve/restic/restic.go @@ -119,7 +119,7 @@ following instructions. Now start the rclone restic server -` + "```sh" + ` +` + "```console" + ` rclone serve restic -v remote:backup ` + "```" + ` @@ -149,7 +149,7 @@ the URL for the REST server. 
For example: -` + "```sh" + ` +` + "```console" + ` $ export RESTIC_REPOSITORY=rest:http://localhost:8080/ $ export RESTIC_PASSWORD=yourpassword $ restic init @@ -173,7 +173,7 @@ Note that you can use the endpoint to host multiple repositories. Do this by adding a directory name or path after the URL. Note that these **must** end with /. Eg -` + "```sh" + ` +` + "```console" + ` $ export RESTIC_REPOSITORY=rest:http://localhost:8080/user1repo/ # backup user1 stuff $ export RESTIC_REPOSITORY=rest:http://localhost:8080/user2repo/ diff --git a/cmd/serve/s3/serve_s3.md b/cmd/serve/s3/serve_s3.md index 7d3cffc13..7143da9ac 100644 --- a/cmd/serve/s3/serve_s3.md +++ b/cmd/serve/s3/serve_s3.md @@ -33,14 +33,14 @@ cause problems for S3 clients which rely on the Etag being the MD5. For a simple set up, to serve `remote:path` over s3, run the server like this: -```sh +```console rclone serve s3 --auth-key ACCESS_KEY_ID,SECRET_ACCESS_KEY remote:path ``` For example, to use a simple folder in the filesystem, run the server with a command like this: -```sh +```console rclone serve s3 --auth-key ACCESS_KEY_ID,SECRET_ACCESS_KEY local:/path/to/folder ``` diff --git a/cmd/serve/serve.go b/cmd/serve/serve.go index b2773ba9c..fcf3a445b 100644 --- a/cmd/serve/serve.go +++ b/cmd/serve/serve.go @@ -19,7 +19,7 @@ var Command = &cobra.Command{ Long: `Serve a remote over a given protocol. Requires the use of a subcommand to specify the protocol, e.g. -` + "```sh" + ` +` + "```console" + ` rclone serve http remote: ` + "```" + ` diff --git a/cmd/serve/sftp/sftp.go b/cmd/serve/sftp/sftp.go index a49d54ba6..ebd6a0de0 100644 --- a/cmd/serve/sftp/sftp.go +++ b/cmd/serve/sftp/sftp.go @@ -151,7 +151,7 @@ It can be configured with .socket and .service unit files as described in Socket activation can be tested ad-hoc with the ` + "`systemd-socket-activate`" + `command: -` + "```sh" + ` +` + "```console" + ` systemd-socket-activate -l 2222 -- rclone serve sftp :local:vfs/ ` + "```" + ` diff --git a/cmd/serve/webdav/webdav.go b/cmd/serve/webdav/webdav.go index 2183420e5..fbc64b7a4 100644 --- a/cmd/serve/webdav/webdav.go +++ b/cmd/serve/webdav/webdav.go @@ -157,13 +157,13 @@ Create a new DWORD BasicAuthLevel with value 2. 
You can serve the webdav on a unix socket like this: -` + "```sh" + ` +` + "```console" + ` rclone serve webdav --addr unix:///tmp/my.socket remote:path ` + "```" + ` and connect to it like this using rclone and the webdav backend: -` + "```sh" + ` +` + "```console" + ` rclone --webdav-unix-socket /tmp/my.socket --webdav-url http://localhost lsf :webdav: ` + "```" + ` diff --git a/cmd/settier/settier.go b/cmd/settier/settier.go index 97480f9fc..97bc48374 100644 --- a/cmd/settier/settier.go +++ b/cmd/settier/settier.go @@ -29,19 +29,19 @@ inaccessible.true You can use it to tier single object -` + "```sh" + ` +` + "```console" + ` rclone settier Cool remote:path/file ` + "```" + ` Or use rclone filters to set tier on only specific files -` + "```sh" + ` +` + "```console" + ` rclone --include "*.txt" settier Hot remote:path/dir ` + "```" + ` Or just provide remote directory and all files in directory will be tiered -` + "```sh" + ` +` + "```console" + ` rclone settier tier remote:path/dir ` + "```", Annotations: map[string]string{ diff --git a/cmd/test/test.go b/cmd/test/test.go index 95de40b1b..907c0aa7a 100644 --- a/cmd/test/test.go +++ b/cmd/test/test.go @@ -18,7 +18,7 @@ var Command = &cobra.Command{ Select which test command you want with the subcommand, eg -` + "```sh" + ` +` + "```console" + ` rclone test memory remote: ` + "```" + ` diff --git a/cmd/version/version.go b/cmd/version/version.go index e588df721..549fae027 100644 --- a/cmd/version/version.go +++ b/cmd/version/version.go @@ -42,7 +42,7 @@ build tags and the type of executable (static or dynamic). For example: -` + "```sh" + ` +` + "```console" + ` $ rclone version rclone v1.55.0 - os/version: ubuntu 18.04 (64 bit) @@ -60,7 +60,7 @@ Note: before rclone version 1.55 the os/type and os/arch lines were merged, If you supply the --check flag, then it will do an online check to compare your version with the latest release and the latest beta. -` + "```sh" + ` +` + "```console" + ` $ rclone version --check yours: 1.42.0.6 latest: 1.42 (released 2018-06-16) @@ -69,7 +69,7 @@ beta: 1.42.0.5 (released 2018-06-17) Or -` + "```sh" + ` +` + "```console" + ` $ rclone version --check yours: 1.41 latest: 1.42 (released 2018-06-16) diff --git a/docs/content/alias.md b/docs/content/alias.md index d9e855fca..24dfcfa11 100644 --- a/docs/content/alias.md +++ b/docs/content/alias.md @@ -34,7 +34,7 @@ can be used to only show the trashed files in `myDrive`. Here is an example of how to make an alias called `remote` for local folder. First run: -```sh +```console rclone config ``` @@ -87,19 +87,19 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories in top level in `/mnt/storage/backup` -```sh +```console rclone lsd remote: ``` List all the files in `/mnt/storage/backup` -```sh +```console rclone ls remote: ``` Copy another local directory to the alias directory called source -```sh +```console rclone copy /home/source remote:source ``` diff --git a/docs/content/azureblob.md b/docs/content/azureblob.md index 0f5773eeb..08c576c23 100644 --- a/docs/content/azureblob.md +++ b/docs/content/azureblob.md @@ -15,7 +15,7 @@ command.) You may put subdirectories in too, e.g. Here is an example of making a Microsoft Azure Blob Storage configuration. For a remote called `remote`. 
First run: -```sh +```console rclone config ``` @@ -57,26 +57,26 @@ y/e/d> y See all containers -```sh +```console rclone lsd remote: ``` Make a new container -```sh +```console rclone mkdir remote:container ``` List the contents of a container -```sh +```console rclone ls remote:container ``` Sync `/home/local/directory` to the remote container, deleting any excess files in the container. -```sh +```console rclone sync --interactive /home/local/directory remote:container ``` @@ -212,25 +212,25 @@ Credentials created with the `az` tool can be picked up using `env_auth`. For example if you were to login with a service principal like this: -```sh +```console az login --service-principal -u XXX -p XXX --tenant XXX ``` Then you could access rclone resources like this: -```sh +```console rclone lsf :azureblob,env_auth,account=ACCOUNT:CONTAINER ``` Or -```sh +```console rclone lsf --azureblob-env-auth --azureblob-account=ACCOUNT :azureblob:CONTAINER ``` Which is analogous to using the `az` tool: -```sh +```console az storage blob list --container-name CONTAINER --account-name ACCOUNT --auth-mode login ``` @@ -253,14 +253,14 @@ explorer in the Azure portal. If you use a container level SAS URL, rclone operations are permitted only on a particular container, e.g. -```sh +```console rclone ls azureblob:container ``` You can also list the single container from the root. This will only show the container specified by the SAS URL. -```sh +```console $ rclone lsd azureblob: container/ ``` @@ -268,7 +268,7 @@ container/ Note that you can't see or access any other containers - this will fail -```sh +```console rclone ls azureblob:othercontainer ``` @@ -364,7 +364,7 @@ Don't set `env_auth` at the same time. If you want to access resources with public anonymous access then set `account` only. You can do this without making an rclone config: -```sh +```console rclone lsf :azureblob,account=ACCOUNT:CONTAINER ``` diff --git a/docs/content/azurefiles.md b/docs/content/azurefiles.md index 64ea5695d..0af5a908f 100644 --- a/docs/content/azurefiles.md +++ b/docs/content/azurefiles.md @@ -14,7 +14,7 @@ e.g. `remote:path/to/dir`. Here is an example of making a Microsoft Azure Files Storage configuration. For a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -90,26 +90,26 @@ Once configured you can use rclone. See all files in the top level: -```sh +```console rclone lsf remote: ``` Make a new directory in the root: -```sh +```console rclone mkdir remote:dir ``` Recursively List the contents: -```sh +```console rclone ls remote: ``` Sync `/home/local/directory` to the remote directory, deleting any excess files in the directory. -```sh +```console rclone sync --interactive /home/local/directory remote:dir ``` @@ -238,19 +238,19 @@ Credentials created with the `az` tool can be picked up using `env_auth`. For example if you were to login with a service principal like this: -```sh +```console az login --service-principal -u XXX -p XXX --tenant XXX ``` Then you could access rclone resources like this: -```sh +```console rclone lsf :azurefiles,env_auth,account=ACCOUNT: ``` Or -```sh +```console rclone lsf --azurefiles-env-auth --azurefiles-account=ACCOUNT :azurefiles: ``` diff --git a/docs/content/b2.md b/docs/content/b2.md index 5ebe5dbf6..5efec4cc6 100644 --- a/docs/content/b2.md +++ b/docs/content/b2.md @@ -15,7 +15,7 @@ command.) You may put subdirectories in too, e.g. `remote:bucket/path/to/dir`. Here is an example of making a b2 configuration. 
First run -```sh +```console rclone config ``` @@ -62,19 +62,19 @@ This remote is called `remote` and can now be used like this See all buckets -```sh +```console rclone lsd remote: ``` Create a new bucket -```sh +```console rclone mkdir remote:bucket ``` List the contents of a bucket -```sh +```console rclone ls remote:bucket ``` @@ -82,7 +82,7 @@ rclone ls remote:bucket Sync `/home/local/directory` to the remote bucket, deleting any excess files in the bucket. -```sh +```console rclone sync --interactive /home/local/directory remote:bucket ``` @@ -230,7 +230,7 @@ version followed by a `cleanup` of the old versions. Show current version and all the versions with `--b2-versions` flag. -```sh +```console $ rclone -q ls b2:cleanup-test 9 one.txt @@ -243,7 +243,7 @@ $ rclone -q --b2-versions ls b2:cleanup-test Retrieve an old version -```sh +```console $ rclone -q --b2-versions copy b2:cleanup-test/one-v2016-07-04-141003-000.txt /tmp $ ls -l /tmp/one-v2016-07-04-141003-000.txt @@ -252,7 +252,7 @@ $ ls -l /tmp/one-v2016-07-04-141003-000.txt Clean up all the old versions and show that they've gone. -```sh +```console $ rclone -q cleanup b2:cleanup-test $ rclone -q ls b2:cleanup-test @@ -268,7 +268,7 @@ When using `--b2-versions` flag rclone is relying on the file name to work out whether the objects are versions or not. Versions' names are created by inserting timestamp between file name and its extension. -```sh +```console 9 file.txt 8 file-v2023-07-17-161032-000.txt 16 file-v2023-06-15-141003-000.txt @@ -322,14 +322,14 @@ rclone will show and act on older versions of files. For example Listing without `--b2-versions` -```sh +```console $ rclone -q ls b2:cleanup-test 9 one.txt ``` And with -```sh +```console $ rclone -q --b2-versions ls b2:cleanup-test 9 one.txt 8 one-v2016-07-04-141032-000.txt @@ -349,7 +349,7 @@ permitted, so you can't upload files or delete them. Rclone supports generating file share links for private B2 buckets. They can either be for a file for example: -```sh +```console ./rclone link B2:bucket/path/to/file.txt https://f002.backblazeb2.com/file/bucket/path/to/file.txt?Authorization=xxxxxxxx @@ -357,7 +357,7 @@ https://f002.backblazeb2.com/file/bucket/path/to/file.txt?Authorization=xxxxxxxx or if run on a directory you will get: -```sh +```console ./rclone link B2:bucket/path https://f002.backblazeb2.com/file/bucket/path?Authorization=xxxxxxxx ``` diff --git a/docs/content/bisync.md b/docs/content/bisync.md index a72bc8e24..85f9ea387 100644 --- a/docs/content/bisync.md +++ b/docs/content/bisync.md @@ -31,7 +31,7 @@ section) before using, or data loss can result. Questions can be asked in the For example, your first command might look like this: -```sh +```console rclone bisync remote1:path1 remote2:path2 --create-empty-src-dirs --compare size,modtime,checksum --slow-hash-sync-only --resilient -MvP --drive-skip-gdocs --fix-case --resync --dry-run ``` @@ -40,7 +40,7 @@ After that, remove `--resync` as well. Here is a typical run log (with timestamps removed for clarity): -```sh +```console rclone bisync /testdir/path1/ /testdir/path2/ --verbose INFO : Synching Path1 "/testdir/path1/" with Path2 "/testdir/path2/" INFO : Path1 checking for diffs @@ -86,7 +86,7 @@ INFO : Bisync successful ## Command line syntax -```sh +```console $ rclone bisync --help Usage: rclone bisync remote1:path1 remote2:path2 [flags] @@ -169,7 +169,7 @@ be copied to Path1, and the process will then copy the Path1 tree to Path2. 
The `--resync` sequence is roughly equivalent to the following (but see [`--resync-mode`](#resync-mode) for other options): -```sh +```console rclone copy Path2 Path1 --ignore-existing [--create-empty-src-dirs] rclone copy Path1 Path2 [--create-empty-src-dirs] ``` @@ -225,7 +225,7 @@ Shutdown](#graceful-shutdown) mode, when needed) for a very robust almost any interruption it might encounter. Consider adding something like the following: -```sh +```text --resilient --recover --max-lock 2m --conflict-resolve newer ``` @@ -353,13 +353,13 @@ simultaneously (or just `modtime` AND `checksum`). being `size`, `modtime`, and `checksum`. For example, if you want to compare size and checksum, but not modtime, you would do: -```sh +```text --compare size,checksum ``` Or if you want to compare all three: -```sh +```text --compare size,modtime,checksum ``` @@ -627,7 +627,7 @@ specified (or when two identical suffixes are specified.) i.e. with `--conflict-loser pathname`, all of the following would produce exactly the same result: -```sh +```text --conflict-suffix path --conflict-suffix path,path --conflict-suffix path1,path2 @@ -642,7 +642,7 @@ changed with the [`--suffix-keep-extension`](/docs/#suffix-keep-extension) flag curly braces as globs. This can be helpful to track the date and/or time that each conflict was handled by bisync. For example: -```sh +```text --conflict-suffix {DateOnly}-conflict // result: myfile.txt.2006-01-02-conflict1 ``` @@ -667,7 +667,7 @@ conflicts with `..path1` and `..path2` (with two periods, and `path` instead of additional dots can be added by including them in the specified suffix string. For example, for behavior equivalent to the previous default, use: -```sh +```text [--conflict-resolve none] --conflict-loser pathname --conflict-suffix .path ``` @@ -707,13 +707,13 @@ For example, a possible sequence could look like this: 1. Normally scheduled bisync run: - ```sh + ```console rclone bisync Path1 Path2 -MPc --check-access --max-delete 10 --filters-file /path/to/filters.txt -v --no-cleanup --ignore-listing-checksum --disable ListR --checkers=16 --drive-pacer-min-sleep=10ms --create-empty-src-dirs --resilient ``` 2. Periodic independent integrity check (perhaps scheduled nightly or weekly): - ```sh + ```console rclone check -MvPc Path1 Path2 --filter-from /path/to/filters.txt ``` @@ -721,7 +721,7 @@ For example, a possible sequence could look like this: If one side is more up-to-date and you want to make the other side match it, you could run: - ```sh + ```console rclone sync Path1 Path2 --filter-from /path/to/filters.txt --create-empty-src-dirs -MPc -v ``` @@ -851,7 +851,7 @@ override `--backup-dir`. Example: -```sh +```console rclone bisync /Users/someuser/some/local/path/Bisync gdrive:Bisync --backup-dir1 /Users/someuser/some/local/path/BackupDir --backup-dir2 gdrive:BackupDir --suffix -2023-08-26 --suffix-keep-extension --check-access --max-delete 10 --filters-file /Users/someuser/some/local/path/bisync_filters.txt --no-cleanup --ignore-listing-checksum --checkers=16 --drive-pacer-min-sleep=10ms --create-empty-src-dirs --resilient -MvP --drive-skip-gdocs --fix-case ``` @@ -1383,7 +1383,7 @@ listings and thus not checked during the check access phase. Here are two normal runs. The first one has a newer file on the remote. The second has no deltas between local and remote. 
-```sh +```text 2021/05/16 00:24:38 INFO : Synching Path1 "/path/to/local/tree/" with Path2 "dropbox:/" 2021/05/16 00:24:38 INFO : Path1 checking for diffs 2021/05/16 00:24:38 INFO : - Path1 File is new - file.txt @@ -1433,7 +1433,7 @@ numerous such messages in the log. Since there are no final error/warning messages on line *7*, rclone has recovered from failure after a retry, and the overall sync was successful. -```sh +```text 1: 2021/05/14 00:44:12 INFO : Synching Path1 "/path/to/local/tree" with Path2 "dropbox:" 2: 2021/05/14 00:44:12 INFO : Path1 checking for diffs 3: 2021/05/14 00:44:12 INFO : Path2 checking for diffs @@ -1446,7 +1446,7 @@ recovered from failure after a retry, and the overall sync was successful. This log shows a *Critical failure* which requires a `--resync` to recover from. See the [Runtime Error Handling](#error-handling) section. -```sh +```text 2021/05/12 00:49:40 INFO : Google drive root '': Waiting for checks to finish 2021/05/12 00:49:40 INFO : Google drive root '': Waiting for transfers to finish 2021/05/12 00:49:40 INFO : Google drive root '': not deleting files as there were IO errors @@ -1531,7 +1531,7 @@ on Linux you can use *Cron* which is described below. The 1st example runs a sync every 5 minutes between a local directory and an OwnCloud server, with output logged to a runlog file: -```sh +```text # Minute (0-59) # Hour (0-23) # Day of Month (1-31) @@ -1548,7 +1548,7 @@ If you run `rclone bisync` as a cron job, redirect stdout/stderr to a file. The 2nd example runs a sync to Dropbox every hour and logs all stdout (via the `>>`) and stderr (via `2>&1`) to a log file. -```sh +```text 0 * * * * /path/to/rclone bisync /path/to/local/dropbox Dropbox: --check-access --filters-file /home/user/filters.txt >> /path/to/logs/dropbox-run.log 2>&1 ``` @@ -1630,7 +1630,7 @@ Rerunning the test will let it pass. Consider such failures as noise. ### Test command syntax -```sh +```text usage: go test ./cmd/bisync [options...] Options: diff --git a/docs/content/box.md b/docs/content/box.md index 521962a9a..ac34880fe 100644 --- a/docs/content/box.md +++ b/docs/content/box.md @@ -18,7 +18,7 @@ to use JWT authentication. `rclone config` walks you through it. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -96,13 +96,13 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories in top level of your Box -```sh +```console rclone lsd remote: ``` List all the files in your Box -```sh +```console rclone ls remote: ``` @@ -144,7 +144,7 @@ did the authentication on. Here is how to do it. -```sh +```console $ rclone config Current remotes: diff --git a/docs/content/cache.md b/docs/content/cache.md index 79b6be29f..780c2094d 100644 --- a/docs/content/cache.md +++ b/docs/content/cache.md @@ -31,7 +31,7 @@ with `cache`. Here is an example of how to make a remote called `test-cache`. First run: -```sh +```console rclone config ``` @@ -117,19 +117,19 @@ You can then use it like this, List directories in top level of your drive -```sh +```console rclone lsd test-cache: ``` List all the files in your drive -```sh +```console rclone ls test-cache: ``` To start a cached mount -```sh +```console rclone mount --allow-other test-cache: /var/tmp/test-cache ``` diff --git a/docs/content/cloudinary.md b/docs/content/cloudinary.md index 8297d9817..bae3ece9e 100644 --- a/docs/content/cloudinary.md +++ b/docs/content/cloudinary.md @@ -38,7 +38,7 @@ from the developer section. 
Now run -```sh +```console rclone config ``` @@ -113,19 +113,19 @@ y/e/d> y List directories in the top level of your Media Library -```sh +```console rclone lsd cloudinary-media-library: ``` Make a new directory. -```sh +```console rclone mkdir cloudinary-media-library:directory ``` List the contents of a directory. -```sh +```console rclone ls cloudinary-media-library:directory ``` diff --git a/docs/content/combine.md b/docs/content/combine.md index 63a83ffbd..6816c31b8 100644 --- a/docs/content/combine.md +++ b/docs/content/combine.md @@ -11,7 +11,7 @@ tree. For example you might have a remote for images on one provider: -```sh +```console $ rclone tree s3:imagesbucket / ├── image1.jpg @@ -20,7 +20,7 @@ $ rclone tree s3:imagesbucket And a remote for files on another: -```sh +```console $ rclone tree drive:important/files / ├── file1.txt @@ -30,7 +30,7 @@ $ rclone tree drive:important/files The `combine` backend can join these together into a synthetic directory structure like this: -```sh +```console $ rclone tree combined: / ├── files @@ -57,7 +57,7 @@ either be a local paths or other remotes. Here is an example of how to make a combine called `remote` for the example above. First run: -```sh +```console rclone config ``` @@ -107,7 +107,7 @@ the shared drives you have access to. Assuming your main (non shared drive) Google drive remote is called `drive:` you would run -```sh +```console rclone backend -o config drives drive: ``` diff --git a/docs/content/crypt.md b/docs/content/crypt.md index 03e2564dc..e65b5b846 100644 --- a/docs/content/crypt.md +++ b/docs/content/crypt.md @@ -274,7 +274,7 @@ details, and a tool you can use to check if you are affected. Create the following file structure using "standard" file name encryption. -```sh +```text plaintext/ ├── file0.txt ├── file1.txt @@ -287,7 +287,7 @@ plaintext/ Copy these to the remote, and list them -```sh +```console $ rclone -q copy plaintext secret: $ rclone -q ls secret: 7 file1.txt @@ -299,7 +299,7 @@ $ rclone -q ls secret: The crypt remote looks like -```sh +```console $ rclone -q ls remote:path 55 hagjclgavj2mbiqm6u6cnjjqcg 54 v05749mltvv1tf4onltun46gls @@ -310,7 +310,7 @@ $ rclone -q ls remote:path The directory structure is preserved -```sh +```console $ rclone -q ls secret:subdir 8 file2.txt 9 file3.txt @@ -321,7 +321,7 @@ Without file name encryption `.bin` extensions are added to underlying names. This prevents the cloud provider attempting to interpret file content. -```sh +```console $ rclone -q ls remote:path 54 file0.txt.bin 57 subdir/file3.txt.bin diff --git a/docs/content/docker.md b/docs/content/docker.md index c63d74408..c8592c826 100644 --- a/docs/content/docker.md +++ b/docs/content/docker.md @@ -45,27 +45,27 @@ on the host. 
The *FUSE* driver is a prerequisite for rclone mounting and should be installed on host: -```sh +```console sudo apt-get -y install fuse3 ``` Create two directories required by rclone docker plugin: -```sh +```console sudo mkdir -p /var/lib/docker-plugins/rclone/config sudo mkdir -p /var/lib/docker-plugins/rclone/cache ``` Install the managed rclone docker plugin for your architecture (here `amd64`): -```sh +```console docker plugin install rclone/docker-volume-rclone:amd64 args="-v" --alias rclone --grant-all-permissions docker plugin list ``` Create your [SFTP volume](/sftp/#standard-options): -```sh +```console docker volume create firstvolume -d rclone -o type=sftp -o sftp-host=_hostname_ -o sftp-user=_username_ -o sftp-pass=_password_ -o allow-other=true ``` @@ -78,7 +78,7 @@ for example `-o path=/home/username`. Time to create a test container and mount the volume into it: -```sh +```console docker run --rm -it -v firstvolume:/mnt --workdir /mnt ubuntu:latest bash ``` @@ -88,7 +88,7 @@ or otherwise play with it. Type `exit` when you are done. The container will stop but the volume will stay, ready to be reused. When it's not needed anymore, remove it: -```sh +```console docker volume list docker volume remove firstvolume ``` @@ -145,7 +145,7 @@ volumes: and run the stack: -```sh +```console docker stack deploy example -c ./example.yml ``` @@ -155,7 +155,7 @@ run service containers on one or more cluster nodes and request the `example_configdata` volume from rclone plugins on the node hosts. You can use the following commands to confirm results: -```sh +```console docker service ls docker service ps example_heimdall docker volume ls @@ -173,7 +173,7 @@ the `docker volume remove example_configdata` command on every node. Volumes can be created with [docker volume create](https://docs.docker.com/engine/reference/commandline/volume_create/). Here are a few examples: -```sh +```console docker volume create vol1 -d rclone -o remote=storj: -o vfs-cache-mode=full docker volume create vol2 -d rclone -o remote=:storj,access_grant=xxx:heimdall docker volume create vol3 -d rclone -o type=storj -o path=heimdall -o storj-access-grant=xxx -o poll-interval=0 @@ -186,7 +186,7 @@ option. Volumes can be inspected as follows: -```sh +```console docker volume list docker volume inspect vol1 ``` @@ -210,13 +210,13 @@ The `remote=:backend:dir/subdir` syntax can be used to create while the `type` and `path` options provide a simpler alternative for this. Using two split options -```sh +```text -o type=backend -o path=dir/subdir ``` is equivalent to the combined syntax -```sh +```text -o remote=:backend:dir/subdir ``` @@ -262,13 +262,13 @@ Inside connection string the backend prefix must be dropped from parameter names but in the `-o param=value` array it must be present. For instance, compare the following option array -```sh +```text -o remote=:sftp:/home -o sftp-host=localhost ``` with equivalent connection string: -```sh +```text -o remote=:sftp,host=localhost:/home ``` @@ -345,7 +345,7 @@ By default they must exist on host at the following locations You can [install managed plugin](https://docs.docker.com/engine/reference/commandline/plugin_install/) with default settings as follows: -```sh +```console docker plugin install rclone/docker-volume-rclone:amd64 --grant-all-permissions --alias rclone ``` @@ -388,7 +388,7 @@ mount namespaces and bind-mounts into requesting user containers. 
You can tweak a few plugin settings after installation when it's disabled (not in use), for instance: -```sh +```console docker plugin disable rclone docker plugin set rclone RCLONE_VERBOSE=2 config=/etc/rclone args="--vfs-cache-mode=writes --allow-other" docker plugin enable rclone @@ -448,7 +448,7 @@ actual level assigned by rclone in the encapsulated message string. You can set custom plugin options right when you install it, *in one go*: -```sh +```console docker plugin remove rclone docker plugin install rclone/docker-volume-rclone:amd64 \ --alias rclone --grant-all-permissions \ @@ -486,7 +486,7 @@ You can just run it (type `rclone serve docker` and hit enter) for the test. Install *FUSE*: -```sh +```console sudo apt-get -y install fuse ``` @@ -496,7 +496,7 @@ and [docker-volume-rclone.socket](https://raw.githubusercontent.com/rclone/rclon Put them to the `/etc/systemd/system/` directory: -```sh +```console cp docker-volume-plugin.service /etc/systemd/system/ cp docker-volume-plugin.socket /etc/systemd/system/ ``` @@ -505,7 +505,7 @@ Please note that all commands in this section must be run as *root* but we omit `sudo` prefix for brevity. Now create directories required by the service: -```sh +```console mkdir -p /var/lib/docker-volumes/rclone mkdir -p /var/lib/docker-plugins/rclone/config mkdir -p /var/lib/docker-plugins/rclone/cache @@ -513,7 +513,7 @@ mkdir -p /var/lib/docker-plugins/rclone/cache Run the docker plugin service in the socket activated mode: -```sh +```console systemctl daemon-reload systemctl start docker-volume-rclone.service systemctl enable docker-volume-rclone.socket @@ -540,7 +540,7 @@ prefer socket activation. You can [see managed plugin settings](https://docs.docker.com/engine/extend/#debugging-plugins) with -```sh +```console docker plugin list docker plugin inspect rclone ``` @@ -555,20 +555,20 @@ but their actual level can be seen from encapsulated message string. You will usually install the latest version of managed plugin for your platform. Use the following commands to print the actual installed version: -```sh +```console PLUGID=$(docker plugin list --no-trunc | awk '/rclone/{print$1}') sudo runc --root /run/docker/runtime-runc/plugins.moby exec $PLUGID rclone version ``` You can even use `runc` to run shell inside the plugin container: -```sh +```console sudo runc --root /run/docker/runtime-runc/plugins.moby exec --tty $PLUGID bash ``` Also you can use curl to check the plugin socket connectivity: -```sh +```console docker plugin list --no-trunc PLUGID=123abc... sudo curl -H Content-Type:application/json -XPOST -d {} --unix-socket /run/docker/plugins/$PLUGID/rclone.sock http://localhost/Plugin.Activate @@ -582,7 +582,7 @@ diagnosing with the above methods, you can try clearing the state of the plugin. This might be needed because a reinstall don't cleanup existing state files to allow for easy restoration, as stated above. -```sh +```console docker plugin disable rclone # disable the plugin to ensure no interference sudo rm /var/lib/docker-plugins/rclone/cache/docker-plugin.state # removing the plugin state docker plugin enable rclone # re-enable the plugin afterward @@ -598,14 +598,14 @@ it won't even return an error. I hope that docker maintainers will fix this some day. In the meantime be aware that you must remove your volume before recreating it with new settings: -```sh +```console docker volume remove my_vol docker volume create my_vol -d rclone -o opt1=new_val1 ... 
``` and verify that settings did update: -```sh +```console docker volume list docker volume inspect my_vol ``` diff --git a/docs/content/docs.md b/docs/content/docs.md index 3c178af79..f3cfb27bd 100644 --- a/docs/content/docs.md +++ b/docs/content/docs.md @@ -22,7 +22,7 @@ file and choose its location.) The easiest way to make the config is to run rclone with the config option: -```sh +```console rclone config ``` @@ -100,7 +100,7 @@ Rclone syncs a directory tree from one storage system to another. Its syntax is like this -```sh +```console rclone subcommand [options] ``` @@ -115,7 +115,7 @@ used before the `subcommand`. Anything after a `--` option will not be interpreted as an option so if you need to add a parameter which starts with a `-` then put a `--` on its own first, eg -```sh +```console rclone lsf -- -directory-starting-with-dash ``` @@ -136,7 +136,7 @@ learning rclone to avoid accidental data loss. rclone uses a system of subcommands. For example -```sh +```console rclone ls remote:path # lists a remote rclone copy /local/path remote:path # copies /local/path to the remote rclone sync --interactive /local/path remote:path # syncs /local/path to the remote @@ -192,7 +192,7 @@ directory` if it isn't. For example, suppose you have a remote with a file in called `test.jpg`, then you could copy just that file like this -```sh +```console rclone copy remote:test.jpg /tmp/download ``` @@ -200,13 +200,13 @@ The file `test.jpg` will be placed inside `/tmp/download`. This is equivalent to specifying -```sh +```console rclone copy --files-from /tmp/files remote: /tmp/download ``` Where `/tmp/files` contains the single line -```sh +```console test.jpg ``` @@ -252,25 +252,25 @@ the command line (or in environment variables). Here are some examples: -```sh +```console rclone lsd --http-url https://pub.rclone.org :http: ``` To list all the directories in the root of `https://pub.rclone.org/`. -```sh +```console rclone lsf --http-url https://example.com :http:path/to/dir ``` To list files and directories in `https://example.com/path/to/dir/` -```sh +```console rclone copy --http-url https://example.com :http:path/to/dir /tmp/dir ``` To copy files and directories in `https://example.com/path/to/dir` to `/tmp/dir`. -```sh +```console rclone copy --sftp-host example.com :sftp:path/to/dir /tmp/dir ``` @@ -284,7 +284,7 @@ syntax, so instead of providing the arguments as command line parameters `--http-url https://pub.rclone.org` they are provided as part of the remote specification as a kind of connection string. -```sh +```console rclone lsd ":http,url='https://pub.rclone.org':" rclone lsf ":http,url='https://example.com':path/to/dir" rclone copy ":http,url='https://example.com':path/to/dir" /tmp/dir @@ -295,7 +295,7 @@ These can apply to modify existing remotes as well as create new remotes with the on the fly syntax. This example is equivalent to adding the `--drive-shared-with-me` parameter to the remote `gdrive:`. -```sh +```console rclone lsf "gdrive,shared_with_me:path/to/dir" ``` @@ -306,13 +306,13 @@ file shared on google drive to the normal drive which **does not work** because the `--drive-shared-with-me` flag applies to both the source and the destination. -```sh +```console rclone copy --drive-shared-with-me gdrive:shared-file.txt gdrive: ``` However using the connection string syntax, this does work. -```sh +```console rclone copy "gdrive,shared_with_me:shared-file.txt" gdrive: ``` @@ -321,13 +321,13 @@ backend. 
If for example gdriveCrypt is a crypt based on gdrive, then the following command **will not work** as intended, because `shared_with_me` is ignored by the crypt backend: -```sh +```console rclone copy "gdriveCrypt,shared_with_me:shared-file.txt" gdriveCrypt: ``` The connection strings have the following syntax -```sh +```text remote,parameter=value,parameter2=value2:path/to/dir :backend,parameter=value,parameter2=value2:path/to/dir ``` @@ -335,7 +335,7 @@ remote,parameter=value,parameter2=value2:path/to/dir If the `parameter` has a `:` or `,` then it must be placed in quotes `"` or `'`, so -```sh +```text remote,parameter="colon:value",parameter2="comma,value":path/to/dir :backend,parameter='colon:value',parameter2='comma,value':path/to/dir ``` @@ -343,7 +343,7 @@ remote,parameter="colon:value",parameter2="comma,value":path/to/dir If a quoted value needs to include that quote, then it should be doubled, so -```sh +```text remote,parameter="with""quote",parameter2='with''quote':path/to/dir ``` @@ -354,13 +354,13 @@ If you leave off the `=parameter` then rclone will substitute `=true` which works very well with flags. For example, to use s3 configured in the environment you could use: -```sh +```console rclone lsd :s3,env_auth: ``` Which is equivalent to -```sh +```console rclone lsd :s3,env_auth=true: ``` @@ -372,7 +372,7 @@ If you are a shell master then you'll know which strings are OK and which aren't, but if you aren't sure then enclose them in `"` and use `'` as the inside quote. This syntax works on all OSes. -```sh +```console rclone copy ":http,url='https://example.com':path/to/dir" /tmp/dir ``` @@ -381,7 +381,7 @@ strings in the shell (notably `\` and `$` and `"`) so if your strings contain those you can swap the roles of `"` and `'` thus. (This syntax does not work on Windows.) -```sh +```console rclone copy ':http,url="https://example.com":path/to/dir' /tmp/dir ``` @@ -394,13 +394,13 @@ If you supply extra configuration to a backend by command line flag, environment variable or connection string then rclone will add a suffix based on the hash of the config to the name of the remote, eg -```sh +```console rclone -vv lsf --s3-chunk-size 20M s3: ``` Has the log message -```sh +```text DEBUG : s3: detected overridden config - adding "{Srj1p}" suffix to name ``` @@ -411,13 +411,13 @@ This should only be noticeable in the logs. This means that on the fly backends such as -```sh +```console rclone -vv lsf :s3,env_auth: ``` Will get their own names -```sh +```text DEBUG : :s3: detected overridden config - adding "{YTu53}" suffix to name ``` @@ -551,13 +551,13 @@ Here are some gotchas which may help users unfamiliar with the shell rules If your names have spaces or shell metacharacters (e.g. `*`, `?`, `$`, `'`, `"`, etc.) then you must quote them. Use single quotes `'` by default. -```sh +```console rclone copy 'Important files?' remote:backup ``` If you want to send a `'` you will need to use `"`, e.g. -```sh +```console rclone copy "O'Reilly Reviews" remote:backup ``` @@ -590,13 +590,13 @@ file or directory like this then use the full path starting with a So to sync a directory called `sync:me` to a remote called `remote:` use -```sh +```console rclone sync --interactive ./sync:me remote:path ``` or -```sh +```console rclone sync --interactive /full/path/to/sync:me remote:path ``` @@ -611,7 +611,7 @@ to copy them in place. Eg -```sh +```console rclone copy s3:oldbucket s3:newbucket ``` @@ -632,7 +632,7 @@ same. This can be used when scripting to make aged backups efficiently, e.g. 
-```sh +```console rclone sync --interactive remote:current-backup remote:previous-backup rclone sync --interactive /path/to/files remote:current-backup ``` @@ -872,7 +872,7 @@ excluded by a filter rule. For example -```sh +```console rclone sync --interactive /path/to/local remote:current --backup-dir remote:old ``` @@ -902,7 +902,7 @@ You can use `--bind 0.0.0.0` to force rclone to use IPv4 addresses and This option controls the bandwidth limit. For example -```sh +```text --bwlimit 10M ``` @@ -914,7 +914,7 @@ suffix B|K|M|G|T|P. The default is `0` which means to not limit bandwidth. The upload and download bandwidth can be specified separately, as `--bwlimit UP:DOWN`, so -```sh +```text --bwlimit 10M:100k ``` @@ -922,7 +922,7 @@ would mean limit the upload bandwidth to 10 MiB/s and the download bandwidth to 100 KiB/s. Either limit can be "off" meaning no limit, so to just limit the upload bandwidth you would use -```sh +```text --bwlimit 10M:off ``` @@ -979,13 +979,13 @@ be unlimited. Timeslots without `WEEKDAY` are extended to the whole week. So this example: -```sh +```text --bwlimit "Mon-00:00,512 12:00,1M Sun-20:00,off" ``` Is equivalent to this: -```sh +```text --bwlimit "Mon-00:00,512Mon-12:00,1M Tue-12:00,1M Wed-12:00,1M Thu-12:00,1M Fri-12:00,1M Sat-12:00,1M Sun-12:00,1M Sun-20:00,off" ``` @@ -1005,14 +1005,14 @@ of a long running rclone transfer and to restore it back to the value specified with `--bwlimit` quickly when needed. Assuming there is only one rclone instance running, you can toggle the limiter like this: -```sh +```console kill -SIGUSR2 $(pidof rclone) ``` If you configure rclone with a [remote control](/rc) then you can use change the bwlimit dynamically: -```sh +```console rclone rc core/bwlimit rate=1M ``` @@ -1023,7 +1023,7 @@ This option controls per file bandwidth limit. For the options see the For example use this to allow no transfers to be faster than 1 MiB/s -```sh +```text --bwlimit-file 1M ``` @@ -1313,7 +1313,7 @@ time rclone started up. This disables a comma separated list of optional features. For example to disable server-side move and server-side copy use: -```sh +```text --disable move,copy ``` @@ -1321,13 +1321,13 @@ The features can be put in any case. To see a list of which features can be disabled use: -```sh +```text --disable help ``` The features a remote has can be seen in JSON format with: -```sh +```console rclone backend features remote: ``` @@ -1367,7 +1367,7 @@ support ([RFC 8622](https://tools.ietf.org/html/rfc8622)). For example, if you configured QoS on router to handle LE properly. Running: -```sh +```console rclone copy --dscp LE from:/from to:/to ``` @@ -1459,7 +1459,7 @@ This flag is supported for all HTTP based backends even those not supported by `--header-upload` and `--header-download` so may be used as a workaround for those with care. -```sh +```console rclone ls remote:test --header "X-Rclone: Foo" --header "X-LetMeIn: Yes" ``` @@ -1468,7 +1468,7 @@ rclone ls remote:test --header "X-Rclone: Foo" --header "X-LetMeIn: Yes" Add an HTTP header for all download transactions. The flag can be repeated to add multiple headers. -```sh +```console rclone sync --interactive s3:test/src ~/dst --header-download "X-Amz-Meta-Test: Foo" --header-download "X-Amz-Meta-Test2: Bar" ``` @@ -1480,7 +1480,7 @@ currently supported backends. Add an HTTP header for all upload transactions. The flag can be repeated to add multiple headers. 
-```sh +```console rclone sync --interactive ~/src s3:test/dst --header-upload "Content-Disposition: attachment; filename='cool.html'" --header-upload "X-Amz-Meta-Test: FooBar" ``` @@ -1658,7 +1658,7 @@ especially with `rclone sync`. For example -```sh +```console $ rclone delete --interactive /tmp/dir rclone: delete "important-file.txt"? y) Yes, this is OK (default) @@ -1748,7 +1748,7 @@ ignored. For example if the following flags are in use -```sh +```console rclone --log-file rclone.log --log-file-max-size 1M --log-file-max-backups 3 ``` @@ -1843,7 +1843,7 @@ once as administrator to create the registry key in advance. severe) than or equal to the `--log-level`. For example to log DEBUG to a log file but ERRORs to the event log you would use -```sh +```text --log-file rclone.log --log-level DEBUG --windows-event-log ERROR ``` @@ -2074,7 +2074,7 @@ it in `"`, if you want a literal `"` in an argument then enclose the argument in `"` and double the `"`. See [CSV encoding](https://godoc.org/encoding/csv) for more info. -```sh +```text --metadata-mapper "python bin/test_metadata_mapper.py" --metadata-mapper 'python bin/test_metadata_mapper.py "argument with a space"' --metadata-mapper 'python bin/test_metadata_mapper.py "argument with ""two"" quotes"' @@ -2445,7 +2445,7 @@ for more info. Eg -```sh +```text --password-command "echo hello" --password-command 'echo "hello with space"' --password-command 'echo "hello with ""quotes"" and space"' @@ -2650,7 +2650,7 @@ or with `--backup-dir`. See `--backup-dir` for more info. For example -```sh +```console rclone copy --interactive /path/to/local/file remote:current --suffix .bak ``` @@ -2661,7 +2661,7 @@ If using `rclone sync` with `--suffix` and without `--backup-dir` then it is recommended to put a filter rule in excluding the suffix otherwise the `sync` will delete the backup files. -```sh +```console rclone sync --interactive /path/to/local/file remote:current --suffix .bak --exclude "*.bak" ``` @@ -3044,7 +3044,7 @@ have to supply the password every time you start rclone. To add a password to your rclone configuration, execute `rclone config`. -```sh +```console $ rclone config Current remotes: @@ -3058,7 +3058,7 @@ e/n/d/s/q> Go into `s`, Set configuration password: -```sh +```text e/n/d/s/q> s Your configuration is not encrypted. If you add a password, you will protect your login information to cloud services. @@ -3131,7 +3131,7 @@ environment variables. The script is supplied either via One useful example of this is using the `passwordstore` application to retrieve the password: -```sh +```console export RCLONE_PASSWORD_COMMAND="pass rclone/config" ``` @@ -3177,13 +3177,13 @@ at rest or transfer. Detailed instructions for popular OSes: - Generate and store a password - ```sh + ```console security add-generic-password -a rclone -s config -w $(openssl rand -base64 40) ``` - Add the retrieval instruction to your `.zprofile` / `.profile` - ```sh + ```console export RCLONE_PASSWORD_COMMAND="/usr/bin/security find-generic-password -a rclone -s config -w" ``` @@ -3196,13 +3196,13 @@ at rest or transfer. Detailed instructions for popular OSes: - Generate and store a password - ```sh + ```console echo $(openssl rand -base64 40) | pass insert -m rclone/config ``` - Add the retrieval instruction - ```sh + ```console export RCLONE_PASSWORD_COMMAND="/usr/bin/pass rclone/config" ``` @@ -3462,7 +3462,7 @@ so it can only contain letters, digits, or the `_` (underscore) character. 
For example, to configure an S3 remote named `mys3:` without a config file (using unix ways of setting environment variables): -```sh +```console $ export RCLONE_CONFIG_MYS3_TYPE=s3 $ export RCLONE_CONFIG_MYS3_ACCESS_KEY_ID=XXX $ export RCLONE_CONFIG_MYS3_SECRET_ACCESS_KEY=XXX @@ -3482,7 +3482,7 @@ You must write the name in uppercase in the environment variable, but as seen from example above it will be listed and can be accessed in lowercase, while you can also refer to the same remote in uppercase: -```sh +```console $ rclone lsd mys3: -1 2016-09-21 12:54:21 -1 my-bucket $ rclone lsd MYS3: @@ -3497,7 +3497,7 @@ set the access key of all remotes using S3, including myS3Crypt. Note also that now rclone has [connection strings](#connection-strings), it is probably easier to use those instead which makes the above example -```sh +```console rclone lsd :s3,access_key_id=XXX,secret_access_key=XXX: ``` diff --git a/docs/content/doi.md b/docs/content/doi.md index 75fb8fd7b..b4318f085 100644 --- a/docs/content/doi.md +++ b/docs/content/doi.md @@ -27,7 +27,7 @@ Paths may be as deep as required, e.g. `remote:directory/subdirectory`. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` diff --git a/docs/content/downloads.md b/docs/content/downloads.md index 0d28ae305..32dd0c055 100644 --- a/docs/content/downloads.md +++ b/docs/content/downloads.md @@ -56,13 +56,13 @@ signatures on the release. To install rclone on Linux/macOS/BSD systems, run: -```sh +```console sudo -v ; curl https://rclone.org/install.sh | sudo bash ``` For beta installation, run: -```sh +```console sudo -v ; curl https://rclone.org/install.sh | sudo bash -s beta ``` diff --git a/docs/content/drive.md b/docs/content/drive.md index 5b3579635..5153f89f5 100644 --- a/docs/content/drive.md +++ b/docs/content/drive.md @@ -18,7 +18,7 @@ through it. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -110,19 +110,19 @@ You can then use it like this, List directories in top level of your drive -```sh +```console rclone lsd remote: ``` List all the files in your drive -```sh +```console rclone ls remote: ``` To copy a local directory to a drive directory called backup -```sh +```console rclone copy /home/source remote:backup ``` @@ -270,7 +270,7 @@ account key" button. ##### 3. Configure rclone, assuming a new install -```sh +```text rclone config n/s/q> n # New @@ -375,7 +375,7 @@ It will use the `--checkers` value to specify the number of requests to run in In tests, these batch requests were up to 20x faster than the regular method. Running the following command against different sized folders gives: -```sh +```console rclone lsjson -vv -R --checkers=6 gdrive:folder ``` diff --git a/docs/content/dropbox.md b/docs/content/dropbox.md index 327c68b8e..c01bc2892 100644 --- a/docs/content/dropbox.md +++ b/docs/content/dropbox.md @@ -19,7 +19,7 @@ through it. Here is an example of how to make a remote called `remote`. 
First run: -```sh +```console rclone config ``` @@ -73,19 +73,19 @@ You can then use it like this, List directories in top level of your dropbox -```sh +```console rclone lsd remote: ``` List all the files in your dropbox -```sh +```console rclone ls remote: ``` To copy a local directory to a dropbox directory called backup -```sh +```console rclone copy /home/source remote:backup ``` diff --git a/docs/content/faq.md b/docs/content/faq.md index e2cb94ac2..be3e43ff5 100644 --- a/docs/content/faq.md +++ b/docs/content/faq.md @@ -33,7 +33,7 @@ If you need to configure a remote, see the [config help docs](/docs/#configure). If you are using rclone entirely with [on the fly remotes](/docs/#backend-path-to-dir), you can create an empty config file to get rid of this notice, for example: -```sh +```console rclone config touch ``` @@ -48,7 +48,7 @@ The syncs would be incremental (on a file by file basis). e.g. -```sh +```console rclone sync --interactive drive:Folder s3:bucket ``` @@ -57,7 +57,7 @@ rclone sync --interactive drive:Folder s3:bucket You can use rclone from multiple places at the same time if you choose different subdirectory for the output, e.g. -```sh +```console Server A> rclone sync --interactive /tmp/whatever remote:ServerA Server B> rclone sync --interactive /tmp/whatever remote:ServerB ``` @@ -65,7 +65,7 @@ Server B> rclone sync --interactive /tmp/whatever remote:ServerB If you sync to the same directory then you should use rclone copy otherwise the two instances of rclone may delete each other's files, e.g. -```sh +```console Server A> rclone copy /tmp/whatever remote:Backup Server B> rclone copy /tmp/whatever remote:Backup ``` @@ -119,7 +119,7 @@ may use `http_proxy` but another one `HTTP_PROXY`. The `Go` libraries used by `rclone` will try both variations, but you may wish to set all possibilities. So, on Linux, you may end up with code similar to -```sh +```console export http_proxy=http://proxyserver:12345 export https_proxy=$http_proxy export HTTP_PROXY=$http_proxy @@ -128,7 +128,7 @@ export HTTPS_PROXY=$http_proxy Note: If the proxy server requires a username and password, then use -```sh +```console export http_proxy=http://username:password@proxyserver:12345 export https_proxy=$http_proxy export HTTP_PROXY=$http_proxy @@ -141,7 +141,7 @@ For instance "foo.com" also matches "bar.foo.com". e.g. -```sh +```console export no_proxy=localhost,127.0.0.0/8,my.host.name export NO_PROXY=$no_proxy ``` @@ -170,7 +170,7 @@ where `rclone` can't verify the server with the SSL root certificates. Rclone (via the Go runtime) tries to load the root certificates from these places on Linux. -```sh +```text "/etc/ssl/certs/ca-certificates.crt", // Debian/Ubuntu/Gentoo etc. "/etc/pki/tls/certs/ca-bundle.crt", // Fedora/RHEL "/etc/ssl/ca-bundle.pem", // OpenSUSE @@ -180,7 +180,7 @@ these places on Linux. So doing something like this should fix the problem. It also sets the time which is important for SSL to work properly. -```sh +```console mkdir -p /etc/ssl/certs/ curl -o /etc/ssl/certs/ca-certificates.crt https://raw.githubusercontent.com/bagder/ca-bundle/master/ca-bundle.crt ntpclient -s -h pool.ntp.org @@ -193,7 +193,7 @@ provide the SSL root certificates on Unix systems other than macOS. Note that you may need to add the `--insecure` option to the `curl` command line if it doesn't work without. 
-```sh +```console curl --insecure -o /etc/ssl/certs/ca-certificates.crt https://raw.githubusercontent.com/bagder/ca-bundle/master/ca-bundle.crt ``` @@ -202,7 +202,7 @@ On macOS, you can install Homebrew, and specify the SSL root certificates with the [--ca-cert](/docs/#ca-cert-stringarray) flag. -```sh +```console brew install ca-certificates find $(brew --prefix)/etc/ca-certificates -type f ``` diff --git a/docs/content/fichier.md b/docs/content/fichier.md index 7701de80b..51dc084db 100644 --- a/docs/content/fichier.md +++ b/docs/content/fichier.md @@ -21,7 +21,7 @@ which you need to do in your browser. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -68,19 +68,19 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories in top level of your 1Fichier account -```sh +```console rclone lsd remote: ``` List all the files in your 1Fichier account -```sh +```console rclone ls remote: ``` To copy a local directory to a 1Fichier directory called backup -```sh +```console rclone copy /home/source remote:backup ``` diff --git a/docs/content/filefabric.md b/docs/content/filefabric.md index a2ac93144..e27d30f1a 100644 --- a/docs/content/filefabric.md +++ b/docs/content/filefabric.md @@ -19,7 +19,7 @@ do in your browser. `rclone config` walks you through it. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -93,19 +93,19 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories in top level of your Enterprise File Fabric -```sh +```console rclone lsd remote: ``` List all the files in your Enterprise File Fabric -```sh +```console rclone ls remote: ``` To copy a local directory to an Enterprise File Fabric directory called backup -```sh +```console rclone copy /home/source remote:backup ``` @@ -148,7 +148,7 @@ In order to do this you will have to find the `Folder ID` of the directory you wish rclone to display. These aren't displayed in the web interface, but you can use `rclone lsf` to find them, for example -```sh +```console $ rclone lsf --dirs-only -Fip --csv filefabric: 120673758,Burnt PDFs/ 120673759,My Quick Uploads/ diff --git a/docs/content/filelu.md b/docs/content/filelu.md index 546b68420..4482d3296 100644 --- a/docs/content/filelu.md +++ b/docs/content/filelu.md @@ -18,7 +18,7 @@ device. Here is an example of how to make a remote called `filelu`. First, run: -```sh +```console rclone config ``` @@ -56,7 +56,7 @@ A path without an initial `/` will operate in the `Rclone` directory. A path with an initial `/` will operate at the root where you can see the `Rclone` directory. 
-```sh +```console $ rclone lsf TestFileLu:/ CCTV/ Camera/ @@ -72,79 +72,79 @@ Videos/ Create a new folder named `foldername` in the `Rclone` directory: -```sh +```console rclone mkdir filelu:foldername ``` Delete a folder on FileLu: -```sh +```console rclone rmdir filelu:/folder/path/ ``` Delete a file on FileLu: -```sh +```console rclone delete filelu:/hello.txt ``` List files from your FileLu account: -```sh +```console rclone ls filelu: ``` List all folders: -```sh +```console rclone lsd filelu: ``` Copy a specific file to the FileLu root: -```sh +```console rclone copy D:\hello.txt filelu: ``` Copy files from a local directory to a FileLu directory: -```sh +```console rclone copy D:/local-folder filelu:/remote-folder/path/ ``` Download a file from FileLu into a local directory: -```sh +```console rclone copy filelu:/file-path/hello.txt D:/local-folder ``` Move files from a local directory to a FileLu directory: -```sh +```console rclone move D:\local-folder filelu:/remote-path/ ``` Sync files from a local directory to a FileLu directory: -```sh +```console rclone sync --interactive D:/local-folder filelu:/remote-path/ ``` Mount remote to local Linux: -```sh +```console rclone mount filelu: /root/mnt --vfs-cache-mode full ``` Mount remote to local Windows: -```sh +```console rclone mount filelu: D:/local_mnt --vfs-cache-mode full ``` Get storage info about the FileLu account: -```sh +```console rclone about filelu: ``` diff --git a/docs/content/filescom.md b/docs/content/filescom.md index 56d1a2e89..ca3eabea6 100644 --- a/docs/content/filescom.md +++ b/docs/content/filescom.md @@ -19,7 +19,7 @@ password. Alternatively, you can authenticate using an API Key from Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -88,26 +88,26 @@ Once configured you can use rclone. See all files in the top level: -```sh +```console rclone lsf remote: ``` Make a new directory in the root: -```sh +```console rclone mkdir remote:dir ``` Recursively List the contents: -```sh +```console rclone ls remote: ``` Sync `/home/local/directory` to the remote directory, deleting any excess files in the directory. -```sh +```console rclone sync --interactive /home/local/directory remote:dir ``` diff --git a/docs/content/filtering.md b/docs/content/filtering.md index 319dccf5b..8c587c968 100644 --- a/docs/content/filtering.md +++ b/docs/content/filtering.md @@ -468,14 +468,14 @@ E.g. `rclone ls remote: --include "*.{png,jpg}"` lists the files on E.g. multiple rclone copy commands can be combined with `--include` and a pattern-list. -```sh +```console rclone copy /vol1/A remote:A rclone copy /vol1/B remote:B ``` is equivalent to: -```sh +```console rclone copy /vol1 remote: --include "{A,B}/**" ``` @@ -677,7 +677,7 @@ user2/prefect Then copy these to a remote: -```sh +```console rclone copy --files-from files-from.txt /home remote:backup ``` @@ -699,7 +699,7 @@ Alternatively if `/` is chosen as root `files-from.txt` will be: The copy command will be: -```sh +```console rclone copy --files-from files-from.txt / remote:backup ``` @@ -805,7 +805,7 @@ useful for: The flag takes two parameters expressed as a fraction: -```sh +```text --hash-filter K/N ``` @@ -824,7 +824,7 @@ Each partition is non-overlapping, ensuring all files are covered without duplic Use `@` as `K` to randomly select a partition: -```sh +```text --hash-filter @/M ``` @@ -854,7 +854,7 @@ This will stay constant across retries. 
Assuming the current directory contains `file1.jpg` through `file9.jpg`: -```sh +```console $ rclone lsf --hash-filter 0/4 . file1.jpg file5.jpg @@ -879,13 +879,13 @@ file5.jpg ##### Syncing the first quarter of files -```sh +```console rclone sync --hash-filter 1/4 source:path destination:path ``` ##### Checking a random 1% of files for integrity -```sh +```console rclone check --download --hash-filter @/100 source:path destination:path ``` @@ -901,7 +901,7 @@ on the destination which are excluded from the command. E.g. the scope of `rclone sync --interactive A: B:` can be restricted: -```sh +```console rclone --min-size 50k --delete-excluded sync A: B: ``` @@ -950,13 +950,13 @@ expressions](#regexp). For example if you wished to list only local files with a mode of `100664` you could do that with: -```sh +```console rclone lsf -M --files-only --metadata-include "mode=100664" . ``` Or if you wished to show files with an `atime`, `mtime` or `btime` at a given date: -```sh +```console rclone lsf -M --files-only --metadata-include "[abm]time=2022-12-16*" . ``` diff --git a/docs/content/ftp.md b/docs/content/ftp.md index 638ec2f9e..137b82fb7 100644 --- a/docs/content/ftp.md +++ b/docs/content/ftp.md @@ -20,7 +20,7 @@ a `/` it is relative to the home directory of the user. An empty path To create an FTP configuration named `remote`, run -```sh +```console rclone config ``` @@ -88,26 +88,26 @@ y/e/d> y To see all directories in the home directory of `remote` -```sh +```console rclone lsd remote: ``` Make a new directory -```sh +```console rclone mkdir remote:path/to/directory ``` List the contents of a directory -```sh +```console rclone ls remote:path/to/directory ``` Sync `/home/local/directory` to the remote directory, deleting any excess files in the directory. -```sh +```console rclone sync --interactive /home/local/directory remote:directory ``` @@ -124,7 +124,7 @@ Using [on-the-fly](#backend-path-to-dir) or such servers, without requiring any configuration in advance. The following are examples of that: -```sh +```console rclone lsf :ftp: --ftp-host=speedtest.tele2.net --ftp-user=anonymous --ftp-pass=$(rclone obscure dummy) rclone lsf :ftp,host=speedtest.tele2.net,user=anonymous,pass=$(rclone obscure dummy): ``` @@ -136,7 +136,7 @@ command to create a password string in the format required by the an already obscured string representation of the same password "dummy", and therefore works even in Windows Command Prompt: -```sh +```console rclone lsf :ftp: --ftp-host=speedtest.tele2.net --ftp-user=anonymous --ftp-pass=IXs2wc8OJOz7SYLBk47Ji1rHTmxM rclone lsf :ftp,host=speedtest.tele2.net,user=anonymous,pass=IXs2wc8OJOz7SYLBk47Ji1rHTmxM: ``` diff --git a/docs/content/gofile.md b/docs/content/gofile.md index 6d37101c7..2594e8b18 100644 --- a/docs/content/gofile.md +++ b/docs/content/gofile.md @@ -21,7 +21,7 @@ premium account. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -70,13 +70,13 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories and files in the top level of your Gofile -```sh +```console rclone lsf remote: ``` To copy a local directory to an Gofile directory called backup -```sh +```console rclone copy /home/source remote:backup ``` @@ -139,7 +139,7 @@ directory you wish rclone to display. 
You can do this with rclone -```sh +```console $ rclone lsf -Fip --dirs-only remote: d6341f53-ee65-4f29-9f59-d11e8070b2a0;Files/ f4f5c9b8-6ece-478b-b03e-4538edfe5a1c;Photos/ diff --git a/docs/content/googlecloudstorage.md b/docs/content/googlecloudstorage.md index fbd47f42a..f3eef23cd 100644 --- a/docs/content/googlecloudstorage.md +++ b/docs/content/googlecloudstorage.md @@ -17,7 +17,7 @@ through it. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -163,26 +163,26 @@ This remote is called `remote` and can now be used like this See all the buckets in your project -```sh +```console rclone lsd remote: ``` Make a new bucket -```sh +```console rclone mkdir remote:bucket ``` List the contents of a bucket -```sh +```console rclone ls remote:bucket ``` Sync `/home/local/directory` to the remote bucket, deleting any excess files in the bucket. -```sh +```console rclone sync --interactive /home/local/directory remote:bucket ``` @@ -224,7 +224,7 @@ If you already have a working service account, skip to step 3. #### 1. Create a service account using -```sh +```console gcloud iam service-accounts create gcs-read-only ``` @@ -232,7 +232,7 @@ You can re-use an existing service account as well (like the one created above) #### 2. Attach a Viewer (read-only) or User (read-write) role to the service account -```sh +```console $ PROJECT_ID=my-project $ gcloud --verbose iam service-accounts add-iam-policy-binding \ gcs-read-only@${PROJECT_ID}.iam.gserviceaccount.com \ @@ -249,7 +249,7 @@ pre-defined roles: #### 3. Get a temporary access key for the service account -```sh +```console $ gcloud auth application-default print-access-token \ --impersonate-service-account \ gcs-read-only@${PROJECT_ID}.iam.gserviceaccount.com @@ -262,13 +262,13 @@ ya29.c.c0ASRK0GbAFEewXD [truncated] hit `CTRL-C` when you see *waiting for code*. This will save the config without doing oauth flow -```sh +```console rclone config update ${REMOTE_NAME} access_token ya29.c.c0Axxxx ``` #### 5. Run rclone as usual -```sh +```console rclone ls dev-gcs:${MY_BUCKET}/ ``` diff --git a/docs/content/googlephotos.md b/docs/content/googlephotos.md index 2b87b235a..a440c84c6 100644 --- a/docs/content/googlephotos.md +++ b/docs/content/googlephotos.md @@ -27,7 +27,7 @@ through it. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -111,26 +111,26 @@ This remote is called `remote` and can now be used like this See all the albums in your photos -```sh +```console rclone lsd remote:album ``` Make a new album -```sh +```console rclone mkdir remote:album/newAlbum ``` List the contents of an album -```sh +```console rclone ls remote:album/newAlbum ``` Sync `/home/local/images` to the Google Photos, removing any excess files in the album. -```sh +```console rclone sync --interactive /home/local/image remote:album/newAlbum ``` @@ -213,7 +213,7 @@ may create new directories (albums) under `album`. If you copy files with a directory hierarchy in there then rclone will create albums with the `/` character in them. For example if you do -```sh +```console rclone copy /path/to/images remote:album/images ``` diff --git a/docs/content/gui.md b/docs/content/gui.md index d0c72beb8..65abc63a7 100644 --- a/docs/content/gui.md +++ b/docs/content/gui.md @@ -13,7 +13,7 @@ change. Run this command in a terminal and rclone will download and then display the GUI in a web browser. 
-```sh +```console rclone rcd --rc-web-gui ``` diff --git a/docs/content/hasher.md b/docs/content/hasher.md index 2d483d67f..ccfc141e1 100644 --- a/docs/content/hasher.md +++ b/docs/content/hasher.md @@ -115,9 +115,8 @@ Now you can use it as `Hasher2:subdir/file` instead of base remote. Hasher will transparently update cache with new checksums when a file is fully read or overwritten, like: -```sh +```console rclone copy External:path/file Hasher:dest/path - rclone cat Hasher:path/to/file > /dev/null ``` @@ -126,17 +125,15 @@ for a subtree is to **re-download** all files in the subtree. For example, use `hashsum --download` using **any** supported hashsum on the command line (we just care to re-read): -```sh +```console rclone hashsum MD5 --download Hasher:path/to/subtree > /dev/null - rclone backend dump Hasher:path/to/subtree ``` You can print or drop hashsum cache using custom backend commands: -```sh +```console rclone backend dump Hasher:dir/subdir - rclone backend drop Hasher: ``` @@ -145,7 +142,7 @@ rclone backend drop Hasher: Hasher supports two backend commands: generic SUM file `import` and faster but less consistent `stickyimport`. -```sh +```console rclone backend import Hasher:dir/subdir SHA1 /path/to/SHA1SUM [--checkers 4] ``` @@ -165,7 +162,7 @@ correspondingly. `--checkers` to make it faster. Or use `stickyimport` if you don't care about fingerprints and consistency. -```sh +```console rclone backend stickyimport hasher:path/to/data sha1 remote:/path/to/sum.sha1 ``` diff --git a/docs/content/hdfs.md b/docs/content/hdfs.md index 005228c16..70c4d505e 100644 --- a/docs/content/hdfs.md +++ b/docs/content/hdfs.md @@ -16,7 +16,7 @@ Paths are specified as `remote:` or `remote:path/to/dir`. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -86,19 +86,19 @@ This remote is called `remote` and can now be used like this See all the top level directories -```sh +```console rclone lsd remote: ``` List the contents of a directory -```sh +```console rclone ls remote:directory ``` Sync the remote `directory` to `/home/local/directory`, deleting any excess files. -```sh +```console rclone sync --interactive remote:directory /home/local/directory ``` @@ -109,7 +109,7 @@ or use the docker image from the tests: If you want to build the docker image -```sh +```console git clone https://github.com/rclone/rclone.git cd rclone/fstest/testserver/images/test-hdfs docker build --rm -t rclone/test-hdfs . @@ -117,7 +117,7 @@ docker build --rm -t rclone/test-hdfs . Or you can just use the latest one pushed -```sh +```console docker run --rm --name "rclone-hdfs" -p 127.0.0.1:9866:9866 -p 127.0.0.1:8020:8020 --hostname "rclone-hdfs" rclone/test-hdfs ``` diff --git a/docs/content/hidrive.md b/docs/content/hidrive.md index 316b0c073..6de047f0b 100644 --- a/docs/content/hidrive.md +++ b/docs/content/hidrive.md @@ -18,7 +18,7 @@ which you need to do in your browser. Here is an example of how to make a remote called `remote`. 
First run: -```sh +```console rclone config ``` @@ -85,19 +85,19 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories in top level of your HiDrive root folder -```sh +```console rclone lsd remote: ``` List all the files in your HiDrive filesystem -```sh +```console rclone ls remote: ``` To copy a local directory to a HiDrive directory called backup -```sh +```console rclone copy /home/source remote:backup ``` @@ -129,7 +129,7 @@ To fix this you will need to authorize rclone to access your HiDrive account aga Using -```sh +```console rclone config reconnect remote: ``` @@ -188,7 +188,7 @@ This works by prepending the contents of the `root_prefix` option to any paths accessed by rclone. For example, the following two ways to access the home directory are equivalent: -```sh +```console rclone lsd --hidrive-root-prefix="/users/test/" remote:path rclone lsd remote:/users/test/path ``` diff --git a/docs/content/http.md b/docs/content/http.md index 2f6aee587..5a79ac8ba 100644 --- a/docs/content/http.md +++ b/docs/content/http.md @@ -39,7 +39,7 @@ To just download a single file it is easier to use Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -94,19 +94,19 @@ This remote is called `remote` and can now be used like this See all the top level directories -```sh +```console rclone lsd remote: ``` List the contents of a directory -```sh +```console rclone ls remote:directory ``` Sync the remote `directory` to `/home/local/directory`, deleting any excess files. -```sh +```console rclone sync --interactive remote:directory /home/local/directory ``` @@ -127,13 +127,13 @@ No checksums are stored. Since the http remote only has one config parameter it is easy to use without a config file: -```sh +```console rclone lsd --http-url https://beta.rclone.org :http: ``` or: -```sh +```console rclone lsd :http,url='https://beta.rclone.org': ``` diff --git a/docs/content/iclouddrive.md b/docs/content/iclouddrive.md index d3e60f336..18cfebf12 100644 --- a/docs/content/iclouddrive.md +++ b/docs/content/iclouddrive.md @@ -22,7 +22,7 @@ or `rclone config`. Here is an example of how to make a remote called `iclouddrive`. First run: -```sh +```console rclone config ``` diff --git a/docs/content/imagekit.md b/docs/content/imagekit.md index ffa4f39a9..7792ace58 100644 --- a/docs/content/imagekit.md +++ b/docs/content/imagekit.md @@ -27,7 +27,7 @@ from the developer section. Now run -```sh +```console rclone config ``` @@ -88,19 +88,19 @@ y/e/d> y List directories in the top level of your Media Library -```sh +```console rclone lsd imagekit-media-library: ``` Make a new directory. -```sh +```console rclone mkdir imagekit-media-library:directory ``` List the contents of a directory. -```sh +```console rclone ls imagekit-media-library:directory ``` diff --git a/docs/content/install.md b/docs/content/install.md index b276dffdb..4b50b049e 100644 --- a/docs/content/install.md +++ b/docs/content/install.md @@ -29,13 +29,13 @@ signatures on the release. To install rclone on Linux/macOS/BSD systems, run: -```sh +```console sudo -v ; curl https://rclone.org/install.sh | sudo bash ``` For beta installation, run: -```sh +```console sudo -v ; curl https://rclone.org/install.sh | sudo bash -s beta ``` @@ -48,7 +48,7 @@ won't re-download if not needed. 
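If you prefer to review the install script before piping it to a shell, you can download it first and run it yourself; this is just a sketch and the local file name is arbitrary:

```console
curl -o install.sh https://rclone.org/install.sh
less install.sh
sudo bash install.sh
```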
Fetch and unpack -```sh +```console curl -O https://downloads.rclone.org/rclone-current-linux-amd64.zip unzip rclone-current-linux-amd64.zip cd rclone-*-linux-amd64 @@ -56,7 +56,7 @@ cd rclone-*-linux-amd64 Copy binary file -```sh +```console sudo cp rclone /usr/bin/ sudo chown root:root /usr/bin/rclone sudo chmod 755 /usr/bin/rclone @@ -64,7 +64,7 @@ sudo chmod 755 /usr/bin/rclone Install manpage -```sh +```console sudo mkdir -p /usr/local/share/man/man1 sudo cp rclone.1 /usr/local/share/man/man1/ sudo mandb @@ -72,7 +72,7 @@ sudo mandb Run `rclone config` to setup. See [rclone config docs](/docs/) for more details. -```sh +```console rclone config ``` @@ -80,7 +80,7 @@ rclone config ### Installation with brew {#macos-brew} -```sh +```console brew install rclone ``` @@ -98,7 +98,7 @@ developers so it may be out of date. Its current version is as below. On macOS, rclone can also be installed via [MacPorts](https://www.macports.org): -```sh +```console sudo port install rclone ``` @@ -116,19 +116,19 @@ notarized it is enough to download with `curl`. Download the latest version of rclone. -```sh +```console cd && curl -O https://downloads.rclone.org/rclone-current-osx-amd64.zip ``` Unzip the download and cd to the extracted folder. -```sh +```console unzip -a rclone-current-osx-amd64.zip && cd rclone-*-osx-amd64 ``` Move rclone to your $PATH. You will be prompted for your password. -```sh +```console sudo mkdir -p /usr/local/bin sudo mv rclone /usr/local/bin/ ``` @@ -137,13 +137,13 @@ sudo mv rclone /usr/local/bin/ Remove the leftover files. -```sh +```console cd .. && rm -rf rclone-*-osx-amd64 rclone-current-osx-amd64.zip ``` Run `rclone config` to setup. See [rclone config docs](/docs/) for more details. -```sh +```console rclone config ``` @@ -153,14 +153,14 @@ When downloading a binary with a web browser, the browser will set the macOS gatekeeper quarantine attribute. Starting from Catalina, when attempting to run `rclone`, a pop-up will appear saying: -```sh +```text "rclone" cannot be opened because the developer cannot be verified. macOS cannot verify that this app is free from malware. ``` The simplest fix is to run -```sh +```console xattr -d com.apple.quarantine rclone ``` @@ -274,7 +274,7 @@ The `:latest` tag will always point to the latest stable release. You can use the `:beta` tag to get the latest build from master. You can also use version tags, e.g. `:1.49.1`, `:1.49` or `:1`. -```sh +```console $ docker pull rclone/rclone:latest latest: Pulling from rclone/rclone Digest: sha256:0e0ced72671989bb837fea8e88578b3fc48371aa45d209663683e24cfdaa0e11 @@ -357,7 +357,7 @@ kill %1 Make sure you have [Snapd installed](https://snapcraft.io/docs/installing-snapd) -```sh +```console sudo snap install rclone ``` @@ -384,7 +384,7 @@ Go version 1.24 or newer is required, the latest release is recommended. You can get it from your package manager, or download it from [golang.org/dl](https://golang.org/dl/). Then you can run the following: -```sh +```console git clone https://github.com/rclone/rclone.git cd rclone go build @@ -398,7 +398,7 @@ in the same folder. As an initial check you can now run `./rclone version` Note that on macOS and Windows the [mount](https://rclone.org/commands/rclone_mount/) command will not be available unless you specify an additional build tag `cmount`. 
-```sh +```console go build -tags cmount ``` @@ -424,7 +424,7 @@ You may add arguments `-ldflags -s` to omit symbol table and debug information, making the executable file smaller, and `-trimpath` to remove references to local file system paths. The official rclone releases are built with both of these. -```sh +```console go build -trimpath -ldflags -s -tags cmount ``` @@ -435,7 +435,7 @@ or `fs.VersionSuffix` (to keep default number but customize the suffix). This can be done from the build command, by adding to the `-ldflags` argument value as shown below. -```sh +```console go build -trimpath -ldflags "-s -X github.com/rclone/rclone/fs.Version=v9.9.9-test" -tags cmount ``` @@ -446,7 +446,7 @@ It generates a Windows resource system object file, with extension .syso, e.g. `resource_windows_amd64.syso`, that will be automatically picked up by future build commands. -```sh +```console go run bin/resource_windows.go ``` @@ -458,7 +458,7 @@ override this version variable in the build command as described above, you need to do that also when generating the resource file, or else it will still use the value from the source. -```sh +```console go run bin/resource_windows.go -version v9.9.9-test ``` @@ -468,13 +468,13 @@ followed by additional commit details, embeds version information binary resourc on Windows, and copies the resulting rclone executable into your GOPATH bin folder (`$(go env GOPATH)/bin`, which corresponds to `~/go/bin/rclone` by default). -```sh +```console make ``` To include mount command on macOS and Windows with Makefile build: -```sh +```console make GOTAGS=cmount ``` @@ -491,7 +491,7 @@ The source will be stored it in the Go module cache, and the resulting executable will be in your GOPATH bin folder (`$(go env GOPATH)/bin`, which corresponds to `~/go/bin/rclone` by default). -```sh +```console go install github.com/rclone/rclone@latest ``` diff --git a/docs/content/internetarchive.md b/docs/content/internetarchive.md index cd01aa947..119878de1 100644 --- a/docs/content/internetarchive.md +++ b/docs/content/internetarchive.md @@ -20,20 +20,20 @@ Once you have made a remote, you can use it like this: Make a new item -```sh +```console rclone mkdir remote:item ``` List the contents of a item -```sh +```console rclone ls remote:item ``` Sync `/home/local/directory` to the remote item, deleting any excess files in the item. -```sh +```console rclone sync --interactive /home/local/directory remote:item ``` @@ -95,7 +95,7 @@ changeable, as they are created by the Internet Archive automatically. These auto-created files can be excluded from the sync using [metadata filtering](/filtering/#metadata). -```sh +```console rclone sync ... --metadata-exclude "source=metadata" --metadata-exclude "format=Metadata" ``` @@ -110,7 +110,7 @@ Most applies to the other providers as well, any differences are described [belo First run -```sh +```console rclone config ``` diff --git a/docs/content/jottacloud.md b/docs/content/jottacloud.md index 2268558a9..9a895b95d 100644 --- a/docs/content/jottacloud.md +++ b/docs/content/jottacloud.md @@ -178,7 +178,7 @@ supported by rclone, but the support will be removed in a future version. Here is an example of how to make a remote called `remote` with the default setup. 
First run: -```sh +```console rclone config ``` @@ -281,19 +281,19 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories in top level of your Jottacloud -```sh +```console rclone lsd remote: ``` List all the files in your Jottacloud -```sh +```console rclone ls remote: ``` To copy a local directory to an Jottacloud directory called backup -```sh +```console rclone copy /home/source remote:backup ``` diff --git a/docs/content/koofr.md b/docs/content/koofr.md index 8ef9641d1..15470be42 100644 --- a/docs/content/koofr.md +++ b/docs/content/koofr.md @@ -19,7 +19,7 @@ giving the password a nice name like `rclone` and clicking on generate. Here is an example of how to make a remote called `koofr`. First run: -```sh +```console rclone config ``` @@ -91,19 +91,19 @@ Once configured you can then use `rclone` like this, List directories in top level of your Koofr -```sh +```console rclone lsd koofr: ``` List all the files in your Koofr -```sh +```console rclone ls koofr: ``` To copy a local directory to an Koofr directory called backup -```sh +```console rclone copy /home/source koofr:backup ``` @@ -253,7 +253,7 @@ provides a Koofr API. Here is an example of how to make a remote called `ds`. First run: -```sh +```console rclone config ``` @@ -322,7 +322,7 @@ You may also want to use another, public or private storage provider that runs a Here is an example of how to make a remote called `other`. First run: -```sh +```console rclone config ``` diff --git a/docs/content/linkbox.md b/docs/content/linkbox.md index 2cb877046..5a2076071 100644 --- a/docs/content/linkbox.md +++ b/docs/content/linkbox.md @@ -14,7 +14,7 @@ Here is an example of making a remote for Linkbox. First run: -```sh +```console rclone config ``` diff --git a/docs/content/local.md b/docs/content/local.md index f44a4b1f3..83d5ecc82 100644 --- a/docs/content/local.md +++ b/docs/content/local.md @@ -8,7 +8,7 @@ versionIntroduced: "v0.91" Local paths are specified as normal filesystem paths, e.g. `/path/to/wherever`, so -```sh +```console rclone sync --interactive /home/source /tmp/destination ``` @@ -184,7 +184,7 @@ This flag applies to all commands. For example, supposing you have a directory structure like this -```sh +```console $ tree /tmp/a /tmp/a ├── b -> ../b @@ -196,7 +196,7 @@ $ tree /tmp/a Then you can see the difference with and without the flag like this -```sh +```console $ rclone ls /tmp/a 6 one 6 two/three @@ -204,7 +204,7 @@ $ rclone ls /tmp/a and -```sh +```console $ rclone -L ls /tmp/a 4174 expected 6 one @@ -227,7 +227,7 @@ This flag applies to all commands. 
For example, supposing you have a directory structure like this -```sh +```console $ tree /tmp/a /tmp/a ├── file1 -> ./file4 @@ -236,13 +236,13 @@ $ tree /tmp/a Copying the entire directory with '-l' -```sh +```console rclone copy -l /tmp/a/ remote:/tmp/a/ ``` The remote files are created with a `.rclonelink` suffix -```sh +```console $ rclone ls remote:/tmp/a 5 file1.rclonelink 14 file2.rclonelink @@ -250,7 +250,7 @@ $ rclone ls remote:/tmp/a The remote files will contain the target of the symbolic links -```sh +```console $ rclone cat remote:/tmp/a/file1.rclonelink ./file4 @@ -260,7 +260,7 @@ $ rclone cat remote:/tmp/a/file2.rclonelink Copying them back with '-l' -```sh +```console $ rclone copy -l remote:/tmp/a/ /tmp/b/ $ tree /tmp/b @@ -271,7 +271,7 @@ $ tree /tmp/b However, if copied back without '-l' -```sh +```console $ rclone copyto remote:/tmp/a/ /tmp/b/ $ tree /tmp/b @@ -282,7 +282,7 @@ $ tree /tmp/b If you want to copy a single file with `-l` then you must use the `.rclonelink` suffix. -```sh +```console $ rclone copy -l remote:/tmp/a/file1.rclonelink /tmp/c $ tree /tmp/c @@ -306,7 +306,7 @@ different file systems. For example if you have a directory hierarchy like this -```sh +```console root ├── disk1 - disk1 mounted on the root │   └── file3 - stored on disk1 @@ -319,13 +319,13 @@ root Using `rclone --one-file-system copy root remote:` will only copy `file1` and `file2`. E.g. -```sh +```console $ rclone -q --one-file-system ls root 0 file1 0 file2 ``` -```sh +```console $ rclone -q ls root 0 disk1/file3 0 disk2/file4 diff --git a/docs/content/mailru.md b/docs/content/mailru.md index 116ad7b6a..8390204e4 100644 --- a/docs/content/mailru.md +++ b/docs/content/mailru.md @@ -48,7 +48,7 @@ give an error like `oauth2: server response missing access_token`. Now run -```sh +```console rclone config ``` @@ -119,26 +119,26 @@ You can use the configured backend as shown below: See top level directories -```sh +```console rclone lsd remote: ``` Make a new directory -```sh +```console rclone mkdir remote:directory ``` List the contents of a directory -```sh +```console rclone ls remote:directory ``` Sync `/home/local/directory` to the remote path, deleting any excess files in the path. -```sh +```console rclone sync --interactive /home/local/directory remote:directory ``` diff --git a/docs/content/mega.md b/docs/content/mega.md index 4d61b1794..61e88cc84 100644 --- a/docs/content/mega.md +++ b/docs/content/mega.md @@ -27,7 +27,7 @@ Paths may be as deep as required, e.g. `remote:directory/subdirectory`. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -79,19 +79,19 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories in top level of your Mega -```sh +```console rclone lsd remote: ``` List all the files in your Mega -```sh +```console rclone ls remote: ``` To copy a local directory to an Mega directory called backup -```sh +```console rclone copy /home/source remote:backup ``` diff --git a/docs/content/memory.md b/docs/content/memory.md index 269ea3ac9..e1c2a21d1 100644 --- a/docs/content/memory.md +++ b/docs/content/memory.md @@ -50,7 +50,7 @@ y/e/d> y Because the memory backend isn't persistent it is most useful for testing or with an rclone server or rclone mount, e.g. 
-```sh +```console rclone mount :memory: /mnt/tmp rclone serve webdav :memory: rclone serve sftp :memory: diff --git a/docs/content/netstorage.md b/docs/content/netstorage.md index 65434eb96..ded4a14b1 100644 --- a/docs/content/netstorage.md +++ b/docs/content/netstorage.md @@ -18,7 +18,7 @@ For example, this is commonly configured with or without a CP code: See all buckets -```sh +```console rclone lsd remote: ``` @@ -31,7 +31,7 @@ Here's an example of how to make a remote called `ns1`. 1. To begin the interactive configuration process, enter this command: - ```sh + ```console rclone config ``` @@ -128,25 +128,25 @@ commands, visit . ### See contents of a directory in your project -```sh +```console rclone lsd ns1:/974012/testing/ ``` ### Sync the contents local with remote -```sh +```console rclone sync . ns1:/974012/testing/ ``` ### Upload local content to remote -```sh +```console rclone copy notes.txt ns1:/974012/testing/ ``` ### Delete content on remote -```sh +```console rclone delete ns1:/974012/testing/notes.txt ``` @@ -155,7 +155,7 @@ rclone delete ns1:/974012/testing/notes.txt Your credentials must have access to two CP codes on the same remote. You can't perform operations between different remotes. -```sh +```console rclone move ns1:/974012/testing/notes.txt ns1:/974450/testing2/ ``` diff --git a/docs/content/onedrive.md b/docs/content/onedrive.md index b7f77c7f6..da2722aef 100644 --- a/docs/content/onedrive.md +++ b/docs/content/onedrive.md @@ -18,7 +18,7 @@ you through it. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -112,19 +112,19 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories in top level of your OneDrive -```sh +```console rclone lsd remote: ``` List all the files in your OneDrive -```sh +```console rclone ls remote: ``` To copy a local directory to an OneDrive directory called backup -```sh +```console rclone copy /home/source remote:backup ``` diff --git a/docs/content/opendrive.md b/docs/content/opendrive.md index 21fba294e..dfac0aae1 100644 --- a/docs/content/opendrive.md +++ b/docs/content/opendrive.md @@ -14,7 +14,7 @@ Paths may be as deep as required, e.g. `remote:directory/subdirectory`. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -57,19 +57,19 @@ y/e/d> y List directories in top level of your OpenDrive -```sh +```console rclone lsd remote: ``` List all the files in your OpenDrive -```sh +```console rclone ls remote: ``` To copy a local directory to an OpenDrive directory called backup -```sh +```console rclone copy /home/source remote:backup ``` diff --git a/docs/content/oracleobjectstorage/_index.md b/docs/content/oracleobjectstorage/_index.md index aad34649c..849b89da5 100644 --- a/docs/content/oracleobjectstorage/_index.md +++ b/docs/content/oracleobjectstorage/_index.md @@ -18,7 +18,7 @@ You may put subdirectories in too, e.g. `remote:bucket/path/to/dir`. 
Sample command to transfer local artifacts to remote:bucket in oracle object storage: -```sh +```console rclone -vvv --progress --stats-one-line --max-stats-groups 10 --log-format date,time,UTC,longfile --fast-list --buffer-size 256Mi --oos-no-check-bucket --oos-upload-cutoff 10Mi --multi-thread-cutoff 16Mi --multi-thread-streams 3000 --transfers 3000 --checkers 64 --retries 2 --oos-chunk-size 10Mi --oos-upload-concurrency 10000 --oos-attempt-resume-upload --oos-leave-parts-on-error sync ./artifacts remote:bucket -vv ``` @@ -29,7 +29,7 @@ walks you through it. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -139,19 +139,19 @@ y/e/d> y See all buckets -```sh +```console rclone lsd remote: ``` Create a new bucket -```sh +```console rclone mkdir remote:bucket ``` List the contents of a bucket -```sh +```console rclone ls remote:bucket rclone ls remote:bucket --max-depth 1 ``` @@ -208,7 +208,7 @@ have to be stored and managed. Sample rclone configuration file for Authentication Provider Instance Principal: -```sh +```console [opc@rclone ~]$ cat ~/.config/rclone/rclone.conf [oos] type = oracleobjectstorage @@ -242,7 +242,7 @@ resources that are not compute instances such as To use resource principal ensure Rclone process is started with these environment variables set in its process. -```sh +```console export OCI_RESOURCE_PRINCIPAL_VERSION=2.2 export OCI_RESOURCE_PRINCIPAL_REGION=us-ashburn-1 export OCI_RESOURCE_PRINCIPAL_PRIVATE_PEM=/usr/share/model-server/key.pem @@ -268,7 +268,7 @@ Workload Identity, see [Granting Workloads Access to OCI Resources](https://docs To use workload identity, ensure Rclone is started with these environment variables set in its process. -```sh +```console export OCI_RESOURCE_PRINCIPAL_VERSION=2.2 export OCI_RESOURCE_PRINCIPAL_REGION=us-ashburn-1 ``` diff --git a/docs/content/oracleobjectstorage/tutorial_mount.md b/docs/content/oracleobjectstorage/tutorial_mount.md index e24c50f17..c50937dfa 100644 --- a/docs/content/oracleobjectstorage/tutorial_mount.md +++ b/docs/content/oracleobjectstorage/tutorial_mount.md @@ -21,7 +21,7 @@ In oracle linux 8, Rclone can be installed from [OL8_Developer](https://yum.oracle.com/repo/OracleLinux/OL8/developer/x86_64/index.html) Yum Repo, Please enable the repo if not enabled already. -```sh +```console [opc@base-inst-boot ~]$ sudo yum-config-manager --enable ol8_developer [opc@base-inst-boot ~]$ sudo yum install -y rclone [opc@base-inst-boot ~]$ sudo yum install -y fuse @@ -48,7 +48,7 @@ To run it as a mount helper you should symlink rclone binary to /sbin/mount.rclo and optionally /usr/bin/rclonefs, e.g. `ln -s /usr/bin/rclone /sbin/mount.rclone`. rclone will detect it and translate command-line arguments appropriately. -```sh +```console ln -s /usr/bin/rclone /sbin/mount.rclone ``` @@ -65,7 +65,7 @@ Rclone configuration file needs to have 3 remote sections, one section of each of above 3 buckets. Create a configuration file in a accessible location that rclone program can read. -```sh +```console [opc@base-inst-boot ~]$ mkdir -p /etc/rclone [opc@base-inst-boot ~]$ sudo touch /etc/rclone/rclone.conf @@ -135,7 +135,7 @@ Create a dynamic group say rclone-dynamic-group that the oci compute instance becomes a member of the below group says all instances belonging to compartment a...c is member of this dynamic-group. 
-```sh +```console any {instance.compartment.id = '', instance.compartment.id = '', instance.compartment.id = '' @@ -146,7 +146,7 @@ Now that you have a dynamic group, you need to add a policy allowing what permissions this dynamic-group has. In our case, we want this dynamic-group to access object-storage. So create a policy now. -```sh +```text allow dynamic-group rclone-dynamic-group to manage object-family in compartment compartment-a allow dynamic-group rclone-dynamic-group to manage object-family in compartment compartment-b allow dynamic-group rclone-dynamic-group to manage object-family in compartment compartment-c @@ -157,19 +157,20 @@ if not please troubleshoot any mistakes you did so far. Please note, identity can take upto a minute to ensure policy gets reflected. ## Step 4: Setup Mount Folders + Let's assume you have to mount 3 buckets, bucket-a, bucket-b, bucket-c at path /opt/mnt/bucket-a, /opt/mnt/bucket-b, /opt/mnt/bucket-c respectively. Create the mount folder and set its ownership to desired user, group. -```sh +```console [opc@base-inst-boot ~]$ sudo mkdir /opt/mnt [opc@base-inst-boot ~]$ sudo chown -R opc:adm /opt/mnt ``` Set chmod permissions to user, group, others as desired for each mount path -```sh +```console [opc@base-inst-boot ~]$ sudo chmod 764 /opt/mnt [opc@base-inst-boot ~]$ ls -al /opt/mnt/ total 0 @@ -205,7 +206,7 @@ for solutions to make mount more reliable. First lets understand the rclone mount flags and some global flags for troubleshooting. -```sh +```console rclone mount \ ossa:bucket-a \ # Remote:bucket-name /opt/mnt/bucket-a \ # Local mount folder @@ -289,7 +290,7 @@ changed relative to a remote file. Fingerprints are made from: Add this entry in /etc/fstab: -```sh +```text ossa:bucket-a /opt/mnt/bucket-a rclone rw,umask=0117,nofail,_netdev,args2env,config=/etc/rclone/rclone.conf,uid=1000,gid=4, file_perms=0760,dir_perms=0760,allow_other,vfs_cache_mode=writes,cache_dir=/tmp/rclone/cache 0 0 ``` @@ -303,7 +304,7 @@ quotes of the same type should be doubled. Then run sudo mount -av -```sh +```console [opc@base-inst-boot ~]$ sudo mount -av / : ignored /boot : already mounted @@ -320,7 +321,7 @@ If you are familiar with configuring systemd unit files, you can also configure the each rclone mount into a systemd units file. various examples in git search: -```sh +```console tee "/etc/systemd/system/rclonebucketa.service" > /dev/null <" imported gpg: Total number processed: 1 @@ -112,7 +112,7 @@ gpg: imported: 1 Then check the signature: -```sh +```console $ gpg --verify SHA256SUMS gpg: Signature made Mon 17 Jul 2023 15:03:17 BST gpg: using DSA key FBF737ECE9F8AB18604BD2AC93935E02FF3B54FA @@ -128,14 +128,14 @@ Repeat for `MD5SUMS` and `SHA1SUMS` if desired. Now that we know the signatures on the hashes are OK we can verify the binaries match the hashes, completing the verification. 
-```sh +```console $ sha256sum -c SHA256SUMS 2>&1 | grep OK rclone-v1.63.1-windows-amd64.zip: OK ``` Or do the check with rclone -```sh +```console $ rclone hashsum sha256 -C SHA256SUMS rclone-v1.63.1-windows-amd64.zip 2023/09/11 10:53:58 NOTICE: SHA256SUMS: improperly formatted checksum line 0 2023/09/11 10:53:58 NOTICE: SHA256SUMS: improperly formatted checksum line 1 @@ -150,7 +150,7 @@ $ rclone hashsum sha256 -C SHA256SUMS rclone-v1.63.1-windows-amd64.zip You can verify the signatures and hashes in one command line like this: -```sh +```console $ h=$(gpg --decrypt SHA256SUMS) && echo "$h" | sha256sum - -c --ignore-missing gpg: Signature made Mon 17 Jul 2023 15:03:17 BST gpg: using DSA key FBF737ECE9F8AB18604BD2AC93935E02FF3B54FA diff --git a/docs/content/remote_setup.md b/docs/content/remote_setup.md index e5c1cd052..3e3a68c0b 100644 --- a/docs/content/remote_setup.md +++ b/docs/content/remote_setup.md @@ -81,13 +81,13 @@ sure). Start by running [rclone config](/commands/rclone_config) to create the configuration file on your desktop machine. -```sh +```console rclone config ``` Then locate the file by running [rclone config file](/commands/rclone_config_file). -```sh +```console $ rclone config file Configuration file is stored at: /home/user/.rclone.conf @@ -103,7 +103,7 @@ If you have an SSH client installed on your local machine, you can set up an SSH tunnel to redirect the port 53682 into the headless machine by using the following command: -```sh +```console ssh -L localhost:53682:localhost:53682 username@remote_server ``` diff --git a/docs/content/s3.md b/docs/content/s3.md index 51a034a4e..12a254d2b 100644 --- a/docs/content/s3.md +++ b/docs/content/s3.md @@ -68,26 +68,26 @@ you can use it like this: See all buckets -```sh +```console rclone lsd remote: ``` Make a new bucket -```sh +```console rclone mkdir remote:bucket ``` List the contents of a bucket -```sh +```console rclone ls remote:bucket ``` Sync `/home/local/directory` to the remote bucket, deleting any excess files in the bucket. -```sh +```console rclone sync --interactive /home/local/directory remote:bucket ``` @@ -98,7 +98,7 @@ Most applies to the other providers as well, any differences are described [belo First run -```sh +```console rclone config ``` @@ -329,7 +329,7 @@ metadata `X-Amz-Meta-Md5chksum` which is a base64 encoded MD5 hash (in the same format as is required for `Content-MD5`). You can use base64 -d and hexdump to check this value manually: -```sh +```console echo 'VWTGdNx3LyXQDfA0e2Edxw==' | base64 -d | hexdump ``` @@ -404,7 +404,7 @@ individually. This takes one API call per directory. Using the memory first using a smaller number of API calls (one per 1000 objects). See the [rclone docs](/docs/#fast-list) for more details. -```sh +```console rclone sync --fast-list --checksum /path/to/source s3:bucket ``` @@ -419,7 +419,7 @@ instead of through directory listings. You can do a "top-up" sync very cheaply by using `--max-age` and `--no-traverse` to copy only recent files, eg -```sh +```console rclone copy --max-age 24h --no-traverse /path/to/source s3:bucket ``` @@ -448,7 +448,7 @@ the objects, as no data is transferred. For rclone to use server-side copy, you must use the same remote for the source and destination. -```sh +```console rclone copy s3:source-bucket s3:destination-bucket ``` @@ -469,7 +469,7 @@ For example, with AWS S3, if you can increase the number of checkers to values like 200. If you are doing a server-side copy, you can also increase the number of transfers to 200. 
-```sh +```console rclone sync --transfers 200 --checkers 200 --checksum s3:source-bucket s3:destination-bucket ``` @@ -589,7 +589,7 @@ version followed by a `cleanup` of the old versions. Show current version and all the versions with `--s3-versions` flag. -```sh +```console $ rclone -q ls s3:cleanup-test 9 one.txt @@ -602,7 +602,7 @@ $ rclone -q --s3-versions ls s3:cleanup-test Retrieve an old version -```sh +```console $ rclone -q --s3-versions copy s3:cleanup-test/one-v2016-07-04-141003-000.txt /tmp $ ls -l /tmp/one-v2016-07-04-141003-000.txt @@ -611,7 +611,7 @@ $ ls -l /tmp/one-v2016-07-04-141003-000.txt Clean up all the old versions and show that they've gone. -```sh +```console $ rclone -q backend cleanup-hidden s3:cleanup-test $ rclone -q ls s3:cleanup-test @@ -627,7 +627,7 @@ When using `--s3-versions` flag rclone is relying on the file name to work out whether the objects are versions or not. Versions' names are created by inserting timestamp between file name and its extension. -```sh +```text 9 file.txt 8 file-v2023-07-17-161032-000.txt 16 file-v2023-06-15-141003-000.txt @@ -2598,7 +2598,7 @@ upload_cutoff = 0 Here is an example of making an [Alibaba Cloud (Aliyun) OSS](https://www.alibabacloud.com/product/oss/) configuration. First run: -```sh +```console rclone config ``` @@ -2875,7 +2875,7 @@ use the secret key as `xxxxxx/xxxx` it will work fine. Here is an example of making an [China Mobile Ecloud Elastic Object Storage (EOS)](https:///ecloud.10086.cn/home/product-introduction/eos/) configuration. First run: -```sh +```console rclone config ``` @@ -3131,7 +3131,7 @@ services. Here is an example of making a Cloudflare R2 configuration. First run: -```sh +```console rclone config ``` @@ -3916,7 +3916,7 @@ v2_auth> Here is an example of making an [IDrive e2](https://www.idrive.com/e2/) configuration. First run: -```sh +```console rclone config ``` @@ -4435,7 +4435,7 @@ leviia s3 Here is an example of making a [Liara Object Storage](https://liara.ir/landing/object-storage) configuration. First run: -```sh +```console rclone config ``` @@ -4538,7 +4538,7 @@ storage_class = Here is an example of making a [Linode Object Storage](https://www.linode.com/products/object-storage/) configuration. First run: -```sh +```console rclone config ``` @@ -4695,7 +4695,7 @@ endpoint = eu-central-1.linodeobjects.com Here is an example of making a [Magalu Object Storage](https://magalu.cloud/object-storage/) configuration. First run: -```sh +```console rclone config ``` @@ -4818,7 +4818,7 @@ included in existing Pro plans. Here is an example of making a configuration. First run: -```sh +```console rclone config ``` @@ -5361,7 +5361,7 @@ acl = private Here is an example of making a [Petabox](https://petabox.io/) configuration. First run: -```sh +```console rclone config ``` @@ -5532,7 +5532,7 @@ To configure rclone for Pure Storage FlashBlade: First run: -```sh +```console rclone config ``` @@ -5833,7 +5833,7 @@ FileLu S5 is designed for scalability, security, and simplicity, with predictabl Here is an example of making a configuration. First run: -```sh +```console rclone config ``` diff --git a/docs/content/seafile.md b/docs/content/seafile.md index e2f5ea74b..b15044b04 100644 --- a/docs/content/seafile.md +++ b/docs/content/seafile.md @@ -30,7 +30,7 @@ There are two distinct modes you can setup your remote: Here is an example of making a seafile configuration for a user with **no** two-factor authentication. 
First run -```sh +```console rclone config ``` @@ -108,26 +108,26 @@ server and can now be used like this: See all libraries -```sh +```console rclone lsd seafile: ``` Create a new library -```sh +```console rclone mkdir seafile:library ``` List the contents of a library -```sh +```console rclone ls seafile:library ``` Sync `/home/local/directory` to the remote library, deleting any excess files in the library. -```sh +```console rclone sync --interactive /home/local/directory seafile:library ``` @@ -215,26 +215,26 @@ pointing at the root of the library `My Library`: See all files in the library: -```sh +```console rclone lsd seafile: ``` Create a new directory inside the library -```sh +```console rclone mkdir seafile:directory ``` List the contents of a directory -```sh +```console rclone ls seafile:directory ``` Sync `/home/local/directory` to the remote library, deleting any excess files in the library. -```sh +```console rclone sync --interactive /home/local/directory seafile: ``` @@ -264,16 +264,16 @@ as they can't be used in JSON strings. Rclone supports generating share links for non-encrypted libraries only. They can either be for a file or a directory: -```sh -rclone link seafile:seafile-tutorial.doc +```console +$ rclone link seafile:seafile-tutorial.doc http://my.seafile.server/f/fdcd8a2f93f84b8b90f4/ ``` or if run on a directory you will get: -```sh -rclone link seafile:dir +```console +$ rclone link seafile:dir http://my.seafile.server/d/9ea2455f6f55478bbb0d/ ``` diff --git a/docs/content/sftp.md b/docs/content/sftp.md index ab717bdb6..f53541892 100644 --- a/docs/content/sftp.md +++ b/docs/content/sftp.md @@ -42,7 +42,7 @@ the server, see [shell access considerations](#shell-access-considerations). Here is an example of making an SFTP configuration. First run -```sh +```console rclone config ``` @@ -100,39 +100,39 @@ This remote is called `remote` and can now be used like this: See all directories in the home directory -```sh +```console rclone lsd remote: ``` See all directories in the root directory -```sh +```console rclone lsd remote:/ ``` Make a new directory -```sh +```console rclone mkdir remote:path/to/directory ``` List the contents of a directory -```sh +```console rclone ls remote:path/to/directory ``` Sync `/home/local/directory` to the remote directory, deleting any excess files in the directory. -```sh +```console rclone sync --interactive /home/local/directory remote:directory ``` Mount the remote path `/srv/www-data/` to the local path `/mnt/www-data` -```sh +```console rclone mount remote:/srv/www-data/ /mnt/www-data ``` @@ -158,7 +158,7 @@ key_pem = -----BEGIN RSA PRIVATE KEY-----\nMaMbaIXtE\n0gAMbMbaSsd\nMbaass\n----- This will generate it correctly for key_pem for use in the config: -```sh +```console awk '{printf "%s\\n", $0}' < ~/.ssh/id_rsa ``` @@ -202,7 +202,7 @@ merged file in both places. Note: the cert must come first in the file. e.g. -```sh +```console cat id_rsa-cert.pub id_rsa > merged_key ``` @@ -229,7 +229,7 @@ known_hosts_file = ~/.ssh/known_hosts Alternatively you can create your own known hosts file like this: -```sh +```console ssh-keyscan -t dsa,rsa,ecdsa,ed25519 example.com >> known_hosts ``` @@ -272,13 +272,13 @@ Note that there seem to be various problems with using an ssh-agent on macOS due to recent changes in the OS. The most effective work-around seems to be to start an ssh-agent in each session, e.g. 
-```sh +```console eval `ssh-agent -s` && ssh-add -A ``` And then at the end of the session -```sh +```console eval `ssh-agent -k` ``` diff --git a/docs/content/sharefile.md b/docs/content/sharefile.md index c40105f9c..eef5ecdc6 100644 --- a/docs/content/sharefile.md +++ b/docs/content/sharefile.md @@ -17,7 +17,7 @@ through it. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -96,19 +96,19 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories in top level of your ShareFile -```sh +```console rclone lsd remote: ``` List all the files in your ShareFile -```sh +```console rclone ls remote: ``` To copy a local directory to an ShareFile directory called backup -```sh +```console rclone copy /home/source remote:backup ``` diff --git a/docs/content/sia.md b/docs/content/sia.md index 2ffdd0233..35b11d290 100644 --- a/docs/content/sia.md +++ b/docs/content/sia.md @@ -61,7 +61,7 @@ Notes: Here is an example of how to make a `sia` remote called `mySia`. First, run: -```sh +```console rclone config ``` @@ -117,19 +117,19 @@ Once configured, you can then use `rclone` like this: - List directories in top level of your Sia storage - ```sh + ```console rclone lsd mySia: ``` - List all the files in your Sia storage - ```sh + ```console rclone ls mySia: ``` - Upload a local directory to the Sia directory called *backup* - ```sh + ```console rclone copy /home/source mySia:backup ``` diff --git a/docs/content/smb.md b/docs/content/smb.md index 7fcd91266..1ef15a7eb 100644 --- a/docs/content/smb.md +++ b/docs/content/smb.md @@ -36,7 +36,7 @@ Here is an example of making a SMB configuration. First run -```sh +```console rclone config ``` diff --git a/docs/content/storj.md b/docs/content/storj.md index 53ab22010..c133b1904 100644 --- a/docs/content/storj.md +++ b/docs/content/storj.md @@ -119,7 +119,7 @@ To make a new Storj configuration you need one of the following: Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -333,7 +333,7 @@ Once configured you can then use `rclone` like this. Use the `mkdir` command to create new bucket, e.g. `bucket`. -```sh +```console rclone mkdir remote:bucket ``` @@ -341,7 +341,7 @@ rclone mkdir remote:bucket Use the `lsf` command to list all buckets. -```sh +```console rclone lsf remote: ``` @@ -351,24 +351,32 @@ Note the colon (`:`) character at the end of the command line. Use the `rmdir` command to delete an empty bucket. - rclone rmdir remote:bucket +```console +rclone rmdir remote:bucket +``` Use the `purge` command to delete a non-empty bucket with all its content. - rclone purge remote:bucket +```console +rclone purge remote:bucket +``` ### Upload objects Use the `copy` command to upload an object. - rclone copy --progress /home/local/directory/file.ext remote:bucket/path/to/dir/ +```console +rclone copy --progress /home/local/directory/file.ext remote:bucket/path/to/dir/ +``` The `--progress` flag is for displaying progress information. Remove it if you don't need this information. Use a folder in the local path to upload all its objects. - rclone copy --progress /home/local/directory/ remote:bucket/path/to/dir/ +```console +rclone copy --progress /home/local/directory/ remote:bucket/path/to/dir/ +``` Only modified files will be copied. @@ -376,57 +384,70 @@ Only modified files will be copied. Use the `ls` command to list recursively all objects in a bucket. 
-```sh +```console rclone ls remote:bucket ``` - Add the folder to the remote path to list recursively all objects in this folder. -```sh -rclone ls remote:bucket -``` +```console +$ rclone ls remote:bucket /path/to/dir/ +``` Use the `lsf` command to list non-recursively all objects in a bucket or a folder. - rclone lsf remote:bucket/path/to/dir/ +```console +rclone lsf remote:bucket/path/to/dir/ +``` ### Download objects Use the `copy` command to download an object. - rclone copy --progress remote:bucket/path/to/dir/file.ext /home/local/directory/ +```console +rclone copy --progress remote:bucket/path/to/dir/file.ext /home/local/directory/ +``` The `--progress` flag is for displaying progress information. Remove it if you don't need this information. Use a folder in the remote path to download all its objects. - rclone copy --progress remote:bucket/path/to/dir/ /home/local/directory/ +```console +rclone copy --progress remote:bucket/path/to/dir/ /home/local/directory/ +``` ### Delete objects Use the `deletefile` command to delete a single object. - rclone deletefile remote:bucket/path/to/dir/file.ext +```console +rclone deletefile remote:bucket/path/to/dir/file.ext +``` Use the `delete` command to delete all object in a folder. - rclone delete remote:bucket/path/to/dir/ +```console +rclone delete remote:bucket/path/to/dir/ +``` ### Print the total size of objects Use the `size` command to print the total size of objects in a bucket or a folder. - rclone size remote:bucket/path/to/dir/ +```console +rclone size remote:bucket/path/to/dir/ +``` ### Sync two Locations Use the `sync` command to sync the source to the destination, changing the destination only, deleting any excess files. - rclone sync --interactive --progress /home/local/directory/ remote:bucket/path/to/dir/ +```console +rclone sync --interactive --progress /home/local/directory/ remote:bucket/path/to/dir/ +``` The `--progress` flag is for displaying progress information. Remove it if you don't need this information. @@ -436,15 +457,21 @@ to see exactly what would be copied and deleted. The sync can be done also from Storj to the local file system. - rclone sync --interactive --progress remote:bucket/path/to/dir/ /home/local/directory/ +```console +rclone sync --interactive --progress remote:bucket/path/to/dir/ /home/local/directory/ +``` Or between two Storj buckets. - rclone sync --interactive --progress remote-us:bucket/path/to/dir/ remote-europe:bucket/path/to/dir/ +```console +rclone sync --interactive --progress remote-us:bucket/path/to/dir/ remote-europe:bucket/path/to/dir/ +``` Or even between another cloud storage and Storj. - rclone sync --interactive --progress s3:bucket/path/to/dir/ storj:bucket/path/to/dir/ +```console +rclone sync --interactive --progress s3:bucket/path/to/dir/ storj:bucket/path/to/dir/ +``` ## Limitations diff --git a/docs/content/sugarsync.md b/docs/content/sugarsync.md index 2075e00c7..deb13145d 100644 --- a/docs/content/sugarsync.md +++ b/docs/content/sugarsync.md @@ -17,7 +17,7 @@ can do with rclone. `rclone config` walks you through it. Here is an example of how to make a remote called `remote`. 
First run: -```sh +```console rclone config ``` @@ -82,19 +82,19 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories (sync folders) in top level of your SugarSync -```sh +```console rclone lsd remote: ``` List all the files in your SugarSync folder "Test" -```sh +```console rclone ls remote:Test ``` To copy a local directory to an SugarSync folder called backup -```sh +```console rclone copy /home/source remote:backup ``` diff --git a/docs/content/swift.md b/docs/content/swift.md index 4c221f876..fd5e0c2ca 100644 --- a/docs/content/swift.md +++ b/docs/content/swift.md @@ -23,7 +23,7 @@ command.) You may put subdirectories in too, e.g. `remote:container/path/to/dir Here is an example of making a swift configuration. First run -```sh +```console rclone config ``` @@ -126,26 +126,26 @@ This remote is called `remote` and can now be used like this See all containers -```sh +```console rclone lsd remote: ``` Make a new container -```sh +```console rclone mkdir remote:container ``` List the contents of a container -```sh +```console rclone ls remote:container ``` Sync `/home/local/directory` to the remote container, deleting any excess files in the container. -```sh +```console rclone sync --interactive /home/local/directory remote:container ``` diff --git a/docs/content/ulozto.md b/docs/content/ulozto.md index 823d1c4d2..18eda03a3 100644 --- a/docs/content/ulozto.md +++ b/docs/content/ulozto.md @@ -17,7 +17,7 @@ The initial setup for Uloz.to involves filling in the user credentials. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -77,19 +77,19 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List folders in root level folder: -```sh +```console rclone lsd remote: ``` List all the files in your root folder: -```sh +```console rclone ls remote: ``` To copy a local folder to a Uloz.to folder called backup: -```sh +```console rclone copy /home/source remote:backup ``` diff --git a/docs/content/union.md b/docs/content/union.md index 0a6fe6291..b3107c9a1 100644 --- a/docs/content/union.md +++ b/docs/content/union.md @@ -35,7 +35,7 @@ mydrive:private/backup/../desktop`. Here is an example of how to make a union called `remote` for local folders. First run: -```sh +```console rclone config ``` @@ -101,20 +101,20 @@ Once configured you can then use `rclone` like this, List directories in top level in `remote1:dir1`, `remote2:dir2` and `remote3:dir3` -```sh +```console rclone lsd remote: ``` List all the files in `remote1:dir1`, `remote2:dir2` and `remote3:dir3` -```sh +```console rclone ls remote: ``` Copy another local directory to the union directory called source, which will be placed into `remote3:dir3` -```sh +```console rclone copy C:\source remote:source ``` diff --git a/docs/content/uptobox.md b/docs/content/uptobox.md index 2e4ca3617..58f10756b 100644 --- a/docs/content/uptobox.md +++ b/docs/content/uptobox.md @@ -22,7 +22,7 @@ it in your [account settings](https://uptobox.com/my_account). Here is an example of how to make a remote called `remote` with the default setup. 
First run: -```sh +```console rclone config ``` @@ -76,19 +76,19 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories in top level of your Uptobox -```sh +```console rclone lsd remote: ``` List all the files in your Uptobox -```sh +```console rclone ls remote: ``` To copy a local directory to an Uptobox directory called backup -```sh +```console rclone copy /home/source remote:backup ``` diff --git a/docs/content/webdav.md b/docs/content/webdav.md index 127f3f79b..45490ea23 100644 --- a/docs/content/webdav.md +++ b/docs/content/webdav.md @@ -18,7 +18,7 @@ connecting to then rclone can enable extra features. Here is an example of how to make a remote called `remote`. First run: -```sh +```console rclone config ``` @@ -93,19 +93,19 @@ Once configured you can then use `rclone` like this (replace `remote` with the n List directories in top level of your WebDAV -```sh +```console rclone lsd remote: ``` List all the files in your WebDAV -```sh +```console rclone ls remote: ``` To copy a local directory to an WebDAV directory called backup -```sh +```console rclone copy /home/source remote:backup ``` diff --git a/docs/content/yandex.md b/docs/content/yandex.md index 9eb35f653..70174cf8b 100644 --- a/docs/content/yandex.md +++ b/docs/content/yandex.md @@ -12,7 +12,7 @@ versionIntroduced: "v1.26" Here is an example of making a yandex configuration. First run -```sh +```console rclone config ``` @@ -73,26 +73,26 @@ Once configured you can then use `rclone` like this (replace `remote` with the n See top level directories -```sh +```console rclone lsd remote: ``` Make a new directory -```sh +```console rclone mkdir remote:directory ``` List the contents of a directory -```sh +```console rclone ls remote:directory ``` Sync `/home/local/directory` to the remote path, deleting any excess files in the path. -```sh +```console rclone sync --interactive /home/local/directory remote:directory ``` diff --git a/docs/content/zoho.md b/docs/content/zoho.md index df4efee97..a5efd3c50 100644 --- a/docs/content/zoho.md +++ b/docs/content/zoho.md @@ -13,7 +13,7 @@ created by [Zoho](https://zoho.com). Here is an example of making a zoho configuration. First run -```sh +```console rclone config ``` @@ -93,26 +93,26 @@ Once configured you can then use `rclone` like this (replace `remote` with the n See top level directories -```sh +```console rclone lsd remote: ``` Make a new directory -```sh +```console rclone mkdir remote:directory ``` List the contents of a directory -```sh +```console rclone ls remote:directory ``` Sync `/home/local/directory` to the remote path, deleting any excess files in the path. -```sh +```console rclone sync --interactive /home/local/directory remote:directory ``` diff --git a/lib/http/auth.go b/lib/http/auth.go index babd91163..d883cba64 100644 --- a/lib/http/auth.go +++ b/lib/http/auth.go @@ -34,7 +34,7 @@ authentication. Bcrypt is recommended. 
To create an htpasswd file: -` + "```sh" + ` +` + "```console" + ` touch htpasswd htpasswd -B htpasswd user htpasswd -B htpasswd anotherUser diff --git a/lib/http/server.go b/lib/http/server.go index 83d0162dd..835030cf7 100644 --- a/lib/http/server.go +++ b/lib/http/server.go @@ -90,7 +90,7 @@ It can be configured with .socket and .service unit files as described in Socket activation can be tested ad-hoc with the ` + "`systemd-socket-activate`" + `command -` + "```sh" + ` +` + "```console" + ` systemd-socket-activate -l 8000 -- rclone serve ` + "```" + ` diff --git a/lib/transform/gen_help.go b/lib/transform/gen_help.go index ec93ea2a0..9abd65a50 100644 --- a/lib/transform/gen_help.go +++ b/lib/transform/gen_help.go @@ -97,7 +97,7 @@ func (e example) output() string { func sprintExamples() string { s := "Examples:\n" for _, e := range examples { - s += fmt.Sprintf("\n```sh\n%s\n", e.command()) + s += fmt.Sprintf("\n```console\n%s\n", e.command()) s += fmt.Sprintf("// Output: %s\n```\n", e.output()) } return s diff --git a/lib/transform/transform.md b/lib/transform/transform.md index d1c674b30..42a31ce09 100644 --- a/lib/transform/transform.md +++ b/lib/transform/transform.md @@ -151,82 +151,82 @@ SquareBracket Examples: -```sh -rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,uppercase" -// Output: STORIES/THE QUICK BROWN FOX!.TXT +```console +$ rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,uppercase" +STORIES/THE QUICK BROWN FOX!.TXT ``` -```sh -rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,replace=Fox:Turtle" --name-transform "all,replace=Quick:Slow" -// Output: stories/The Slow Brown Turtle!.txt +```console +$ rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,replace=Fox:Turtle" --name-transform "all,replace=Quick:Slow" +stories/The Slow Brown Turtle!.txt ``` -```sh -rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,base64encode" -// Output: c3Rvcmllcw==/VGhlIFF1aWNrIEJyb3duIEZveCEudHh0 +```console +$ rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,base64encode" +c3Rvcmllcw==/VGhlIFF1aWNrIEJyb3duIEZveCEudHh0 ``` -```sh -rclone convmv "c3Rvcmllcw==/VGhlIFF1aWNrIEJyb3duIEZveCEudHh0" --name-transform "all,base64decode" -// Output: stories/The Quick Brown Fox!.txt +```console +$ rclone convmv "c3Rvcmllcw==/VGhlIFF1aWNrIEJyb3duIEZveCEudHh0" --name-transform "all,base64decode" +stories/The Quick Brown Fox!.txt ``` -```sh -rclone convmv "stories/The Quick Brown 🦊 Fox Went to the Café!.txt" --name-transform "all,nfc" -// Output: stories/The Quick Brown 🦊 Fox Went to the Café!.txt +```console +$ rclone convmv "stories/The Quick Brown 🦊 Fox Went to the Café!.txt" --name-transform "all,nfc" +stories/The Quick Brown 🦊 Fox Went to the Café!.txt ``` -```sh -rclone convmv "stories/The Quick Brown 🦊 Fox Went to the Café!.txt" --name-transform "all,nfd" -// Output: stories/The Quick Brown 🦊 Fox Went to the Café!.txt +```console +$ rclone convmv "stories/The Quick Brown 🦊 Fox Went to the Café!.txt" --name-transform "all,nfd" +stories/The Quick Brown 🦊 Fox Went to the Café!.txt ``` -```sh -rclone convmv "stories/The Quick Brown 🦊 Fox!.txt" --name-transform "all,ascii" -// Output: stories/The Quick Brown Fox!.txt +```console +$ rclone convmv "stories/The Quick Brown 🦊 Fox!.txt" --name-transform "all,ascii" +stories/The Quick Brown Fox!.txt ``` -```sh -rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,trimsuffix=.txt" -// Output: stories/The Quick 
Brown Fox! +```console +$ rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,trimsuffix=.txt" +stories/The Quick Brown Fox! ``` -```sh -rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,prefix=OLD_" -// Output: OLD_stories/OLD_The Quick Brown Fox!.txt +```console +$ rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,prefix=OLD_" +OLD_stories/OLD_The Quick Brown Fox!.txt ``` -```sh -rclone convmv "stories/The Quick Brown 🦊 Fox Went to the Café!.txt" --name-transform "all,charmap=ISO-8859-7" -// Output: stories/The Quick Brown _ Fox Went to the Caf_!.txt +```console +$ rclone convmv "stories/The Quick Brown 🦊 Fox Went to the Café!.txt" --name-transform "all,charmap=ISO-8859-7" +stories/The Quick Brown _ Fox Went to the Caf_!.txt ``` -```sh -rclone convmv "stories/The Quick Brown Fox: A Memoir [draft].txt" --name-transform "all,encoder=Colon,SquareBracket" -// Output: stories/The Quick Brown Fox: A Memoir [draft].txt +```console +$ rclone convmv "stories/The Quick Brown Fox: A Memoir [draft].txt" --name-transform "all,encoder=Colon,SquareBracket" +stories/The Quick Brown Fox: A Memoir [draft].txt ``` -```sh -rclone convmv "stories/The Quick Brown 🦊 Fox Went to the Café!.txt" --name-transform "all,truncate=21" -// Output: stories/The Quick Brown 🦊 Fox +```console +$ rclone convmv "stories/The Quick Brown 🦊 Fox Went to the Café!.txt" --name-transform "all,truncate=21" +stories/The Quick Brown 🦊 Fox ``` -```sh -rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,command=echo" -// Output: stories/The Quick Brown Fox!.txt +```console +$ rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,command=echo" +stories/The Quick Brown Fox!.txt ``` -```sh -rclone convmv "stories/The Quick Brown Fox!" --name-transform "date=-{YYYYMMDD}" -// Output: stories/The Quick Brown Fox!-20250830 +```console +$ rclone convmv "stories/The Quick Brown Fox!" --name-transform "date=-{YYYYMMDD}" +stories/The Quick Brown Fox!-20250830 ``` -```sh -rclone convmv "stories/The Quick Brown Fox!" --name-transform "date=-{macfriendlytime}" -// Output: stories/The Quick Brown Fox!-2025-08-30 1234AM +```console +$ rclone convmv "stories/The Quick Brown Fox!" --name-transform "date=-{macfriendlytime}" +stories/The Quick Brown Fox!-2025-08-30 1234AM ``` -```sh -rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,regex=[\\.\\w]/ab" -// Output: ababababababab/ababab ababababab ababababab ababab!abababab +```console +$ rclone convmv "stories/The Quick Brown Fox!.txt" --name-transform "all,regex=[\\.\\w]/ab" +ababababababab/ababab ababababab ababababab ababab!abababab ``` diff --git a/vfs/vfs.md b/vfs/vfs.md index 612facf36..ba2b891c7 100644 --- a/vfs/vfs.md +++ b/vfs/vfs.md @@ -34,20 +34,20 @@ You can send a `SIGHUP` signal to rclone for it to flush all directory caches, regardless of how old they are. 
Assuming only one rclone instance is running, you can reset the cache like this: -```sh +```console kill -SIGHUP $(pidof rclone) ``` If you configure rclone with a [remote control](/rc) then you can use rclone rc to flush the whole directory cache: -```sh +```console rclone rc vfs/forget ``` Or individual files or directories: -```sh +```console rclone rc vfs/forget file=path/to/file dir=path/to/dir ``` @@ -462,7 +462,7 @@ Note that some backends won't create metadata unless you pass in the For example, using `rclone mount` with `--metadata --vfs-metadata-extension .metadata` we get -```sh +```console $ ls -l /mnt/ total 1048577 -rw-rw-r-- 1 user user 1073741824 Mar 3 16:03 1G