// Copyright (c) Acrosync LLC. All rights reserved. // Free for personal use and commercial trial // Commercial use requires per-user licenses available from https://duplicacy.com package duplicacy import ( "encoding/json" "fmt" "io/ioutil" "net" "os" "path" "regexp" "runtime" "strconv" "strings" "golang.org/x/crypto/ssh" "golang.org/x/crypto/ssh/agent" ) type Storage interface { // ListFiles return the list of files and subdirectories under 'dir'. A subdirectories returned must have a trailing '/', with // a size of 0. If 'dir' is 'snapshots', only subdirectories will be returned. If 'dir' is 'snapshots/repository_id', then only // files will be returned. If 'dir' is 'chunks', the implementation can return the list either recusively or non-recusively. ListFiles(threadIndex int, dir string) (files []string, sizes []int64, err error) // DeleteFile deletes the file or directory at 'filePath'. DeleteFile(threadIndex int, filePath string) (err error) // MoveFile renames the file. MoveFile(threadIndex int, from string, to string) (err error) // CreateDirectory creates a new directory. CreateDirectory(threadIndex int, dir string) (err error) // GetFileInfo returns the information about the file or directory at 'filePath'. GetFileInfo(threadIndex int, filePath string) (exist bool, isDir bool, size int64, err error) // FindChunk finds the chunk with the specified id. If 'isFossil' is true, it will search for chunk files with // the suffix '.fsl'. FindChunk(threadIndex int, chunkID string, isFossil bool) (filePath string, exist bool, size int64, err error) // DownloadFile reads the file at 'filePath' into the chunk. DownloadFile(threadIndex int, filePath string, chunk *Chunk) (err error) // UploadFile writes 'content' to the file at 'filePath'. UploadFile(threadIndex int, filePath string, content []byte) (err error) // SetNestingLevels sets up the chunk nesting structure. SetNestingLevels(config *Config) // If a local snapshot cache is needed for the storage to avoid downloading/uploading chunks too often when // managing snapshots. IsCacheNeeded() bool // If the 'MoveFile' method is implemented. IsMoveFileImplemented() bool // If the storage can guarantee strong consistency. IsStrongConsistent() bool // If the storage supports fast listing of files names. IsFastListing() bool // Enable the test mode. EnableTestMode() // Set the maximum transfer speeds. SetRateLimits(downloadRateLimit int, uploadRateLimit int) } // StorageBase is the base struct from which all storages are derived from type StorageBase struct { DownloadRateLimit int // Maximum download rate (bytes/seconds) UploadRateLimit int // Maximum upload reate (bytes/seconds) DerivedStorage Storage // Used as the pointer to the derived storage class readLevels []int // At which nesting level to find the chunk with the given id writeLevel int // Store the uploaded chunk to this level } // SetRateLimits sets the maximum download and upload rates func (storage *StorageBase) SetRateLimits(downloadRateLimit int, uploadRateLimit int) { storage.DownloadRateLimit = downloadRateLimit storage.UploadRateLimit = uploadRateLimit } // SetDefaultNestingLevels sets the default read and write levels. This is usually called by // derived storages to set the levels with old values so that storages initialized by earlier versions // will continue to work. func (storage *StorageBase) SetDefaultNestingLevels(readLevels []int, writeLevel int) { storage.readLevels = readLevels storage.writeLevel = writeLevel } // SetNestingLevels sets the new read and write levels (normally both at 1) if the 'config' file has // the 'fixed-nesting' key, or if a file named 'nesting' exists on the storage. func (storage *StorageBase) SetNestingLevels(config *Config) { // 'FixedNesting' is true only for the 'config' file with the new format (2.0.10+) if config.FixedNesting { storage.readLevels = nil // Check if the 'nesting' file exist exist, _, _, err := storage.DerivedStorage.GetFileInfo(0, "nesting") if err == nil && exist { nestingFile := CreateChunk(CreateConfig(), true) if storage.DerivedStorage.DownloadFile(0, "nesting", nestingFile) == nil { var nesting struct { ReadLevels []int `json:"read-levels"` WriteLevel int `json:"write-level"` } if json.Unmarshal(nestingFile.GetBytes(), &nesting) == nil { storage.readLevels = nesting.ReadLevels storage.writeLevel = nesting.WriteLevel } } } if len(storage.readLevels) == 0 { storage.readLevels = []int{1} storage.writeLevel = 1 } } LOG_DEBUG("STORAGE_NESTING", "Chunk read levels: %v, write level: %d", storage.readLevels, storage.writeLevel) for _, level := range storage.readLevels { if storage.writeLevel == level { return } } LOG_ERROR("STORAGE_NESTING", "The write level %d isn't in the read levels %v", storage.readLevels, storage.writeLevel) } // FindChunk finds the chunk with the specified id at the levels one by one as specified by 'readLevels'. func (storage *StorageBase) FindChunk(threadIndex int, chunkID string, isFossil bool) (filePath string, exist bool, size int64, err error) { chunkPaths := make([]string, 0) for _, level := range storage.readLevels { chunkPath := "chunks/" for i := 0; i < level; i++ { chunkPath += chunkID[2*i:2*i+2] + "/" } chunkPath += chunkID[2*level:] if isFossil { chunkPath += ".fsl" } exist, _, size, err = storage.DerivedStorage.GetFileInfo(threadIndex, chunkPath) if err == nil && exist { return chunkPath, exist, size, err } chunkPaths = append(chunkPaths, chunkPath) } for i, level := range storage.readLevels { if storage.writeLevel == level { return chunkPaths[i], false, 0, nil } } return "", false, 0, fmt.Errorf("Invalid chunk nesting setup") } func checkHostKey(hostname string, remote net.Addr, key ssh.PublicKey) error { if preferencePath == "" { return fmt.Errorf("Can't verify SSH host since the preference path is not set") } hostFile := path.Join(preferencePath, "known_hosts") file, err := os.OpenFile(hostFile, os.O_RDWR|os.O_CREATE, 0600) if err != nil { return err } defer file.Close() content, err := ioutil.ReadAll(file) if err != nil { return err } lineRegex := regexp.MustCompile(`^([^\s]+)\s+(.+)`) keyString := string(ssh.MarshalAuthorizedKey(key)) keyString = strings.Replace(keyString, "\n", "", -1) remoteAddress := remote.String() if strings.HasSuffix(remoteAddress, ":22") { remoteAddress = remoteAddress[:len(remoteAddress)-len(":22")] } for i, line := range strings.Split(string(content), "\n") { matched := lineRegex.FindStringSubmatch(line) if matched == nil { continue } if matched[1] == remote.String() { if keyString != matched[2] { LOG_WARN("HOSTKEY_OLD", "The existing key for '%s' is %s (file %s, line %d)", remote.String(), matched[2], hostFile, i) LOG_WARN("HOSTKEY_NEW", "The new key is '%s'", keyString) return fmt.Errorf("The host key for '%s' has changed", remote.String()) } else { return nil } } } file.Write([]byte(remote.String() + " " + keyString + "\n")) return nil } // CreateStorage creates a storage object based on the provide storage URL. func CreateStorage(preference Preference, resetPassword bool, threads int) (storage Storage) { storageURL := preference.StorageURL isFileStorage := false isCacheNeeded := false if strings.HasPrefix(storageURL, "/") { isFileStorage = true } else if runtime.GOOS == "windows" { if len(storageURL) >= 3 && storageURL[1] == ':' && (storageURL[2] == '/' || storageURL[2] == '\\') { volume := strings.ToLower(storageURL[:1]) if volume[0] >= 'a' && volume[0] <= 'z' { isFileStorage = true } } if !isFileStorage && strings.HasPrefix(storageURL, `\\`) { isFileStorage = true isCacheNeeded = true } } if isFileStorage { fileStorage, err := CreateFileStorage(storageURL, isCacheNeeded, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the file storage at %s: %v", storageURL, err) return nil } return fileStorage } if strings.HasPrefix(storageURL, "flat://") { fileStorage, err := CreateFileStorage(storageURL[7:], false, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the file storage at %s: %v", storageURL, err) return nil } return fileStorage } if strings.HasPrefix(storageURL, "samba://") { fileStorage, err := CreateFileStorage(storageURL[8:], true, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the file storage at %s: %v", storageURL, err) return nil } return fileStorage } // Added \! to matched[2] because OneDrive drive ids contain ! (e.g. "b!xxx") urlRegex := regexp.MustCompile(`^([\w-]+)://([\w\-@\.\!]+@)?([^/]+)(/(.+))?`) matched := urlRegex.FindStringSubmatch(storageURL) if matched == nil { LOG_ERROR("STORAGE_CREATE", "Unrecognizable storage URL: %s", storageURL) return nil } else if matched[1] == "sftp" || matched[1] == "sftpc" { server := matched[3] username := matched[2] storageDir := matched[5] port := 22 if strings.Contains(server, ":") { index := strings.Index(server, ":") port, _ = strconv.Atoi(server[index+1:]) server = server[:index] } if storageDir == "" { LOG_ERROR("STORAGE_CREATE", "The SFTP storage directory can't be empty") return nil } if username != "" { username = username[:len(username)-1] } // If ssh_key_file is set, skip password-based login keyFile := GetPasswordFromPreference(preference, "ssh_key_file") passphrase := "" password := "" passwordCallback := func() (string, error) { LOG_DEBUG("SSH_PASSWORD", "Attempting password login") password = GetPassword(preference, "ssh_password", "Enter SSH password:", false, resetPassword) return password, nil } keyboardInteractive := func(user, instruction string, questions []string, echos []bool) (answers []string, err error) { if len(questions) == 1 { LOG_DEBUG("SSH_INTERACTIVE", "Attempting keyboard interactive login") password = GetPassword(preference, "ssh_password", "Enter SSH password:", false, resetPassword) answers = []string{password} return answers, nil } else { return nil, nil } } publicKeysCallback := func() ([]ssh.Signer, error) { LOG_DEBUG("SSH_PUBLICKEY", "Attempting public key authentication") signers := []ssh.Signer{} agentSock := os.Getenv("SSH_AUTH_SOCK") if agentSock != "" { connection, err := net.Dial("unix", agentSock) // TODO: looks like we need to close the connection if err == nil { LOG_DEBUG("SSH_AGENT", "Attempting public key authentication via agent") sshAgent := agent.NewClient(connection) signers, err = sshAgent.Signers() if err != nil { LOG_DEBUG("SSH_AGENT", "Can't log in using public key authentication via agent: %v", err) } else if len(signers) == 0 { LOG_DEBUG("SSH_AGENT", "SSH agent doesn't return any signer") } } } keyFile = GetPassword(preference, "ssh_key_file", "Enter the path of the private key file:", true, resetPassword) var keySigner ssh.Signer var err error if keyFile == "" { LOG_INFO("SSH_PUBLICKEY", "No private key file is provided") } else { var content []byte content, err = ioutil.ReadFile(keyFile) if err != nil { LOG_INFO("SSH_PUBLICKEY", "Failed to read the private key file: %v", err) } else { keySigner, err = ssh.ParsePrivateKey(content) if err != nil { if _, ok := err.(*ssh.PassphraseMissingError); ok { LOG_TRACE("SSH_PUBLICKEY", "The private key file is encrypted") passphrase = GetPassword(preference, "ssh_passphrase", "Enter the passphrase to decrypt the private key file:", false, resetPassword) if len(passphrase) == 0 { LOG_INFO("SSH_PUBLICKEY", "No passphrase to descrypt the private key file %s", keyFile) } else { keySigner, err = ssh.ParsePrivateKeyWithPassphrase(content, []byte(passphrase)) if err != nil { LOG_INFO("SSH_PUBLICKEY", "Failed to parse the encrypted private key file %s: %v", keyFile, err) } } } else { LOG_INFO("SSH_PUBLICKEY", "Failed to parse the private key file %s: %v", keyFile, err) } } if keySigner != nil { certFile := keyFile + "-cert.pub" if stat, err := os.Stat(certFile); err == nil && !stat.IsDir() { LOG_DEBUG("SSH_CERTIFICATE", "Attempting to use ssh certificate from file %s", certFile) var content []byte content, err = ioutil.ReadFile(certFile) if err != nil { LOG_INFO("SSH_CERTIFICATE", "Failed to read ssh certificate file %s: %v", certFile, err) } else { pubKey, _, _, _, err := ssh.ParseAuthorizedKey(content) if err != nil { LOG_INFO("SSH_CERTIFICATE", "Failed parse ssh certificate file %s: %v", certFile, err) } else { certSigner, err := ssh.NewCertSigner(pubKey.(*ssh.Certificate), keySigner) if err != nil { LOG_INFO("SSH_CERTIFICATE", "Failed to create certificate signer: %v", err) } else { keySigner = certSigner } } } } } } } if keySigner != nil { signers = append(signers, keySigner) } if len(signers) > 0 { return signers, nil } else { return nil, err } } authMethods := []ssh.AuthMethod{} passwordAuthMethods := []ssh.AuthMethod{ ssh.PasswordCallback(passwordCallback), ssh.KeyboardInteractive(keyboardInteractive), } keyFileAuthMethods := []ssh.AuthMethod{ ssh.PublicKeysCallback(publicKeysCallback), } if keyFile != "" { authMethods = append(keyFileAuthMethods, passwordAuthMethods...) } else { authMethods = append(passwordAuthMethods, keyFileAuthMethods...) } if RunInBackground { passwordKey := "ssh_password" keyFileKey := "ssh_key_file" if preference.Name != "default" { passwordKey = preference.Name + "_" + passwordKey keyFileKey = preference.Name + "_" + keyFileKey } authMethods = []ssh.AuthMethod{} if keyringGet(passwordKey) != "" { authMethods = append(authMethods, ssh.PasswordCallback(passwordCallback)) authMethods = append(authMethods, ssh.KeyboardInteractive(keyboardInteractive)) } if keyringGet(keyFileKey) != "" || os.Getenv("SSH_AUTH_SOCK") != "" { authMethods = append(authMethods, ssh.PublicKeysCallback(publicKeysCallback)) } } hostKeyChecker := func(hostname string, remote net.Addr, key ssh.PublicKey) error { return checkHostKey(hostname, remote, key) } sftpStorage, err := CreateSFTPStorage(matched[1] == "sftpc", server, port, username, storageDir, 2, authMethods, hostKeyChecker, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the SFTP storage at %s: %v", storageURL, err) return nil } if keyFile != "" { SavePassword(preference, "ssh_key_file", keyFile) if passphrase != "" { SavePassword(preference, "ssh_passphrase", passphrase) } } else if password != "" { SavePassword(preference, "ssh_password", password) } return sftpStorage } else if matched[1] == "s3" || matched[1] == "s3c" || matched[1] == "minio" || matched[1] == "minios" || matched[1] == "s3-token" { // urlRegex := regexp.MustCompile(`^(\w+)://([\w\-]+@)?([^/]+)(/(.+))?`) region := matched[2] endpoint := matched[3] bucket := matched[5] if region != "" { region = region[:len(region)-1] } if strings.EqualFold(endpoint, "amazon") || strings.EqualFold(endpoint, "amazon.com") { endpoint = "" } storageDir := "" if strings.Contains(bucket, "/") { firstSlash := strings.Index(bucket, "/") storageDir = bucket[firstSlash+1:] bucket = bucket[:firstSlash] } accessKey := GetPassword(preference, "s3_id", "Enter S3 Access Key ID:", true, resetPassword) secretKey := GetPassword(preference, "s3_secret", "Enter S3 Secret Access Key:", true, resetPassword) var err error if matched[1] == "s3c" { storage, err = CreateS3CStorage(region, endpoint, bucket, storageDir, accessKey, secretKey, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the S3C storage at %s: %v", storageURL, err) return nil } } else { isMinioCompatible := (matched[1] == "minio" || matched[1] == "minios") isSSLSupported := (matched[1] == "s3" || matched[1] == "minios") if matched[1] == "s3-token" { token := GetPassword(preference, "s3_token", "Enter S3 Token (Optional):", true, resetPassword) storage, err = CreateS3StorageWithToken(region, endpoint, bucket, storageDir, accessKey, secretKey, token, threads, isSSLSupported, isMinioCompatible) } else { storage, err = CreateS3Storage(region, endpoint, bucket, storageDir, accessKey, secretKey, threads, isSSLSupported, isMinioCompatible) } if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the S3 storage at %s: %v", storageURL, err) return nil } } SavePassword(preference, "s3_id", accessKey) SavePassword(preference, "s3_secret", secretKey) return storage } else if matched[1] == "wasabi" { region := matched[2] endpoint := matched[3] bucket := matched[5] if region != "" { region = region[:len(region)-1] } key := GetPassword(preference, "wasabi_key", "Enter Wasabi key:", true, resetPassword) secret := GetPassword(preference, "wasabi_secret", "Enter Wasabi secret:", true, resetPassword) storageDir := "" if strings.Contains(bucket, "/") { firstSlash := strings.Index(bucket, "/") storageDir = bucket[firstSlash+1:] bucket = bucket[:firstSlash] } storage, err := CreateWasabiStorage(region, endpoint, bucket, storageDir, key, secret, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the Wasabi storage at %s: %v", storageURL, err) return nil } SavePassword(preference, "wasabi_key", key) SavePassword(preference, "wasabi_secret", secret) return storage } else if matched[1] == "dropbox" { storageDir := matched[3] + matched[5] token := GetPassword(preference, "dropbox_token", "Enter Dropbox refresh token:", true, resetPassword) dropboxStorage, err := CreateDropboxStorage(token, storageDir, 1, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the dropbox storage: %v", err) return nil } SavePassword(preference, "dropbox_token", token) return dropboxStorage } else if matched[1] == "b2" { bucket := matched[3] storageDir := matched[5] accountID := GetPassword(preference, "b2_id", "Enter Backblaze account or application id:", true, resetPassword) applicationKey := GetPassword(preference, "b2_key", "Enter corresponding Backblaze application key:", true, resetPassword) b2Storage, err := CreateB2Storage(accountID, applicationKey, "", bucket, storageDir, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the Backblaze B2 storage at %s: %v", storageURL, err) return nil } SavePassword(preference, "b2_id", accountID) SavePassword(preference, "b2_key", applicationKey) return b2Storage } else if matched[1] == "b2-custom" { b2customUrlRegex := regexp.MustCompile(`^b2-custom://([^/]+)/([^/]+)(/(.+))?`) matched := b2customUrlRegex.FindStringSubmatch(storageURL) downloadURL := "https://" + matched[1] bucket := matched[2] storageDir := matched[4] accountID := GetPassword(preference, "b2_id", "Enter Backblaze account or application id:", true, resetPassword) applicationKey := GetPassword(preference, "b2_key", "Enter corresponding Backblaze application key:", true, resetPassword) b2Storage, err := CreateB2Storage(accountID, applicationKey, downloadURL, bucket, storageDir, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the Backblaze B2 storage at %s: %v", storageURL, err) return nil } SavePassword(preference, "b2_id", accountID) SavePassword(preference, "b2_key", applicationKey) return b2Storage } else if matched[1] == "azure" { account := matched[3] container := matched[5] if container == "" { LOG_ERROR("STORAGE_CREATE", "The container name for the Azure storage can't be empty") return nil } prompt := fmt.Sprintf("Enter the Access Key for the Azure storage account %s:", account) accessKey := GetPassword(preference, "azure_key", prompt, true, resetPassword) azureStorage, err := CreateAzureStorage(account, accessKey, container, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the Azure storage at %s: %v", storageURL, err) return nil } SavePassword(preference, "azure_key", accessKey) return azureStorage } else if matched[1] == "acd" { storagePath := matched[3] + matched[4] prompt := fmt.Sprintf("Enter the path of the Amazon Cloud Drive token file (downloadable from https://duplicacy.com/acd_start):") tokenFile := GetPassword(preference, "acd_token", prompt, true, resetPassword) acdStorage, err := CreateACDStorage(tokenFile, storagePath, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the Amazon Cloud Drive storage at %s: %v", storageURL, err) return nil } SavePassword(preference, "acd_token", tokenFile) return acdStorage } else if matched[1] == "gcs" { bucket := matched[3] storageDir := matched[5] prompt := fmt.Sprintf("Enter the path of the Google Cloud Storage token file (downloadable from https://duplicacy.com/gcs_start) or the service account credential file:") tokenFile := GetPassword(preference, "gcs_token", prompt, true, resetPassword) gcsStorage, err := CreateGCSStorage(tokenFile, bucket, storageDir, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the Google Cloud Storage backend at %s: %v", storageURL, err) return nil } SavePassword(preference, "gcs_token", tokenFile) return gcsStorage } else if matched[1] == "gcd" { // Handle writing directly to the root of the drive // For gcd://driveid@/, driveid@ is match[3] not match[2] if matched[2] == "" && strings.HasSuffix(matched[3], "@") { matched[2], matched[3] = matched[3], matched[2] } driveID := matched[2] if driveID != "" { driveID = driveID[:len(driveID)-1] } storagePath := matched[3] + matched[4] prompt := fmt.Sprintf("Enter the path of the Google Drive token file (downloadable from https://duplicacy.com/gcd_start):") tokenFile := GetPassword(preference, "gcd_token", prompt, true, resetPassword) gcdStorage, err := CreateGCDStorage(tokenFile, driveID, storagePath, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the Google Drive storage at %s: %v", storageURL, err) return nil } SavePassword(preference, "gcd_token", tokenFile) return gcdStorage } else if matched[1] == "one" || matched[1] == "odb" { // Handle writing directly to the root of the drive // For odb://drive_id@/, drive_id@ is match[3] not match[2] if matched[2] == "" && strings.HasSuffix(matched[3], "@") { matched[2], matched[3] = matched[3], matched[2] } drive_id := matched[2] if len(drive_id) > 0 { drive_id = drive_id[:len(drive_id)-1] } storagePath := matched[3] + matched[4] prompt := fmt.Sprintf("Enter the path of the OneDrive token file (downloadable from https://duplicacy.com/one_start):") tokenFile := GetPassword(preference, matched[1] + "_token", prompt, true, resetPassword) // client_id, just like tokenFile, can be stored in preferences //prompt = fmt.Sprintf("Enter client_id for custom Azure app (if empty will use duplicacy.com one):") client_id := GetPasswordFromPreference(preference, matched[1] + "_client_id") client_secret := "" if client_id != "" { // client_secret should go into keyring prompt = fmt.Sprintf("Enter client_secret for custom Azure app (if empty will use duplicacy.com one):") client_secret = GetPassword(preference, matched[1] + "_client_secret", prompt, true, resetPassword) } oneDriveStorage, err := CreateOneDriveStorage(tokenFile, matched[1] == "odb", storagePath, threads, client_id, client_secret, drive_id) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the OneDrive storage at %s: %v", storageURL, err) return nil } SavePassword(preference, matched[1] + "_token", tokenFile) if client_id != "" { SavePassword(preference, matched[1] + "_client_secret", client_secret) } return oneDriveStorage } else if matched[1] == "hubic" { storagePath := matched[3] + matched[4] prompt := fmt.Sprintf("Enter the path of the Hubic token file (downloadable from https://duplicacy.com/hubic_start):") tokenFile := GetPassword(preference, "hubic_token", prompt, true, resetPassword) hubicStorage, err := CreateHubicStorage(tokenFile, storagePath, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the Hubic storage at %s: %v", storageURL, err) return nil } SavePassword(preference, "hubic_token", tokenFile) return hubicStorage } else if matched[1] == "swift" { prompt := fmt.Sprintf("Enter the OpenStack Swift key:") key := GetPassword(preference, "swift_key", prompt, true, resetPassword) swiftStorage, err := CreateSwiftStorage(storageURL[8:], key, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the OpenStack Swift storage at %s: %v", storageURL, err) return nil } SavePassword(preference, "swift_key", key) return swiftStorage } else if matched[1] == "webdav" || matched[1] == "webdav-http" { server := matched[3] username := matched[2] if username == "" { LOG_ERROR("STORAGE_CREATE", "No username is provided to access the WebDAV storage") return nil } username = username[:len(username)-1] storageDir := matched[5] port := 0 useHTTP := matched[1] == "webdav-http" if strings.Contains(server, ":") { index := strings.Index(server, ":") port, _ = strconv.Atoi(server[index+1:]) server = server[:index] } prompt := fmt.Sprintf("Enter the WebDAV password:") password := GetPassword(preference, "webdav_password", prompt, true, resetPassword) webDAVStorage, err := CreateWebDAVStorage(server, port, username, password, storageDir, useHTTP, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the WebDAV storage at %s: %v", storageURL, err) return nil } SavePassword(preference, "webdav_password", password) return webDAVStorage } else if matched[1] == "fabric" { endpoint := matched[3] storageDir := matched[5] prompt := fmt.Sprintf("Enter the token for accessing the Storage Made Easy File Fabric storage:") token := GetPassword(preference, "fabric_token", prompt, true, resetPassword) smeStorage, err := CreateFileFabricStorage(endpoint, token, storageDir, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the File Fabric storage at %s: %v", storageURL, err) return nil } SavePassword(preference, "fabric_token", token) return smeStorage } else if matched[1] == "storj" { satellite := matched[2] + matched[3] bucket := matched[5] storageDir := "" index := strings.Index(bucket, "/") if index >= 0 { storageDir = bucket[index + 1:] bucket = bucket[:index] } apiKey := GetPassword(preference, "storj_key", "Enter the API access key:", true, resetPassword) passphrase := GetPassword(preference, "storj_passphrase", "Enter the passphrase:", true, resetPassword) storjStorage, err := CreateStorjStorage(satellite, apiKey, passphrase, bucket, storageDir, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the Storj storage at %s: %v", storageURL, err) return nil } SavePassword(preference, "storj_key", apiKey) SavePassword(preference, "storj_passphrase", passphrase) return storjStorage } else if matched[1] == "smb" { server := matched[3] username := matched[2] if username == "" { LOG_ERROR("STORAGE_CREATE", "No username is provided to access the SAMBA storage") return nil } username = username[:len(username)-1] storageDir := matched[5] port := 445 if strings.Contains(server, ":") { index := strings.Index(server, ":") port, _ = strconv.Atoi(server[index+1:]) server = server[:index] } if !strings.Contains(storageDir, "/") { LOG_ERROR("STORAGE_CREATE", "No share name specified for the SAMBA storage") return nil } index := strings.Index(storageDir, "/") shareName := storageDir[:index] storageDir = storageDir[index+1:] prompt := fmt.Sprintf("Enter the SAMBA password:") password := GetPassword(preference, "smb_password", prompt, true, resetPassword) sambaStorage, err := CreateSambaStorage(server, port, username, password, shareName, storageDir, threads) if err != nil { LOG_ERROR("STORAGE_CREATE", "Failed to load the SAMBA storage at %s: %v", storageURL, err) return nil } SavePassword(preference, "smb_password", password) return sambaStorage } else { LOG_ERROR("STORAGE_CREATE", "The storage type '%s' is not supported", matched[1]) return nil } }