diff --git a/backend/b2/api/types.go b/backend/b2/api/types.go index b221d4c2d..ab4b221ec 100644 --- a/backend/b2/api/types.go +++ b/backend/b2/api/types.go @@ -48,6 +48,14 @@ type LifecycleRule struct { FileNamePrefix string `json:"fileNamePrefix"` } +// ServerSideEncryption is a configuration object for B2 Server-Side Encryption +type ServerSideEncryption struct { + Mode string `json:"mode"` + Algorithm string `json:"algorithm"` // Encryption algorithm to use + CustomerKey string `json:"customerKey"` // User provided Base64 encoded key that is used by the server to encrypt files + CustomerKeyMd5 string `json:"customerKeyMd5"` // An MD5 hash of the decoded key +} + // Timestamp is a UTC time when this file was uploaded. It is a base // 10 number of milliseconds since midnight, January 1, 1970 UTC. This // fits in a 64 bit integer such as the type "long" in the programming @@ -261,21 +269,22 @@ type GetFileInfoRequest struct { // // Example: { "src_last_modified_millis" : "1452802803026", "large_file_sha1" : "a3195dc1e7b46a2ff5da4b3c179175b75671e80d", "color": "blue" } type StartLargeFileRequest struct { - BucketID string `json:"bucketId"` //The ID of the bucket that the file will go in. - Name string `json:"fileName"` // The name of the file. See Files for requirements on file names. - ContentType string `json:"contentType"` // The MIME type of the content of the file, which will be returned in the Content-Type header when downloading the file. Use the Content-Type b2/x-auto to automatically set the stored Content-Type post upload. In the case where a file extension is absent or the lookup fails, the Content-Type is set to application/octet-stream. - Info map[string]string `json:"fileInfo"` // A JSON object holding the name/value pairs for the custom file info. + BucketID string `json:"bucketId"` // The ID of the bucket that the file will go in. + Name string `json:"fileName"` // The name of the file. See Files for requirements on file names. 
+ ContentType string `json:"contentType"` // The MIME type of the content of the file, which will be returned in the Content-Type header when downloading the file. Use the Content-Type b2/x-auto to automatically set the stored Content-Type post upload. In the case where a file extension is absent or the lookup fails, the Content-Type is set to application/octet-stream. + Info map[string]string `json:"fileInfo"` // A JSON object holding the name/value pairs for the custom file info. + ServerSideEncryption ServerSideEncryption `json:"serverSideEncryption,omitempty"` // A JSON object holding values related to Server-Side Encryption } // StartLargeFileResponse is the response to StartLargeFileRequest type StartLargeFileResponse struct { - ID string `json:"fileId"` // The unique identifier for this version of this file. Used with b2_get_file_info, b2_download_file_by_id, and b2_delete_file_version. - Name string `json:"fileName"` // The name of this file, which can be used with b2_download_file_by_name. - AccountID string `json:"accountId"` // The identifier for the account. - BucketID string `json:"bucketId"` // The unique ID of the bucket. - ContentType string `json:"contentType"` // The MIME type of the file. - Info map[string]string `json:"fileInfo"` // The custom information that was uploaded with the file. This is a JSON object, holding the name/value pairs that were uploaded with the file. - UploadTimestamp Timestamp `json:"uploadTimestamp"` // This is a UTC time when this file was uploaded. + ID string `json:"fileId"` // The unique identifier for this version of this file. Used with b2_get_file_info, b2_download_file_by_id, and b2_delete_file_version. + Name string `json:"fileName"` // The name of this file, which can be used with b2_download_file_by_name. + AccountID string `json:"accountId"` // The identifier for the account. + BucketID string `json:"bucketId"` // The unique ID of the bucket. 
+ ContentType string `json:"contentType"` // The MIME type of the file. + Info map[string]string `json:"fileInfo"` // The custom information that was uploaded with the file. This is a JSON object, holding the name/value pairs that were uploaded with the file. + UploadTimestamp Timestamp `json:"uploadTimestamp,omitempty"` // This is a UTC time when this file was uploaded. } // GetUploadPartURLRequest is passed to b2_get_upload_part_url @@ -325,21 +334,25 @@ type CancelLargeFileResponse struct { // CopyFileRequest is as passed to b2_copy_file type CopyFileRequest struct { - SourceID string `json:"sourceFileId"` // The ID of the source file being copied. - Name string `json:"fileName"` // The name of the new file being created. - Range string `json:"range,omitempty"` // The range of bytes to copy. If not provided, the whole source file will be copied. - MetadataDirective string `json:"metadataDirective,omitempty"` // The strategy for how to populate metadata for the new file: COPY or REPLACE - ContentType string `json:"contentType,omitempty"` // The MIME type of the content of the file (REPLACE only) - Info map[string]string `json:"fileInfo,omitempty"` // This field stores the metadata that will be stored with the file. (REPLACE only) - DestBucketID string `json:"destinationBucketId,omitempty"` // The destination ID of the bucket if set, if not the source bucket will be used + SourceID string `json:"sourceFileId"` // The ID of the source file being copied. + Name string `json:"fileName"` // The name of the new file being created. + Range string `json:"range,omitempty"` // The range of bytes to copy. If not provided, the whole source file will be copied. 
+ MetadataDirective string `json:"metadataDirective,omitempty"` // The strategy for how to populate metadata for the new file: COPY or REPLACE + ContentType string `json:"contentType,omitempty"` // The MIME type of the content of the file (REPLACE only) + Info map[string]string `json:"fileInfo,omitempty"` // This field stores the metadata that will be stored with the file. (REPLACE only) + DestBucketID string `json:"destinationBucketId,omitempty"` // The destination ID of the bucket if set, if not the source bucket will be used + SourceServerSideEncryption ServerSideEncryption `json:"sourceServerSideEncryption,omitempty"` // A JSON object holding values related to Server-Side Encryption for the source file + DestinationServerSideEncryption ServerSideEncryption `json:"destinationServerSideEncryption,omitempty"` // A JSON object holding values related to Server-Side Encryption for the destination file } // CopyPartRequest is the request for b2_copy_part - the response is UploadPartResponse type CopyPartRequest struct { - SourceID string `json:"sourceFileId"` // The ID of the source file being copied. - LargeFileID string `json:"largeFileId"` // The ID of the large file the part will belong to, as returned by b2_start_large_file. - PartNumber int64 `json:"partNumber"` // Which part this is (starting from 1) - Range string `json:"range,omitempty"` // The range of bytes to copy. If not provided, the whole source file will be copied. + SourceID string `json:"sourceFileId"` // The ID of the source file being copied. + LargeFileID string `json:"largeFileId"` // The ID of the large file the part will belong to, as returned by b2_start_large_file. + PartNumber int64 `json:"partNumber"` // Which part this is (starting from 1) + Range string `json:"range,omitempty"` // The range of bytes to copy. If not provided, the whole source file will be copied. 
+ SourceServerSideEncryption ServerSideEncryption `json:"sourceServerSideEncryption,omitempty"` // A JSON object holding values related to Server-Side Encryption for the source file + DestinationServerSideEncryption ServerSideEncryption `json:"destinationServerSideEncryption,omitempty"` // A JSON object holding values related to Server-Side Encryption for the destination file } // UpdateBucketRequest describes a request to modify a B2 bucket diff --git a/backend/b2/b2.go b/backend/b2/b2.go index 70e4c928a..4bf4a2a5a 100644 --- a/backend/b2/b2.go +++ b/backend/b2/b2.go @@ -8,7 +8,9 @@ import ( "bufio" "bytes" "context" + "crypto/md5" "crypto/sha1" + "encoding/base64" "encoding/json" "errors" "fmt" @@ -53,6 +55,9 @@ const ( nameHeader = "X-Bz-File-Name" timestampHeader = "X-Bz-Upload-Timestamp" retryAfterHeader = "Retry-After" + sseAlgorithmHeader = "X-Bz-Server-Side-Encryption-Customer-Algorithm" + sseKeyHeader = "X-Bz-Server-Side-Encryption-Customer-Key" + sseMd5Header = "X-Bz-Server-Side-Encryption-Customer-Key-Md5" minSleep = 10 * time.Millisecond maxSleep = 5 * time.Minute decayConstant = 1 // bigger for slower decay, exponential @@ -252,6 +257,51 @@ See: [rclone backend lifecycle](#lifecycle) for setting lifecycles after bucket Default: (encoder.Display | encoder.EncodeBackSlash | encoder.EncodeInvalidUtf8), + }, { + Name: "sse_customer_algorithm", + Help: "If using SSE-C, the server-side encryption algorithm used when storing this object in B2.", + Advanced: true, + Examples: []fs.OptionExample{{ + Value: "", + Help: "None", + }, { + Value: "AES256", + Help: "Advanced Encryption Standard (256 bits key length)", + }}, + }, { + Name: "sse_customer_key", + Help: `To use SSE-C, you may provide the secret encryption key encoded in a UTF-8 compatible string to encrypt/decrypt your data + +Alternatively you can provide --sse-customer-key-base64.`, + Advanced: true, + Examples: []fs.OptionExample{{ + Value: "", + Help: "None", + }}, + Sensitive: true, + }, { + Name: 
"sse_customer_key_base64", + Help: `To use SSE-C, you may provide the secret encryption key encoded in Base64 format to encrypt/decrypt your data + +Alternatively you can provide --sse-customer-key.`, + Advanced: true, + Examples: []fs.OptionExample{{ + Value: "", + Help: "None", + }}, + Sensitive: true, + }, { + Name: "sse_customer_key_md5", + Help: `If using SSE-C you may provide the secret encryption key MD5 checksum (optional). + +If you leave it blank, this is calculated automatically from the sse_customer_key provided. +`, + Advanced: true, + Examples: []fs.OptionExample{{ + Value: "", + Help: "None", + }}, + Sensitive: true, }}, }) } @@ -274,6 +324,10 @@ type Options struct { DownloadAuthorizationDuration fs.Duration `config:"download_auth_duration"` Lifecycle int `config:"lifecycle"` Enc encoder.MultiEncoder `config:"encoding"` + SSECustomerAlgorithm string `config:"sse_customer_algorithm"` + SSECustomerKey string `config:"sse_customer_key"` + SSECustomerKeyBase64 string `config:"sse_customer_key_base64"` + SSECustomerKeyMD5 string `config:"sse_customer_key_md5"` } // Fs represents a remote b2 server @@ -504,6 +558,24 @@ func NewFs(ctx context.Context, name, root string, m configmap.Mapper) (fs.Fs, e if opt.Endpoint == "" { opt.Endpoint = defaultEndpoint } + if opt.SSECustomerKey != "" && opt.SSECustomerKeyBase64 != "" { + return nil, errors.New("b2: can't use both sse_customer_key and sse_customer_key_base64 at the same time") + } else if opt.SSECustomerKeyBase64 != "" { + // Decode the Base64-encoded key and store it in the SSECustomerKey field + decoded, err := base64.StdEncoding.DecodeString(opt.SSECustomerKeyBase64) + if err != nil { + return nil, fmt.Errorf("b2: Could not decode sse_customer_key_base64: %w", err) + } + opt.SSECustomerKey = string(decoded) + } else { + // Encode the raw key as Base64 + opt.SSECustomerKeyBase64 = base64.StdEncoding.EncodeToString([]byte(opt.SSECustomerKey)) + } + if opt.SSECustomerKey != "" && opt.SSECustomerKeyMD5 == 
"" { + // Calculate CustomerKeyMd5 if not supplied + md5sumBinary := md5.Sum([]byte(opt.SSECustomerKey)) + opt.SSECustomerKeyMD5 = base64.StdEncoding.EncodeToString(md5sumBinary[:]) + } ci := fs.GetConfig(ctx) f := &Fs{ name: name, @@ -1435,6 +1507,16 @@ func (f *Fs) copy(ctx context.Context, dstObj *Object, srcObj *Object, newInfo * Name: f.opt.Enc.FromStandardPath(dstPath), DestBucketID: destBucketID, } + if f.opt.SSECustomerKey != "" && f.opt.SSECustomerKeyMD5 != "" { + serverSideEncryptionConfig := api.ServerSideEncryption{ + Mode: "SSE-C", + Algorithm: f.opt.SSECustomerAlgorithm, + CustomerKey: f.opt.SSECustomerKeyBase64, + CustomerKeyMd5: f.opt.SSECustomerKeyMD5, + } + request.SourceServerSideEncryption = serverSideEncryptionConfig + request.DestinationServerSideEncryption = serverSideEncryptionConfig + } if newInfo == nil { request.MetadataDirective = "COPY" } else { @@ -1866,9 +1948,10 @@ var _ io.ReadCloser = &openFile{} func (o *Object) getOrHead(ctx context.Context, method string, options []fs.OpenOption) (resp *http.Response, info *api.File, err error) { opts := rest.Opts{ - Method: method, - Options: options, - NoResponse: method == "HEAD", + Method: method, + Options: options, + NoResponse: method == "HEAD", + ExtraHeaders: map[string]string{}, } // Use downloadUrl from backblaze if downloadUrl is not set @@ -1886,6 +1969,11 @@ func (o *Object) getOrHead(ctx context.Context, method string, options []fs.Open bucket, bucketPath := o.split() opts.Path += "/file/" + urlEncode(o.fs.opt.Enc.FromStandardName(bucket)) + "/" + urlEncode(o.fs.opt.Enc.FromStandardPath(bucketPath)) } + if o.fs.opt.SSECustomerKey != "" && o.fs.opt.SSECustomerKeyMD5 != "" { + opts.ExtraHeaders[sseAlgorithmHeader] = o.fs.opt.SSECustomerAlgorithm + opts.ExtraHeaders[sseKeyHeader] = o.fs.opt.SSECustomerKeyBase64 + opts.ExtraHeaders[sseMd5Header] = o.fs.opt.SSECustomerKeyMD5 + } err = o.fs.pacer.Call(func() (bool, error) { resp, err = o.fs.srv.Call(ctx, &opts) return 
o.fs.shouldRetry(ctx, resp, err) @@ -2150,6 +2238,11 @@ func (o *Object) Update(ctx context.Context, in io.Reader, src fs.ObjectInfo, op }, ContentLength: &size, } + if o.fs.opt.SSECustomerKey != "" && o.fs.opt.SSECustomerKeyMD5 != "" { + opts.ExtraHeaders[sseAlgorithmHeader] = o.fs.opt.SSECustomerAlgorithm + opts.ExtraHeaders[sseKeyHeader] = o.fs.opt.SSECustomerKeyBase64 + opts.ExtraHeaders[sseMd5Header] = o.fs.opt.SSECustomerKeyMD5 + } var response api.FileInfo // Don't retry, return a retry error instead err = o.fs.pacer.CallNoRetry(func() (bool, error) { diff --git a/backend/b2/upload.go b/backend/b2/upload.go index e2cd4a1c2..bc35ad3c5 100644 --- a/backend/b2/upload.go +++ b/backend/b2/upload.go @@ -144,6 +144,14 @@ func (f *Fs) newLargeUpload(ctx context.Context, o *Object, in io.Reader, src fs request.ContentType = newInfo.ContentType request.Info = newInfo.Info } + if o.fs.opt.SSECustomerKey != "" && o.fs.opt.SSECustomerKeyMD5 != "" { + request.ServerSideEncryption = api.ServerSideEncryption{ + Mode: "SSE-C", + Algorithm: o.fs.opt.SSECustomerAlgorithm, + CustomerKey: o.fs.opt.SSECustomerKeyBase64, + CustomerKeyMd5: o.fs.opt.SSECustomerKeyMD5, + } + } opts := rest.Opts{ Method: "POST", Path: "/b2_start_large_file", @@ -295,6 +303,12 @@ func (up *largeUpload) WriteChunk(ctx context.Context, chunkNumber int, reader i ContentLength: &sizeWithHash, } + if up.o.fs.opt.SSECustomerKey != "" && up.o.fs.opt.SSECustomerKeyMD5 != "" { + opts.ExtraHeaders[sseAlgorithmHeader] = up.o.fs.opt.SSECustomerAlgorithm + opts.ExtraHeaders[sseKeyHeader] = up.o.fs.opt.SSECustomerKeyBase64 + opts.ExtraHeaders[sseMd5Header] = up.o.fs.opt.SSECustomerKeyMD5 + } + var response api.UploadPartResponse resp, err := up.f.srv.CallJSON(ctx, &opts, nil, &response) @@ -334,6 +348,17 @@ func (up *largeUpload) copyChunk(ctx context.Context, part int, partSize int64) PartNumber: int64(part + 1), Range: fmt.Sprintf("bytes=%d-%d", offset, offset+partSize-1), } + + if up.o.fs.opt.SSECustomerKey != "" 
&& up.o.fs.opt.SSECustomerKeyMD5 != "" { + serverSideEncryptionConfig := api.ServerSideEncryption{ + Mode: "SSE-C", + Algorithm: up.o.fs.opt.SSECustomerAlgorithm, + CustomerKey: up.o.fs.opt.SSECustomerKeyBase64, + CustomerKeyMd5: up.o.fs.opt.SSECustomerKeyMD5, + } + request.SourceServerSideEncryption = serverSideEncryptionConfig + request.DestinationServerSideEncryption = serverSideEncryptionConfig + } var response api.UploadPartResponse resp, err := up.f.srv.CallJSON(ctx, &opts, &request, &response) retry, err := up.f.shouldRetry(ctx, resp, err)