
Use multiple threads to list the chunks directory for Google Drive

Gilbert Chen
2020-06-15 12:49:13 -04:00
parent 5d45999077
commit 153f6a2d20


@@ -86,6 +86,10 @@ func (storage *GCDStorage) shouldRetry(threadIndex int, err error) (bool, error)
 			// Request timeout
 			message = e.Message
 			retry = true
+		} else if e.Code == 400 && strings.Contains(e.Message, "failedPrecondition") {
+			// Daily quota exceeded
+			message = e.Message
+			retry = true
 		} else if e.Code == 401 {
 			// Only retry on authorization error when storage has been connected before
 			if storage.isConnected {
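
Read outside the diff, the new branch treats a Drive 400 response whose message contains "failedPrecondition" (the commit's comment identifies this as the daily quota being exceeded) as retryable, alongside the existing 401 rule. The following is a minimal sketch of that classification, assuming the *googleapi.Error type from google.golang.org/api/googleapi; the function name, the isConnected parameter, and the sample message are illustrative rather than duplicacy's actual API:

package main

import (
	"fmt"
	"strings"

	"google.golang.org/api/googleapi"
)

// shouldRetryDriveError mirrors the two branches shown above: a 400 whose
// message mentions "failedPrecondition" (daily quota exceeded) is retried,
// and a 401 is retried only if the storage had authenticated successfully
// before, so a bad credential still fails fast on the first attempt.
func shouldRetryDriveError(err error, isConnected bool) bool {
	e, ok := err.(*googleapi.Error)
	if !ok {
		return false
	}
	if e.Code == 400 && strings.Contains(e.Message, "failedPrecondition") {
		return true
	}
	if e.Code == 401 {
		return isConnected
	}
	return false
}

func main() {
	// An illustrative error value; real messages from the Drive API vary.
	quotaErr := &googleapi.Error{Code: 400, Message: "failedPrecondition: daily limit exceeded"}
	fmt.Println(shouldRetryDriveError(quotaErr, true)) // prints true
}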
@@ -476,39 +480,76 @@ func (storage *GCDStorage) ListFiles(threadIndex int, dir string) ([]string, []int64, error)
 		}
 		return files, nil, nil
 	} else {
-		files := []string{}
-		sizes := []int64{}
+		lock := sync.Mutex {}
+		allFiles := []string{}
+		allSizes := []int64{}
+		errorChannel := make(chan error)
+		directoryChannel := make(chan string)
+		activeWorkers := 0
 		parents := []string{"chunks", "fossils"}
-		for i := 0; i < len(parents); i++ {
-			parent := parents[i]
-			pathID, ok := storage.findPathID(parent)
-			if !ok {
-				continue
-			}
-			entries, err := storage.listFiles(threadIndex, pathID, true, true)
-			if err != nil {
-				return nil, nil, err
-			}
-			for _, entry := range entries {
-				if entry.MimeType != GCDDirectoryMimeType {
-					name := entry.Name
-					if strings.HasPrefix(parent, "fossils") {
-						name = parent + "/" + name + ".fsl"
-						name = name[len("fossils/"):]
-					} else {
-						name = parent + "/" + name
-						name = name[len("chunks/"):]
-					}
-					files = append(files, name)
-					sizes = append(sizes, entry.Size)
-				} else {
-					parents = append(parents, parent+"/"+entry.Name)
-					storage.savePathID(parent+"/"+entry.Name, entry.Id)
-				}
-			}
-		}
-		return files, sizes, nil
+		for len(parents) > 0 || activeWorkers > 0 {
+
+			if len(parents) > 0 && activeWorkers < storage.numberOfThreads {
+				parent := parents[0]
+				parents = parents[1:]
+				activeWorkers++
+				go func(parent string) {
+					pathID, ok := storage.findPathID(parent)
+					if !ok {
+						return
+					}
+					entries, err := storage.listFiles(threadIndex, pathID, true, true)
+					if err != nil {
+						errorChannel <- err
+						return
+					}
+
+					LOG_DEBUG("GCD_STORAGE", "Listing %s; %d items returned", parent, len(entries))
+
+					files := []string {}
+					sizes := []int64 {}
+					for _, entry := range entries {
+						if entry.MimeType != GCDDirectoryMimeType {
+							name := entry.Name
+							if strings.HasPrefix(parent, "fossils") {
+								name = parent + "/" + name + ".fsl"
+								name = name[len("fossils/"):]
+							} else {
+								name = parent + "/" + name
+								name = name[len("chunks/"):]
+							}
+							files = append(files, name)
+							sizes = append(sizes, entry.Size)
+						} else {
+							directoryChannel <- parent+"/"+entry.Name
+							storage.savePathID(parent+"/"+entry.Name, entry.Id)
+						}
+					}
+					lock.Lock()
+					allFiles = append(allFiles, files...)
+					allSizes = append(allSizes, sizes...)
+					lock.Unlock()
+					directoryChannel <- ""
+				} (parent)
+			}
+			if activeWorkers > 0 {
+				select {
+				case err := <- errorChannel:
+					return nil, nil, err
+				case directory := <- directoryChannel:
+					if directory == "" {
+						activeWorkers--
+					} else {
+						parents = append(parents, directory)
+					}
+				}
+			}
+		}
+		return allFiles, allSizes, nil
 	}
 }
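
The scheduling pattern this hunk introduces can be read on its own as a breadth-first directory walk with a bounded number of concurrent listings: a queue of pending directories, one goroutine per in-flight listing, a directoryChannel that feeds discovered subdirectories back to the scheduler (with the empty string as a worker's completion marker), and an errorChannel that aborts on the first failure. Below is a self-contained sketch of that pattern against the local filesystem; listDir, listAll, and the numberOfThreads argument are stand-ins for illustration rather than duplicacy's API, and the real code additionally resolves Drive path IDs and rewrites the chunks/ and fossils/ prefixes.

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"sync"
)

// listDir is a stand-in for storage.listFiles: it lists one directory and
// splits the entries into files and subdirectories. It reads the local
// filesystem only so that the sketch stays self-contained.
func listDir(dir string) (files []string, subdirs []string, err error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, nil, err
	}
	for _, entry := range entries {
		if entry.IsDir() {
			subdirs = append(subdirs, filepath.Join(dir, entry.Name()))
		} else {
			files = append(files, filepath.Join(dir, entry.Name()))
		}
	}
	return files, subdirs, nil
}

// listAll mirrors the scheduling loop in the commit: it keeps up to
// numberOfThreads listings in flight, feeds newly discovered subdirectories
// back through directoryChannel, uses an empty string as a worker's
// completion marker, and aborts on the first error.
func listAll(roots []string, numberOfThreads int) ([]string, error) {
	lock := sync.Mutex{}
	allFiles := []string{}
	errorChannel := make(chan error)
	directoryChannel := make(chan string)
	activeWorkers := 0
	parents := append([]string{}, roots...)

	for len(parents) > 0 || activeWorkers > 0 {
		if len(parents) > 0 && activeWorkers < numberOfThreads {
			parent := parents[0]
			parents = parents[1:]
			activeWorkers++
			go func(parent string) {
				files, subdirs, err := listDir(parent)
				if err != nil {
					errorChannel <- err
					return
				}
				lock.Lock()
				allFiles = append(allFiles, files...)
				lock.Unlock()
				// Hand each subdirectory back to the scheduler, then signal completion.
				for _, subdir := range subdirs {
					directoryChannel <- subdir
				}
				directoryChannel <- ""
			}(parent)
		}
		if activeWorkers > 0 {
			select {
			case err := <-errorChannel:
				return nil, err
			case directory := <-directoryChannel:
				if directory == "" {
					activeWorkers--
				} else {
					parents = append(parents, directory)
				}
			}
		}
	}
	return allFiles, nil
}

func main() {
	files, err := listAll([]string{"."}, 4)
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Printf("%d files found\n", len(files))
}

As in the committed loop, returning on the first error can leave other in-flight workers blocked on their unread channel sends; the sketch keeps that trade-off to stay close to the original.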