Mirror of https://github.com/rclone/rclone.git (synced 2025-12-06 00:03:32 +00:00)
onedrive: work around duplicated directory listing entries
When paging big directories, onedrive sometimes duplicates the last item on one page as the first item of the next page.

This patch detects that and skips the duplicated item with an error message.

See: https://forum.rclone.org/t/unexpected-duplicates-on-onedrive-with-0s-in-filename/23164
@@ -897,6 +897,7 @@ func (f *Fs) listAll(ctx context.Context, dirID string, directoriesOnly bool, fi
 	// Top parameter asks for bigger pages of data
 	// https://dev.onedrive.com/odata/optional-query-parameters.htm
 	opts := f.newOptsCall(dirID, "GET", "/children?$top=1000")
+	lastID := "\x00"
 OUTER:
 	for {
 		var result api.ListChildrenResponse
@@ -911,6 +912,10 @@ OUTER:
 		if len(result.Value) == 0 {
 			break
 		}
+		if result.Value[0].ID == lastID {
+			fs.Errorf(f, "Skipping duplicate entry %q in directory %q", lastID, dirID)
+			result.Value = result.Value[1:]
+		}
 		for i := range result.Value {
 			item := &result.Value[i]
 			isFolder := item.GetFolder() != nil
@@ -937,6 +942,9 @@ OUTER:
 		}
 		opts.Path = ""
 		opts.RootURL = result.NextLink
+		if len(result.Value) > 0 {
+			lastID = result.Value[len(result.Value)-1].ID
+		}
 	}
 	return
 }
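For illustration only, the following is a minimal, self-contained Go sketch of the same de-duplication idea, decoupled from rclone's API types: the item struct, the listAllPages function, and the pre-fetched pages slice are hypothetical names used for the example, not part of the patch.

// Minimal sketch (not rclone code): de-duplicating across pages when a
// paginated API may repeat the last item of one page as the first item
// of the next page.
package main

import "fmt"

// item stands in for an API listing entry; only the ID matters here.
type item struct {
	ID   string
	Name string
}

// listAllPages walks pre-fetched pages and drops an entry when a page
// starts with the same ID that ended the previous page.
func listAllPages(pages [][]item) []item {
	var out []item
	lastID := "\x00" // sentinel that should never match a real ID
	for _, page := range pages {
		if len(page) > 0 && page[0].ID == lastID {
			fmt.Printf("skipping duplicate entry %q\n", lastID)
			page = page[1:]
		}
		out = append(out, page...)
		if len(page) > 0 {
			lastID = page[len(page)-1].ID
		}
	}
	return out
}

func main() {
	// Page 2 repeats "b", the last entry of page 1.
	pages := [][]item{
		{{ID: "a", Name: "file1"}, {ID: "b", Name: "file2"}},
		{{ID: "b", Name: "file2"}, {ID: "c", Name: "file3"}},
	}
	for _, it := range listAllPages(pages) {
		fmt.Println(it.ID, it.Name)
	}
}

The "\x00" sentinel mirrors the patch's initial lastID value, presumably chosen so it can never collide with a real item ID on the first page.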