Make get_dir_size robust to concurrent deletions.

ref #2055
Arseny Sher
2022-07-18 13:40:42 +03:00
parent 7987889cb3
commit eeff56aeb7


@@ -83,6 +83,9 @@ def get_dir_size(path: str) -> int:
     totalbytes = 0
     for root, dirs, files in os.walk(path):
         for name in files:
-            totalbytes += os.path.getsize(os.path.join(root, name))
+            try:
+                totalbytes += os.path.getsize(os.path.join(root, name))
+            except FileNotFoundError as e:
+                pass  # file could be concurrently removed
     return totalbytes
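
For context, here is a self-contained sketch of the helper after this patch. The function body follows the hunk above; the docstring is illustrative, and the unused `as e` binding from the diff is dropped for brevity. The race it guards against is inherent to os.walk: the directory listing is taken first and each entry is stat'ed afterwards, so a file deleted in between makes os.path.getsize raise FileNotFoundError.

    import os

    def get_dir_size(path: str) -> int:
        """Return the total size in bytes of all files under `path`,
        tolerating files that are removed concurrently with the walk."""
        totalbytes = 0
        for root, dirs, files in os.walk(path):
            for name in files:
                try:
                    totalbytes += os.path.getsize(os.path.join(root, name))
                except FileNotFoundError:
                    pass  # file could be concurrently removed
        return totalbytes

Note that under concurrent modification the result is only a best-effort snapshot: a file removed before its stat is skipped, while one replaced in between is counted at its new size.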