import os
import urllib.request

for url in addresses:
    file_name = url.rsplit('/', 1)[-1]
    fname_with_path = os.path.join(download_directory, file_name)
    attempts = 1
    while attempts < 5:
        try:
            urllib.request.urlretrieve(url, fname_with_path)
            print("%-3s %-60s %25s" % ('--', file_name, 'downloaded'), end='')
            break
        except Exception:
            # count the failed attempt and retry; give up after the fourth try
            attempts += 1
            if 1 < attempts < 5:
                print('tried to download ', file_name, ' attempt:', attempts)
    if attempts == 5:
        print("%-3s %-60s %25s" % ('--', file_name, 'FAILED'), end='')
Here's part of my code for downloading files from a list of URLs. Since some of the files are quite big, I'd like to know how many KB of a file have already been downloaded, for example:
file1: 348 / 2980
Also I'd like to update the progress on the same line, not like this:
file1: 348 / 2980
file1: 355 / 2980
file1: 389 / 2980
file1: 402 / 2980
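A minimal sketch of one way to do this, assuming the server sends a Content-Length header (otherwise the total size is unknown): urlretrieve accepts a reporthook callback that is called after each block, and printing with end='\r' moves the cursor back to the start of the line so each update overwrites the previous one. The progress helper and the lambda are illustrative, not part of your original code:

    import urllib.request

    def progress(block_num, block_size, total_size, file_name):
        # reporthook gives the number of blocks fetched so far, the block
        # size in bytes, and the total size (-1 if the server didn't say)
        downloaded_kb = block_num * block_size // 1024
        if total_size > 0:
            total_kb = total_size // 1024
            downloaded_kb = min(downloaded_kb, total_kb)
            line = '%s: %d / %d KB' % (file_name, downloaded_kb, total_kb)
        else:
            line = '%s: %d KB' % (file_name, downloaded_kb)
        # '\r' keeps the output on one line; the next call overwrites it
        print(line, end='\r', flush=True)

    # inside your existing try block, pass the hook and capture file_name:
    urllib.request.urlretrieve(
        url, fname_with_path,
        reporthook=lambda n, bs, ts: progress(n, bs, ts, file_name))
    print()  # move to a fresh line once the download has finished

The trailing print() matters: without it, the "downloaded"/"FAILED" status line would be written over the last progress line on the same row.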