Correctly calculate download speed if we're resuming

This commit is contained in:
hygienic-books 2022-03-20 02:25:02 +01:00
parent b5dff485d9
commit 4809846edf

View File

@@ -48,7 +48,9 @@ from rich.progress import (
 # TODO set locale for datetime and others to globally stick to en_US
 download_start_time = 0
 download_last_update_time = 0
-size_downloaded = 0
+total_content_length = 0
+size_downloaded_for_progress_tracking = 0
+size_downloaded_for_speed_tracking = 0
 
 file_lock_timeout = 1
 state_lock_file_ext = ".lock"
@@ -566,7 +568,8 @@ def copy_url(
 
 global download_start_time
 global download_last_update_time
-global size_downloaded
+global size_downloaded_for_progress_tracking
+global size_downloaded_for_speed_tracking
 
 update_interval = config_obj.getint(section_name, "dl_progress_update_interval")
 max_quality_url = video_metadata["url"]
@@ -590,21 +593,24 @@ def copy_url(
 log.info(f"resume_header: {resume_header}")
 r = requests.get(max_quality_url, headers=resume_header, stream=True)
 for chunk in r.iter_content(32768):
-size_downloaded += len(chunk)
+size_downloaded_for_progress_tracking += len(chunk)
+size_downloaded_for_speed_tracking += len(chunk)
 tmp_file.write(chunk)
 if time.time() - download_last_update_time >= update_interval:
 download_last_update_time = time.time()
-dl_speed_so_far = size_downloaded / (download_last_update_time - download_start_time)
+time_in_progress = download_last_update_time - download_start_time
+dl_speed_so_far = size_downloaded_for_speed_tracking / time_in_progress
 human_dl_speed_so_far = f"{humanize.naturalsize(dl_speed_so_far, binary=True)}/s"
-percentage_done = size_downloaded / total_content_length * 100
+data_missing = total_content_length - size_downloaded_for_progress_tracking
+time_til_completion = 1 / dl_speed_so_far * data_missing
+human_time_til_completion = humanize.naturaldelta(d.timedelta(seconds=time_til_completion))
+percentage_done = size_downloaded_for_progress_tracking / total_content_length * 100
 human_pct = "{:.1f}".format(percentage_done)
-human_size_dl = humanize.naturalsize(size_downloaded, binary=True)
+human_size_dl = humanize.naturalsize(size_downloaded_for_progress_tracking, binary=True)
 human_total_dl = humanize.naturalsize(total_content_length, binary=True)
 log.debug(f"[thread] Downloaded {human_pct}% ({human_size_dl}/{human_total_dl} "
-f"at an average {human_dl_speed_so_far})")
+f"at an average {human_dl_speed_so_far}, approximately {human_time_til_completion} "
+f"left til completion.)")
 if done_event.is_set():
 log.info(f"""{shorthand_uuid} Download of "{show_name}" interrupted""")
 return
 log.info(f"""{shorthand_uuid} Download of "{show_name}" done""")
 except IOError:
 log.error(f"{shorthand_uuid} IOError during download. Aborting this download thread ...")