If a partial download exists in the temp dir, we resume it

hygienic-books 2022-03-20 02:34:15 +01:00
parent 380fb4bf2e
commit d70766bae0


@@ -568,6 +568,7 @@ def copy_url(
     max_quality_url = video_metadata["url"]
     filename = get_filename(section_name, config_obj, show, max_quality_url, shorthand_uuid)
     resume_header = {}
+    tmp_file_open_mode = "wb"
     tmp_file_size = 0
     tmp_path = os.path.join(tmp_dir, filename)
     dest_path = os.path.join(dest_dir, filename)
@@ -578,12 +579,14 @@ def copy_url(
         tmp_file_size = os.path.getsize(tmp_path)
         log.debug(f"{shorthand_uuid} Temporary file '{tmp_path}' exists likely from a previous incomplete "
                   f"download attempt, size is {humanize.naturalsize(tmp_file_size, binary=True)}. Resuming ...")
+        tmp_file_open_mode = "ab"
     try:
-        with open(tmp_path, "wb") as tmp_file:
+        with open(tmp_path, tmp_file_open_mode) as tmp_file:
             log.info(f"""{shorthand_uuid} Downloading "{show_name}" ...""")
             if tmp_file_size > 0:
                 resume_header = {"range": f"bytes={tmp_file_size}-"}
-                log.info(f"resume_header: {resume_header}")
+                log.debug(f"resume_header: {resume_header}")
+                size_downloaded_for_progress_tracking += tmp_file_size
             r = requests.get(max_quality_url, headers=resume_header, stream=True)
             for chunk in r.iter_content(32768):
                 size_downloaded_for_progress_tracking += len(chunk)
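
For reference, below is a minimal, self-contained sketch of the same resume technique outside of copy_url: reopen the partial file in append mode and send an HTTP Range header asking only for the missing bytes. The function name resume_download, its parameters, and the return value are illustrative assumptions; only the open-mode switch, the "range" header, and the 32768-byte chunks come from the diff itself.

# Minimal sketch of the resume logic introduced above. resume_download and
# its parameters are illustrative, not part of the repository.
import os

import requests


def resume_download(url: str, tmp_path: str) -> int:
    resume_header = {}
    tmp_file_open_mode = "wb"
    tmp_file_size = 0

    # A leftover file from an earlier attempt means we append instead of
    # overwriting and only request the bytes we are still missing.
    if os.path.exists(tmp_path):
        tmp_file_size = os.path.getsize(tmp_path)
        tmp_file_open_mode = "ab"

    if tmp_file_size > 0:
        # HTTP header names are case-insensitive, so "range" works like "Range".
        resume_header = {"range": f"bytes={tmp_file_size}-"}

    size_downloaded = tmp_file_size
    with open(tmp_path, tmp_file_open_mode) as tmp_file:
        r = requests.get(url, headers=resume_header, stream=True)
        r.raise_for_status()
        for chunk in r.iter_content(32768):
            tmp_file.write(chunk)
            size_downloaded += len(chunk)
    return size_downloaded

One caveat: a server that ignores the Range header answers 200 with the full body, which would then be appended to the partial file; checking for a 206 Partial Content status before writing would avoid that corruption.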