Calculate the total downloadable content length after deciding which downloads are needed

This commit is contained in:
hygienic-books 2022-03-20 02:33:32 +01:00
parent e395309011
commit 380fb4bf2e

View File

@ -551,7 +551,6 @@ def copy_url(
config_obj: configparser.ConfigParser(), config_obj: configparser.ConfigParser(),
show: type_def.mvw_json_response.Show, show: type_def.mvw_json_response.Show,
video_metadata: dict, video_metadata: dict,
total_content_length: int,
state_file_abs_path: str, state_file_abs_path: str,
show_name: str, show_name: str,
job_uuid: str, job_uuid: str,
@ -667,6 +666,7 @@ def download_media(
global download_start_time global download_start_time
global download_last_update_time global download_last_update_time
global total_content_length
dl_threads = config_obj.getint(section_name, "dl_threads") dl_threads = config_obj.getint(section_name, "dl_threads")
state_file_abs_path = get_state_file_abs_path(section_name, config_obj) state_file_abs_path = get_state_file_abs_path(section_name, config_obj)
@ -694,27 +694,35 @@ def download_media(
download_start_time = download_last_update_time download_start_time = download_last_update_time
update_interval = config_obj.getint(section_name, "dl_progress_update_interval") update_interval = config_obj.getint(section_name, "dl_progress_update_interval")
log.debug(f"""Will provide updates every {update_interval} {p.plural("second", update_interval)}""") log.debug(f"""Will provide updates every {update_interval} {p.plural("second", update_interval)}""")
with lock: for result in json_obj.result.results.copy():
state_file_none_or_valid_json(state_file_abs_path) show_name = f"{result.topic} - {result.title}"
for result in json_obj.result.results.copy(): future = None
show_name = f"{result.topic} - {result.title}" if not is_already_downloaded(result, json_state, show_name):
if not is_already_downloaded(result, state_file_abs_path, show_name): max_quality_url = get_max_quality_url(result)
job_uuid = str(uuid.uuid4()) content_length = get_content_length(max_quality_url)
shorthand_uuid = f"[{job_uuid[:2]}..{job_uuid[-2:]}]" video_metadata[result.id] = {"url": max_quality_url, "content_length": content_length}
log.debug(f"""Queuing "{show_name}" for download ...""") total_content_length += video_metadata[result.id]["content_length"]
pool.submit( log.debug(f"Total download size upped to "
copy_url, f"{humanize.naturalsize(total_content_length, binary=True)}")
section_name,
config_obj, job_uuid = str(uuid.uuid4())
result, shorthand_uuid = f"[{job_uuid[:2]}..{job_uuid[-2:]}]"
video_metadata[result.id], log.debug(f"{shorthand_uuid} Job UUID {job_uuid} generated, shorthand is {shorthand_uuid}")
video_metadata["total_content_length"], log.debug(f"""{shorthand_uuid} Queuing "{show_name}" for download ...""")
state_file_abs_path, future = pool.submit(
show_name, copy_url,
job_uuid, section_name,
shorthand_uuid, config_obj,
tmp_dir, result,
dest_dir) video_metadata[result.id],
state_file_abs_path,
show_name,
job_uuid,
shorthand_uuid,
tmp_dir,
dest_dir)
if future is not None:
future.result()
if __name__ == '__main__': if __name__ == '__main__':