def get_conf(name: str) -> Union[str, int, bool]:
    """Return the value of a configuration key, falling back to the built-in defaults."""
    check_init()
    if name in _config:
        return _config[name]
    if name in C.DEFAULT_CONFIG:
        log.warning("hydownloader", f'Configuration key not found in user config, default value was used: {name}')
        return C.DEFAULT_CONFIG[name]
    # log.fatal is expected to raise or exit, so no value is returned past this point
    log.fatal("hydownloader", f'Invalid configuration key: {name}')
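
# Usage sketch for get_conf (the key name below is illustrative, assuming it
# exists in C.DEFAULT_CONFIG): a user-set value wins, otherwise the default is
# returned with a warning, and an unknown key is a fatal error.
#
#     port = get_conf('daemon.port')  # user value if set, built-in default otherwise
#     get_conf('no.such.key')         # logged as fatal, no value is returned
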
def api_worker(path: str, debug: bool) -> None:
    global _srv
    if db.get_conf('daemon.ssl') and os.path.isfile(path+"/server.pem"):
        log.info("hydownloader", "Starting daemon (with SSL)...")
        _srv = SSLWSGIRefServer(path+"/server.pem", host=db.get_conf('daemon.host'), port=db.get_conf('daemon.port'))
        bottle.run(server=_srv, debug=debug)
    else:
        if db.get_conf('daemon.ssl'):
            log.warning("hydownloader", "SSL enabled in config, but no server.pem file found in the db folder, continuing without SSL...")
        log.info("hydownloader", "Starting daemon...")
        _srv = SSLWSGIRefServer("", host=db.get_conf('daemon.host'), port=db.get_conf('daemon.port'))
        bottle.run(server=_srv, debug=debug)

def start(path: str, debug: bool) -> None:
    log.init(path, debug)
    db.init(path)

    process_additional_data()

    subs_thread = threading.Thread(target=subscription_worker, name='Subscription worker', daemon=True)
    subs_thread.start()

    url_thread = threading.Thread(target=url_queue_worker, name='Single URL queue worker', daemon=True)
    url_thread.start()

    # Delegate to api_worker instead of duplicating its server startup logic.
    # This also stores the server in the module-level _srv, so shutdown() can
    # reach it (the original inline copy only kept it in a local variable).
    api_worker(path, debug)
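
# Entry-point sketch (illustrative; the project's actual CLI wiring may differ).
# start() blocks in bottle.run() via api_worker, so it is called last:
#
#     if __name__ == '__main__':
#         start(path='./db', debug=False)  # './db' is a hypothetical db folder
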
def subscription_worker() -> None:
    """Background thread: periodically checks due subscriptions and downloads new files."""
    global _sub_worker_ended_flag
    try:
        log.info("hydownloader", "Starting subscription worker thread...")
        with _worker_lock:
            _sub_worker_ended_flag = False
        while True:
            time.sleep(2)
            with _worker_lock:
                if _end_threads_flag:
                    break
            subs_due = db.get_due_subscriptions()
            if not subs_due:
                with _worker_lock:
                    if _sub_worker_paused_flag:
                        set_subscription_worker_status("paused")
                    else:
                        set_subscription_worker_status("nothing to do: checked for due subscriptions, found none")
            sub = subs_due[0] if subs_due else None
            # Process due subscriptions one by one until none remain
            # or the worker is stopped or paused.
            while sub:
                with _worker_lock:
                    if _end_threads_flag:
                        break
                    if _sub_worker_paused_flag:
                        set_subscription_worker_status("paused")
                        break
                initial_check = sub['last_check'] is None
                url = urls.subscription_data_to_url(sub['downloader'], sub['keywords'])
                check_started_time = time.time()
                status_msg = f"checking subscription: {sub['id']} (downloader: {sub['downloader']}, keywords: {sub['keywords']})"
                set_subscription_worker_status(status_msg)
                log.info(f"subscription-{sub['id']}", status_msg.capitalize())
                if initial_check:
                    log.info(f"subscription-{sub['id']}", "This is the first check for this subscription")
                result = gallery_dl_utils.run_gallery_dl(
                    url=url,
                    ignore_anchor=False,
                    metadata_only=False,
                    log_file=db.get_rootpath()+f"/logs/subscription-{sub['id']}-gallery-dl-latest.txt",
                    old_log_file=db.get_rootpath()+f"/logs/subscription-{sub['id']}-gallery-dl-old.txt",
                    console_output_file=db.get_rootpath()+f"/temp/subscription-{sub['id']}-gallery-dl-output.txt",
                    unsupported_urls_file=db.get_rootpath()+f"/logs/subscription-{sub['id']}-unsupported-urls-gallery-dl-latest.txt",
                    old_unsupported_urls_file=db.get_rootpath()+f"/logs/subscription-{sub['id']}-unsupported-urls-gallery-dl-old.txt",
                    overwrite_existing=False,
                    filter_=sub['filter'],
                    chapter_filter=None,
                    subscription_mode=True,
                    abort_after=sub['abort_after'],
                    max_file_count=sub['max_files_initial'] if initial_check else sub['max_files_regular']
                )
                if result:
                    log.warning(f"subscription-{sub['id']}", "Error: "+result)
                else:
                    sub['last_successful_check'] = check_started_time
                # last_check is always updated; last_successful_check only on success
                sub['last_check'] = check_started_time
                new_files, skipped_files = process_additional_data(subscription_id=sub['id'])
                check_ended_time = time.time()
                db.add_subscription_check(sub['id'], new_files=new_files, already_seen_files=skipped_files, time_started=check_started_time, time_finished=check_ended_time, status=result)
                db.add_or_update_subscriptions([sub])
                status_msg = f"finished checking subscription: {sub['id']} (downloader: {sub['downloader']}, keywords: {sub['keywords']}), new files: {new_files}, skipped: {skipped_files}"
                set_subscription_worker_status(status_msg)
                log.info(f"subscription-{sub['id']}", status_msg.capitalize())
                subs_due = db.get_due_subscriptions()
                sub = subs_due[0] if subs_due else None
            with _worker_lock:
                if _end_threads_flag:
                    break
        with _worker_lock:
            if _end_threads_flag:
                log.info("hydownloader", "Stopping subscription worker thread")
            _sub_worker_ended_flag = True
    except Exception as e:
        log.fatal("hydownloader", "Uncaught exception in subscription worker thread", e)
        shutdown()
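
# The set_*_worker_status helpers are not shown in this excerpt. A minimal
# sketch of what the workers assume (just recording the latest status string;
# the real implementation may also track timestamps or persist the status):
#
#     _sub_worker_status = "not started"
#
#     def set_subscription_worker_status(status: str) -> None:
#         global _sub_worker_status
#         _sub_worker_status = status
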
def url_queue_worker() -> None:
    """Background thread: downloads queued single URLs one by one."""
    global _url_worker_ended_flag
    try:
        log.info("hydownloader", "Starting single URL queue worker thread...")
        with _worker_lock:
            _url_worker_ended_flag = False
        while True:
            time.sleep(2)
            with _worker_lock:
                if _end_threads_flag:
                    break
            urls_to_dl = db.get_urls_to_download()
            if not urls_to_dl:
                with _worker_lock:
                    if _url_worker_paused_flag:
                        set_url_worker_status("paused")
                    else:
                        set_url_worker_status("nothing to do: checked for queued URLs, found none")
            urlinfo = urls_to_dl[0] if urls_to_dl else None
            # Process queued URLs until the queue is empty
            # or the worker is stopped or paused.
            while urlinfo:
                with _worker_lock:
                    if _end_threads_flag:
                        break
                    if _url_worker_paused_flag:
                        set_url_worker_status("paused")
                        break
                check_time = time.time()
                status_msg = f"downloading URL: {urlinfo['url']}"
                set_url_worker_status(status_msg)
                log.info("single url downloader", status_msg.capitalize())
                result = gallery_dl_utils.run_gallery_dl(
                    url=urlinfo['url'],
                    ignore_anchor=urlinfo['ignore_anchor'],
                    metadata_only=urlinfo['metadata_only'],
                    log_file=db.get_rootpath()+f"/logs/single-urls-{urlinfo['id']}-gallery-dl-latest.txt",
                    old_log_file=db.get_rootpath()+f"/logs/single-urls-{urlinfo['id']}-gallery-dl-old.txt",
                    console_output_file=db.get_rootpath()+f"/temp/single-url-{urlinfo['id']}-gallery-dl-output.txt",
                    unsupported_urls_file=db.get_rootpath()+f"/logs/single-urls-{urlinfo['id']}-unsupported-urls-gallery-dl-latest.txt",
                    old_unsupported_urls_file=db.get_rootpath()+f"/logs/single-urls-{urlinfo['id']}-unsupported-urls-gallery-dl-old.txt",
                    overwrite_existing=urlinfo['overwrite_existing'],
                    filter_=urlinfo['filter'],
                    chapter_filter=None,
                    subscription_mode=False,
                    max_file_count=urlinfo['max_files']
                )
                if result:
                    log.warning("single url downloader", f"Error while downloading {urlinfo['url']}: {result}")
                    urlinfo['status'] = 1
                    urlinfo['status_text'] = result
                else:
                    urlinfo['status'] = 0
                    urlinfo['status_text'] = 'ok'
                urlinfo['time_processed'] = check_time
                new_files, skipped_files = process_additional_data(url_id=urlinfo['id'])
                urlinfo['new_files'] = new_files
                urlinfo['already_seen_files'] = skipped_files
                db.add_or_update_urls([urlinfo])
                status_msg = f"finished checking URL: {urlinfo['url']}, new files: {new_files}, skipped: {skipped_files}"
                set_url_worker_status(status_msg)
                log.info("single url downloader", status_msg.capitalize())
                urls_to_dl = db.get_urls_to_download()
                urlinfo = urls_to_dl[0] if urls_to_dl else None
            with _worker_lock:
                if _end_threads_flag:
                    break
        with _worker_lock:
            if _end_threads_flag:
                log.info("hydownloader", "Stopping single URL queue worker thread")
            _url_worker_ended_flag = True
    except Exception as e:
        log.fatal("hydownloader", "Uncaught exception in URL worker thread", e)
        shutdown()
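
# shutdown() is referenced above but not shown. A minimal sketch of the
# cooperative-stop contract the workers rely on (the body below is an
# assumption; only _end_threads_flag, _worker_lock and _srv come from this
# excerpt): set the end flag under the lock, then stop the API server.
#
#     def shutdown() -> None:
#         global _end_threads_flag
#         with _worker_lock:
#             _end_threads_flag = True  # workers poll this roughly every 2 seconds and exit
#         if _srv is not None:
#             _srv.stop()  # assumes the SSLWSGIRefServer wrapper exposes a stop() method
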
def route_kill_current_url() -> dict:
    check_access()
    gallery_dl_utils.stop_process('url worker')
    log.warning("hydownloader", "Current URL download force-stopped via API")
    return {'status': True}

def route_kill_current_sub() -> dict:
    check_access()
    gallery_dl_utils.stop_process('sub worker')
    log.warning("hydownloader", "Current subscription check force-stopped via API")
    return {'status': True}
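
# Example API client call (illustrative: the route path and access-key scheme
# are hypothetical, since the bottle route registrations and check_access()
# are not part of this excerpt; host/port come from daemon.host/daemon.port):
#
#     import requests
#     requests.post('http://127.0.0.1:53211/kill_current_sub',
#                   params={'access_key': 'hypothetical-key'})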