def worker():
    while not done:
        job = queue.get()
        job.status = Job.RUNNING
        jobshandler.put((Actions.SET_STATUS, (job.id, job.status)))
        if job.type == JobType.YDL_DOWNLOAD:
            output = io.StringIO()  # FIXME initialize this ?
            stdout_thread = Thread(target=download_log_update,
                                   args=(job, output))
            stdout_thread.start()
            try:
                job.log = Job.clean_logs(
                    download(job.url, {'format': job.format}, output, job.id))
                job.status = Job.COMPLETED
            except Exception as e:
                job.status = Job.FAILED
                job.log += str(e)
                print("Exception during download task:\n" + str(e))
            stdout_thread.join()
        elif job.type == JobType.YDL_UPDATE:
            rc, log = update()
            job.log = Job.clean_logs(log)
            job.status = Job.COMPLETED if rc == 0 else Job.FAILED
        jobshandler.put((Actions.UPDATE, job))
        queue.task_done()
def resume_pending():
    db = JobsDB(readonly=False)
    jobs = db.get_all()
    not_endeds = [
        job for job in jobs
        if job['status'] == "Pending" or job['status'] == 'Running'
    ]
    for pending in not_endeds:
        job = Job(pending["name"], Job.PENDING, "Queue stopped",
                  pending["format"])
        job.id = pending["id"]
        jobshandler.put((Actions.RESUME, job))
def resume_pending():
    db = JobsDB(readonly=False)
    jobs = db.get_all()
    not_endeds = [job for job in jobs
                  if job['status'] == "Pending" or job['status'] == 'Running']
    for pending in not_endeds:
        if int(pending["type"]) == JobType.YDL_UPDATE:
            jobshandler.put((Actions.SET_STATUS, (pending["id"], Job.FAILED)))
        else:
            job = Job(pending["name"], Job.PENDING, "Queue stopped",
                      int(pending["type"]), pending["format"], pending["url"])
            job.id = pending["id"]
            jobshandler.put((Actions.RESUME, job))
async def ydl_update(request):
    job = Job("Youtube-dl Update", Job.PENDING, "", JobType.YDL_UPDATE,
              None, None)
    request.app.state.jobshandler.put((Actions.INSERT, job))
    return JSONResponse({
        "success": True,
    })
def api_queue_download():
    if (app_config['ydl_server'].get('update_poll_delay_min')
            and (datetime.now() - ydlhandler.ydl_last_update).seconds
            > app_config['ydl_server'].get('update_poll_delay_min') * 60):
        job = Job("Youtube-dl Update", Job.PENDING, "", JobType.YDL_UPDATE,
                  None, None)
        jobshandler.put((Actions.INSERT, job))
    url = request.forms.get("url")
    options = {'format': request.forms.get("format")}
    if not url:
        return {"success": False, "error": "'url' query parameter omitted"}
    job = Job(url, Job.PENDING, "", JobType.YDL_DOWNLOAD,
              request.forms.get("format"), url)
    jobshandler.put((Actions.INSERT, job))
    print("Added url " + url + " to the download queue")
    return {"success": True, "url": url, "options": options}
def twl_update():
    TWL_LOOKBACK_TIME_STRING = request.query.TWL_LOOKBACK_TIME_STRING or None
    job = Job("ToWatchList Update", Job.PENDING, "", JobType.TWL_DOWNLOAD,
              TWL_LOOKBACK_TIME_STRING, None)
    jobshandler.put((Actions.INSERT, job))
    return {
        "success": True,
        "TWL_LOOKBACK_TIME_STRING": TWL_LOOKBACK_TIME_STRING
    }
def download(job, request_options, output):
    ydl_opts = get_ydl_options(app_config.get('ydl_options', {}),
                               request_options)
    cmd = get_ydl_full_cmd(ydl_opts, job.url)
    cmd.extend(['-J', '--flat-playlist'])
    proc = Popen(cmd, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    if proc.wait() != 0:
        job.log = Job.clean_logs(stderr.decode())
        job.status = Job.FAILED
        print("Error during download task:\n" + job.log)
        return
    metadata = json.loads(stdout)
    jobshandler.put((Actions.SET_NAME, (job.id,
                                        metadata.get('title', job.url))))
    if metadata.get('_type') == 'playlist':
        ydl_opts.update({
            'output': app_config['ydl_server'].get('output_playlist',
                                                   ydl_opts.get('output'))
        })
    cmd = get_ydl_full_cmd(ydl_opts, job.url)
    proc = Popen(cmd, stdout=PIPE, stderr=STDOUT)
    stdout_thread = Thread(target=download_log_update,
                           args=(job, proc, output))
    stdout_thread.start()
    if proc.wait() == 0:
        read_proc_stdout(proc, output)
        job.log = Job.clean_logs(output.getvalue())
        job.status = Job.COMPLETED
    else:
        read_proc_stdout(proc, output)
        job.log = Job.clean_logs(output.getvalue())
        job.status = Job.FAILED
        print("Error during download task:\n" + output.getvalue())
    stdout_thread.join()
def api_queue_download(): url = request.forms.get("url") options = {'format': request.forms.get("format")} if not url: return {"success": False, "error": "'url' query parameter omitted"} job = Job(url, Job.PENDING, "", JobType.YDL_DOWNLOAD, request.forms.get("format"), url) jobshandler.put((Actions.INSERT, job)) print("Added url " + url + " to the download queue") return {"success": True, "url": url, "options": options}
def download(self, job, request_options, output):
    ydl_opts = self.get_ydl_options(self.app_config.get("ydl_options", {}),
                                    request_options)
    cmd = self.get_ydl_full_cmd(ydl_opts, job.url)
    rc, metadata = self.fetch_metadata(job.url)
    if rc != 0:
        job.log = Job.clean_logs(metadata)
        job.status = Job.FAILED
        raise Exception(job.log)
    self.jobshandler.put(
        (Actions.SET_NAME, (job.id, metadata.get("title", job.url))))
    if metadata.get("_type") == "playlist":
        ydl_opts.update({
            "output": self.app_config["ydl_server"].get(
                "output_playlist", ydl_opts.get("output"))
        })
        cmd = self.get_ydl_full_cmd(ydl_opts, job.url)
    proc = Popen(cmd, stdout=PIPE, stderr=STDOUT)
    stdout_thread = Thread(target=self.download_log_update,
                           args=(job, proc, output))
    stdout_thread.start()
    if proc.wait() == 0:
        read_proc_stdout(proc, output)
        job.log = Job.clean_logs(output.getvalue())
        job.status = Job.COMPLETED
    else:
        read_proc_stdout(proc, output)
        job.log = Job.clean_logs(output.getvalue())
        job.status = Job.FAILED
        print("Error during download task:\n" + output.getvalue())
    stdout_thread.join()
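# The method-based download() above delegates its metadata probe to
# self.fetch_metadata(), which is not shown in this section. Below is a
# minimal sketch of such a helper, assuming it works like the inline probe
# in the function-based download() (run the youtube-dl command with
# -J --flat-playlist and parse the JSON) and returns the (rc, payload)
# pair the caller expects. The name and signature are assumptions for
# illustration, not the project's actual implementation.
import json
from subprocess import PIPE, Popen


def fetch_metadata_sketch(url, base_cmd):
    # base_cmd is assumed to be the argument list built by get_ydl_full_cmd()
    cmd = list(base_cmd) + ['-J', '--flat-playlist']
    proc = Popen(cmd, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        # mirror the (rc, error log) convention used by the caller on failure
        return proc.returncode, stderr.decode()
    return 0, json.loads(stdout)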
def resume_pending(self):
    db = JobsDB(readonly=False)
    jobs = db.get_all(self.app_config["ydl_server"].get(
        "max_log_entries", 100))
    not_endeds = [
        job for job in jobs
        if job["status"] == "Pending" or job["status"] == "Running"
    ]
    for pending in not_endeds:
        if int(pending["type"]) == JobType.YDL_UPDATE:
            self.jobshandler.put(
                (Actions.SET_STATUS, (pending["id"], Job.FAILED)))
        else:
            job = Job(
                pending["name"],
                Job.PENDING,
                "Queue stopped",
                int(pending["type"]),
                pending["format"],
                pending["url"],
            )
            job.id = pending["id"]
            self.jobshandler.put((Actions.RESUME, job))
async def api_queue_download(request):
    data = await request.form()
    if (app_config['ydl_server'].get('update_poll_delay_min')
            and (datetime.now() - app_config['ydl_last_update']).seconds
            > app_config['ydl_server'].get('update_poll_delay_min') * 60):
        job = Job("Youtube-dl Update", Job.PENDING, "", JobType.YDL_UPDATE,
                  None, None)
        request.app.state.jobshandler.put((Actions.INSERT, job))
    url = data.get("url")
    options = {'format': data.get("format")}
    if not url:
        return JSONResponse({
            "success": False,
            "error": "'url' query parameter omitted"
        })
    job = Job(url, Job.PENDING, "", JobType.YDL_DOWNLOAD, data.get("format"),
              url)
    request.app.state.jobshandler.put((Actions.INSERT, job))
    print("Added url " + url + " to the download queue")
    return JSONResponse({"success": True, "url": url, "options": options})
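# Example client call for the async endpoint above, using httpx (already
# used elsewhere in this code for the ToWatchList API). It submits the two
# form fields that api_queue_download() reads via request.form(). The route
# path "/api/downloads", host, and port are assumptions for illustration;
# use whatever route the application actually binds this handler to.
import httpx

resp = httpx.post(
    "http://localhost:8080/api/downloads",  # assumed route and port
    data={
        "url": "https://www.youtube.com/watch?v=XXXXXXXXXXX",
        "format": "best",
    },
)
print(resp.json())  # expected shape: {"success": True, "url": ..., "options": ...}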
def worker():
    while not done:
        job = queue.get()
        job.status = Job.RUNNING
        jobshandler.put((Actions.UPDATE, job))
        output = io.StringIO()  # FIXME initialize this ?
        stdout_thread = Thread(target=download_log_update,
                               args=(job, output))
        stdout_thread.start()
        try:
            job.log = Job.clean_logs(
                download(job.name, {'format': job.format}, output))
            job.status = Job.COMPLETED
        except Exception as e:
            job.status = Job.FAILED
            job.log += str(e)
            print("Exception during download task:\n" + str(e))
        stdout_thread.join()
        jobshandler.put((Actions.UPDATE, job))
        queue.task_done()
def worker():
    while not done:
        job = queue.get()
        job.status = Job.RUNNING
        jobshandler.put((Actions.SET_STATUS, (job.id, job.status)))
        if job.type == JobType.YDL_DOWNLOAD:
            output = io.StringIO()
            try:
                download(job, {'format': job.format}, output)
            except Exception as e:
                job.status = Job.FAILED
                job.log = "Error during download task"
                print("Error during download task:\n{}\n{}".format(
                    type(e).__name__, str(e)))
        elif job.type == JobType.YDL_UPDATE:
            rc, log = update()
            job.log = Job.clean_logs(log)
            job.status = Job.COMPLETED if rc == 0 else Job.FAILED
        jobshandler.put((Actions.UPDATE, job))
        queue.task_done()
def twldownload(url, request_options, output, job_id):
    TWL_API_TOKEN = os.getenv("TWL_API_TOKEN", default="unset").strip()
    assert TWL_API_TOKEN != "unset", \
        "ERROR: TWL_API_TOKEN should be set in env (and is not)"
    ydl_opts = ChainMap(os.environ, app_defaults)
    lookbackStr = ydl_opts['TWL_LOOKBACK_TIME_STRING']
    if request_options and 'format' in request_options \
            and request_options['format']:
        # use 'format' as 'TWL_LOOKBACK_TIME_STRING' here
        lookbackStr = request_options['format']
    r = httpx.get(
        f"https://towatchlist.com/api/v1/marks?since={lookbackStr}&uid={TWL_API_TOKEN}"
    )
    r.raise_for_status()
    myMarks = r.json()['marks']
    output_dir = Path(ydl_opts['YDL_OUTPUT_TEMPLATE']).parent
    with open(os.path.join(output_dir, '.twl.json'), 'w') as filehandle:
        json.dump(myMarks, filehandle)
    downloadQueueAdd = 0
    removedFiles = 0
    if 'YDL_WRITE_NFO' in ydl_opts and ydl_opts['YDL_WRITE_NFO']:
        targetNumberOfFiles = 2
    else:
        targetNumberOfFiles = 1
    for i in range(len(myMarks)):
        # set some values we'll use below
        mmeta = {}  # mark metadata dict
        mmeta['videoURL'] = myMarks[i]['Mark']['source_url']
        mmeta['title'] = myMarks[i]['Mark']['title']
        mmeta['video_id'] = myMarks[i]['Mark']['video_id']
        mmeta['channel_title'] = myMarks[i]['Mark']['channel_title']
        mmeta['duration'] = int(myMarks[i]['Mark']['duration']) / 60.0
        mmeta['created'] = myMarks[i]['Mark']['created']
        existingFiles = listFilesFromID(mmeta['video_id'],
                                        output_dir=output_dir)
        if (myMarks[i]['Mark']['watched']) or (myMarks[i]['Mark']['delflag']):
            # it's been marked as watched, delete the local copy
            for filename in existingFiles:
                os.remove(filename)
                removedFiles += 1
            continue
        if len(existingFiles) >= targetNumberOfFiles:
            # this file has probably already been downloaded, skip!
            continue
        try:
            # a bit more parsing for Kodi
            mmeta['description'] = strip_tags(myMarks[i]['Mark']['comment'])
        except:
            mmeta['description'] = '-Failed to parse-'
        downloadQueueAdd += 1
        job = Job(mmeta['title'], Job.PENDING, "", JobType.YDL_DOWNLOAD,
                  ydl_opts['YDL_FORMAT'], mmeta['videoURL'])
        jobshandler.put((Actions.INSERT, job))
    if removedFiles > 0:
        # TODO: clean Kodi library
        pass
    return f"Processed {len(myMarks)} Marks, Queued {downloadQueueAdd}, Removed {removedFiles} vids/nfos"
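# twldownload() above relies on a listFilesFromID() helper that is not
# defined in this section. A minimal sketch, assuming it simply globs the
# output directory for any file whose name contains the video id (the media
# file plus an optional .nfo); the real helper may match more precisely.
from pathlib import Path


def listFilesFromID_sketch(video_id, output_dir):
    # return matching file paths as strings so os.remove() accepts them
    return [str(p) for p in Path(output_dir).glob(f"*{video_id}*")]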
def download_log_update(job, proc, strio):
    while job.status == Job.RUNNING:
        read_proc_stdout(proc, strio)
        job.log = Job.clean_logs(strio.getvalue())
        jobshandler.put((Actions.SET_LOG, (job.id, job.log)))
        sleep(3)
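# Both download() variants and download_log_update() above call a
# read_proc_stdout() helper that is not defined in this section. A minimal
# sketch, assuming it drains whatever the youtube-dl subprocess has written
# so far into the shared StringIO buffer; the real helper may read
# differently (for example line by line or in a loop until EOF).
def read_proc_stdout_sketch(proc, strio):
    data = proc.stdout.read1(4096)  # read whatever is currently buffered
    if data:
        strio.write(data.decode(errors='replace'))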
def download_log_update(job, stringio):
    while job.status == Job.RUNNING:
        job.log = Job.clean_logs(stringio.getvalue())
        jobshandler.put((Actions.SET_LOG, (job.id, job.log)))
        sleep(5)
def ydl_update(): job = Job("Youtube-dl Update", Job.PENDING, "", JobType.YDL_UPDATE, None, None) jobshandler.put((Actions.INSERT, job)) return {"success": True}
    return ydlhandler.fetch_metadata(url)


@app.route("/api/youtube-dl/update", method="GET")
def ydl_update():
    job = Job("Youtube-dl Update", Job.PENDING, "", JobType.YDL_UPDATE,
              None, None)
    jobshandler.put((Actions.INSERT, job))
    return {"success": True}


JobsDB.check_db_latest()
JobsDB.init_db()

ydlhandler.start()
print("Started download thread")
jobshandler.start(ydlhandler.queue)
print("Started jobs manager thread")

print("Updating youtube-dl to the newest version")
job = Job("Youtube-dl Update", Job.PENDING, "", JobType.YDL_UPDATE,
          None, None)
jobshandler.put((Actions.INSERT, job))

ydlhandler.resume_pending()

app.run(host=app_config['ydl_server'].get('host'),
        port=app_config['ydl_server'].get('port'),
        debug=app_config['ydl_server'].get('debug', False))

ydlhandler.finish()
jobshandler.finish()
ydlhandler.join()
jobshandler.join()