def parring(nzo, workdir):
    """ Perform par processing. Returns: (par_error, re_add)

    Runs par2 verification/repair on every not-yet-verified par set of the
    job, falling back to SFV checking where configured. Per-set results are
    persisted to VERIFIED_FILE so already-verified sets are skipped on a
    later pass.

    nzo: the job being post-processed (NzbObject)
    workdir: directory holding the downloaded files

    Returns (par_error, re_add):
        par_error -- True when at least one set failed verification/repair
        re_add    -- True when par2_repair requested the job be re-queued
                     (the re-queueing happens below)
    """
    # NOTE(review): `assert` is stripped under `python -O`; this is a debug
    # aid only, not input validation.
    assert isinstance(nzo, sabnzbd.nzbstuff.NzbObject)
    filename = nzo.final_name
    growler.send_notification(T('Post-processing'), nzo.final_name, 'pp')
    logging.info('Par2 check starting on %s', filename)

    # Get verification status of sets from a previous run (may be empty)
    verified = sabnzbd.load_data(VERIFIED_FILE, nzo.workpath, remove=False) or {}

    # Collect the par files; copy so the job's own table isn't mutated
    if nzo.partable:
        par_table = nzo.partable.copy()
    else:
        par_table = {}
    repair_sets = par_table.keys()

    re_add = False
    par_error = False

    if repair_sets:
        for setname in repair_sets:
            # Skip sample sets when the user chose to ignore samples
            if cfg.ignore_samples() > 0 and 'sample' in setname.lower():
                continue
            # Only process sets not verified on a previous pass
            if not verified.get(setname, False):
                logging.info("Running repair on set %s", setname)
                parfile_nzf = par_table[setname]
                # Par file may already be gone; skip the set in that case
                if not os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)):
                    continue
                need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname)
                re_add = re_add or need_re_add
                # Repair failed and no re-queue requested: try SFV instead
                if not res and not need_re_add and cfg.sfv_check():
                    res = try_sfv_check(nzo, workdir, setname)
                verified[setname] = res
                par_error = par_error or not res
    else:
        # No par2 sets at all: SFV check is the only verification option
        logging.info("No par2 sets for %s", filename)
        nzo.set_unpack_info('Repair', T('[%s] No par2 sets') % unicoder(filename))
        if cfg.sfv_check():
            par_error = not try_sfv_check(nzo, workdir, '')
            verified[''] = not par_error

    if re_add:
        # Put the job back into the queue and resume downloading
        logging.info('Readded %s to queue', filename)
        if nzo.priority != TOP_PRIORITY:
            nzo.priority = REPAIR_PRIORITY
        sabnzbd.nzbqueue.add_nzo(nzo)
        sabnzbd.downloader.Downloader.do.resume_from_postproc()

    # Remember verification results for a possible next pass
    sabnzbd.save_data(verified, VERIFIED_FILE, nzo.workpath)
    logging.info('Par2 check finished on %s', filename)
    return par_error, re_add
def __flush_article_to_disk(article: Article, data):
    """Persist a decoded article's data into its job's admin folder."""
    job = article.nzf.nzo
    if not job.is_gone():
        # Deleted jobs are skipped. For live jobs the save is silent:
        # the destination folder may already be removed when this flush
        # arrives after completion of the NZO.
        sabnzbd.save_data(data, article.get_art_id(), job.admin_path, do_pickle=False, silent=True)
def parring(nzo, workdir):
    """ Perform par processing. Returns: (par_error, re_add)

    Like the plain par2 pass, but also tries a set whose main par file is
    missing as long as extra par files (parfile_nzf.extrapars) are
    available, and tells par2_repair when there is only a single set.

    Returns (par_error, re_add):
        par_error -- True when at least one set failed verification/repair
        re_add    -- True when par2_repair requested the job be re-queued
    """
    # NOTE(review): `assert` is stripped under `python -O`; debug aid only.
    assert isinstance(nzo, sabnzbd.nzbstuff.NzbObject)
    filename = nzo.final_name
    growler.send_notification(T('Post-processing'), nzo.final_name, 'pp')
    logging.info('Par2 check starting on %s', filename)

    # Get verification status of sets from a previous run (may be empty)
    verified = sabnzbd.load_data(VERIFIED_FILE, nzo.workpath, remove=False) or {}

    # Collect the par files; copy so the job's own table isn't mutated
    if nzo.partable:
        par_table = nzo.partable.copy()
    else:
        par_table = {}
    repair_sets = par_table.keys()

    re_add = False
    par_error = False
    # Single-set jobs are flagged so par2_repair can act accordingly
    single = len(repair_sets) == 1

    if repair_sets:
        for setname in repair_sets:
            # Skip sample sets when the user chose to ignore samples
            if cfg.ignore_samples() > 0 and 'sample' in setname.lower():
                continue
            # Only process sets not verified on a previous pass
            if not verified.get(setname, False):
                logging.info("Running repair on set %s", setname)
                parfile_nzf = par_table[setname]
                # Run if the par file still exists OR extra pars are known
                if os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)) or parfile_nzf.extrapars:
                    need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname, single=single)
                    re_add = re_add or need_re_add
                    # Repair failed, no re-queue requested: try SFV instead
                    if not res and not need_re_add and cfg.sfv_check():
                        res = try_sfv_check(nzo, workdir, setname)
                    verified[setname] = res
                else:
                    continue
                par_error = par_error or not res
    else:
        # No par2 sets at all: SFV check is the only verification option
        logging.info("No par2 sets for %s", filename)
        nzo.set_unpack_info('Repair', T('[%s] No par2 sets') % unicoder(filename))
        if cfg.sfv_check():
            par_error = not try_sfv_check(nzo, workdir, '')
            verified[''] = not par_error

    if re_add:
        # Put the job back into the queue and resume downloading
        logging.info('Readded %s to queue', filename)
        if nzo.priority != TOP_PRIORITY:
            nzo.priority = REPAIR_PRIORITY
        sabnzbd.nzbqueue.add_nzo(nzo)
        sabnzbd.downloader.Downloader.do.resume_from_postproc()

    # Remember verification results for a possible next pass
    sabnzbd.save_data(verified, VERIFIED_FILE, nzo.workpath)
    logging.info('Par2 check finished on %s', filename)
    return par_error, re_add
def save(self, save_nzo=None):
    """ Save queue, all nzo's or just the specified one """
    logging.info("Saving queue")
    saved_ids = []
    for item in self.__nzo_list:
        # The id goes into the admin file regardless of which NZO we save
        saved_ids.append(os.path.join(item.work_name, item.nzo_id))
        if save_nzo is not None and item is not save_nzo:
            continue
        sabnzbd.save_data(item, item.nzo_id, item.workpath)
        if not item.futuretype:
            item.save_to_disk()
    sabnzbd.save_admin((QUEUE_VERSION, saved_ids, []), QUEUE_FILE_NAME)
def save(self, save_nzo=None):
    """ Save queue, all nzo's or just the specified one """
    logging.info("Saving queue")
    queue_ids = []
    # Iterate over a snapshot: the live list may change while saving
    for entry in self.__nzo_list[:]:
        if entry.deleted:
            # Deleted jobs are neither listed nor saved
            continue
        queue_ids.append(os.path.join(entry.work_name, entry.nzo_id))
        if save_nzo is None or entry is save_nzo:
            sabnzbd.save_data(entry, entry.nzo_id, entry.workpath)
            if not entry.futuretype:
                entry.save_to_disk()
    sabnzbd.save_admin((QUEUE_VERSION, queue_ids, []), QUEUE_FILE_NAME)
def __flush_article(self, article, data):
    """Write one article's data to the job's work path, keyed by art_id."""
    job = article.nzf.nzo
    if job.is_gone():
        # Do not discard this article because the
        # file might still be processed at this moment!!
        return

    art_id = article.get_art_id()
    if not art_id:
        logging.warning("Flushing %s failed -> no art_id", article)
        return

    # Save data, but don't complain when destination folder is missing
    # because this flush may come after completion of the NZO.
    sabnzbd.save_data(data, art_id, job.workpath, do_pickle=False, silent=True)
def save(self, save_nzo: Union[NzbObject, None, bool] = None):
    """ Save queue, all nzo's or just the specified one """
    logging.info("Saving queue")
    ids = []
    # Snapshot the list; it can change while we are saving
    for item in self.__nzo_list[:]:
        if item.is_gone():
            continue
        ids.append(os.path.join(item.work_name, item.nzo_id))
        if save_nzo is not None and item is not save_nzo:
            continue
        if item.futuretype:
            sabnzbd.save_data(item, item.nzo_id, item.admin_path)
        else:
            # save_to_disk also covers save_data for the NZO itself
            item.save_to_disk()
    sabnzbd.save_admin((QUEUE_VERSION, ids, []), QUEUE_FILE_NAME)
def register_article(self, article, found=True):
    """Handle completion of a single downloaded article.

    Removes the article from its job, performs a throttled save of the job
    state, hands complete files to the Assembler and ends the job when its
    final article is done.

    article: the article that finished downloading
    found: whether the article was actually retrieved from the server
    """
    nzf = article.nzf
    nzo = nzf.nzo

    if nzf.deleted:
        # File no longer wanted; drop the article
        logging.debug("Discarding article %s, no longer in queue", article.article)
        return

    file_done, post_done, reset = nzo.remove_article(article, found)
    filename = nzf.filename

    if reset:
        self.reset_try_list()

    if nzo.is_gone():
        logging.debug('Discarding file completion %s for deleted job', filename)
    else:
        if file_done:
            # Throttled save: only persist when next_save has expired
            if nzo.next_save is None or time.time() > nzo.next_save:
                sabnzbd.save_data(nzo, nzo.nzo_id, nzo.workpath)
                BPSMeter.do.save()
                if nzo.save_timeout is None:
                    nzo.next_save = None
                else:
                    nzo.next_save = time.time() + nzo.save_timeout

            if not nzo.precheck:
                _type = nzf.type
                # Only start decoding if we have a filename and type
                if filename and _type:
                    Assembler.do.process((nzo, nzf))
                else:
                    if file_has_articles(nzf):
                        logging.warning(T('%s -> Unknown encoding'), filename)
        if post_done:
            # Last file of the job is done: finish it up
            self.end_job(nzo)
def __flush_article(self, article, data):
    """Write one article's data to the job's work path, keyed by art_id.

    Note: the early-return for deleted files/jobs is deliberately disabled
    (the commented-out `return` below), so the data is flushed anyway.
    """
    nzf = article.nzf
    nzo = nzf.nzo

    if nzf.deleted or nzo.deleted:
        # Do not discard this article because the
        # file might still be processed at this moment!!
        if sabnzbd.LOG_ALL:
            logging.debug("%s would be discarded", article)
        # return

    art_id = article.get_art_id()
    if art_id:
        if sabnzbd.LOG_ALL:
            logging.debug("Flushing %s to disk", article)
        # Save data, but don't complain when destination folder is missing
        # because this flush may come after completion of the NZO.
        sabnzbd.save_data(data, art_id, nzo.workpath, do_pickle=False, silent=True)
    else:
        logging.warning("Flushing %s failed -> no art_id", article)
def __flush_article(self, article, data):
    """Persist decoded article data for a job that is still active."""
    job = article.nzf.nzo
    if job.status in (Status.COMPLETED, Status.DELETED):
        # Job already completed or removed: this late flush is dropped
        if sabnzbd.LOG_ALL:
            logging.debug("%s is discarded", article)
        return

    art_id = article.get_art_id()
    if not art_id:
        logging.warning("Flushing %s failed -> no art_id", article)
        return

    if sabnzbd.LOG_ALL:
        logging.debug("Flushing %s to disk", article)
    # Save data, but don't complain when destination folder is missing
    # because this flush may come after completion of the NZO.
    sabnzbd.save_data(data, art_id, job.workpath, do_pickle=False, silent=True)
def register_article(self, article):
    """Handle completion of a single downloaded article.

    Removes the article from its job, performs a throttled save of job
    state, hands complete files to the Assembler, and when the whole post
    is done optionally disconnects idle servers and notifies the Assembler
    to start post-processing.
    """
    nzf = article.nzf
    nzo = nzf.nzo

    if nzo.deleted or nzf.deleted:
        # Job or file no longer wanted; drop the article
        logging.debug("Discarding article %s, no longer in queue", article.article)
        return

    file_done, post_done, reset = nzo.remove_article(article)
    filename = nzf.filename

    if reset:
        self.reset_try_list()

    if file_done:
        # Throttled save: only persist when next_save has expired
        if nzo.next_save is None or time.time() > nzo.next_save:
            sabnzbd.save_data(nzo, nzo.nzo_id, nzo.workpath)
            BPSMeter.do.save()
            if nzo.save_timeout is None:
                nzo.next_save = None
            else:
                nzo.next_save = time.time() + nzo.save_timeout

        _type = nzf.type
        # Only start decoding if we have a filename and type
        if filename and _type:
            Assembler.do.process((nzo, nzf))
        else:
            if file_has_articles(nzf):
                logging.warning(Ta('%s -> Unknown encoding'), filename)

    if post_done:
        if self.actives(grabs=False) < 2 and cfg.autodisconnect():
            # This was the last job, close server connections
            sabnzbd.downloader.Downloader.do.disconnect()

        # Notify assembler to call postprocessor
        Assembler.do.process((nzo, None))
def parring(nzo, workdir):
    """ Perform par processing. Returns: (par_error, re_add)

    Verifies/repairs each par set in nzo.extrapars; when no par2 sets exist
    it falls back to SFV verification and then to RAR-based checking
    (renaming obfuscated RARs first if needed). Results are persisted to
    VERIFIED_FILE so fully verified jobs skip all checks on a later pass.

    Returns (par_error, re_add):
        par_error -- True when verification/repair failed or was aborted
        re_add    -- True when the job was re-queued for more par blocks
    """
    logging.info("Starting verification and repair of %s", nzo.final_name)
    par_error = False
    re_add = False

    # Get verification status of sets
    verified = sabnzbd.load_data(VERIFIED_FILE, nzo.workpath, remove=False) or {}

    # If all were verified successfully, we skip the rest of the checks
    if verified and all(verified.values()):
        logging.info("Skipping repair, all sets previously verified: %s", verified)
        return par_error, re_add

    if nzo.extrapars:
        # Need to make a copy because it can change during iteration
        single = len(nzo.extrapars) == 1
        for setname in list(nzo.extrapars):
            # Honor the ignore-samples setting
            if cfg.ignore_samples() and RE_SAMPLE.search(setname.lower()):
                continue
            # Skip sets that were already tried
            if not verified.get(setname, False):
                logging.info("Running verification and repair on set %s", setname)
                parfile_nzf = nzo.partable[setname]
                # Check if file maybe wasn't deleted and if we maybe have more files in the parset
                if os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)) or nzo.extrapars[setname]:
                    need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname, single=single)
                    # Was it aborted? Then flag an error and stop entirely
                    if not nzo.pp_active:
                        re_add = False
                        par_error = True
                        break
                    re_add = re_add or need_re_add
                    verified[setname] = res
                else:
                    continue
                par_error = par_error or not res
    elif not verified.get("", False):
        # No par2-sets found, skipped if already tried before
        logging.info("No par2 sets for %s", nzo.final_name)
        nzo.set_unpack_info("Repair", T("[%s] No par2 sets") % nzo.final_name)

        # Try SFV-based verification and rename
        sfv_check_result = None
        if cfg.sfv_check() and not verified.get("", False):
            sfv_check_result = try_sfv_check(nzo, workdir)
            # None means "no SFV found"; False means SFV check failed
            par_error = sfv_check_result is False

        # If no luck with SFV, do RAR-check or RAR-rename
        if sfv_check_result is None and cfg.enable_unrar():
            # Check for RAR's with a sensible extension
            _, _, rars, _, _ = build_filelists(workdir, check_rar=False)
            # If there's no RAR's, they might be super-obfuscated
            if not rars:
                # Returns number of renamed RAR's
                if rar_renamer(nzo, workdir):
                    # Re-parse the files so we can do RAR-check
                    _, _, rars, _, _ = build_filelists(workdir)
            if rars:
                par_error = not try_rar_check(nzo, rars)

        # Save that we already tried SFV/RAR-verification
        verified[""] = not par_error

    if re_add:
        # Re-queue the job to fetch the extra par files it asked for
        logging.info("Re-added %s to queue", nzo.final_name)
        if nzo.priority != FORCE_PRIORITY:
            nzo.priority = REPAIR_PRIORITY
        nzo.status = Status.FETCHING
        sabnzbd.nzbqueue.NzbQueue.do.add(nzo)
        sabnzbd.downloader.Downloader.do.resume_from_postproc()

    # Remember verification results for a possible next pass
    sabnzbd.save_data(verified, VERIFIED_FILE, nzo.workpath)
    logging.info("Verification and repair finished for %s", nzo.final_name)
    return par_error, re_add
def parring(nzo, workdir):
    """ Perform par processing. Returns: (par_error, re_add)

    Verifies/repairs each par set listed in nzo.extrapars; without par2
    sets it falls back to SFV checking and then a RAR-based check.

    Returns (par_error, re_add):
        par_error -- True when verification/repair failed or was aborted
        re_add    -- True when the job was re-queued for more par blocks
    """
    filename = nzo.final_name
    notifier.send_notification(T('Post-processing'), filename, 'pp', nzo.cat)
    logging.info('Starting verification and repair of %s', filename)

    # Get verification status of sets from a previous run (may be empty)
    verified = sabnzbd.load_data(VERIFIED_FILE, nzo.workpath, remove=False) or {}
    repair_sets = nzo.extrapars.keys()

    re_add = False
    par_error = False
    # Single-set jobs are flagged so par2_repair can act accordingly
    single = len(repair_sets) == 1

    if repair_sets:
        for setname in repair_sets:
            # Honor the ignore-samples setting
            if cfg.ignore_samples() and RE_SAMPLE.search(setname.lower()):
                continue
            # Only process sets not verified on a previous pass
            if not verified.get(setname, False):
                logging.info("Running verification and repair on set %s", setname)
                parfile_nzf = nzo.partable[setname]
                # Check if file maybe wasn't deleted and if we maybe have more files in the parset
                if os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)) or nzo.extrapars[setname]:
                    need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname, single=single)
                    # Was it aborted? Then flag an error and stop entirely
                    if not nzo.pp_active:
                        re_add = False
                        par_error = True
                        break
                    re_add = re_add or need_re_add
                    verified[setname] = res
                else:
                    continue
                par_error = par_error or not res
    else:
        # We must not have found any par2..
        logging.info("No par2 sets for %s", filename)
        nzo.set_unpack_info('Repair', T('[%s] No par2 sets') % unicoder(filename))
        if cfg.sfv_check() and not verified.get('', False):
            par_error = not try_sfv_check(nzo, workdir, '')
            verified[''] = not par_error
        # If still no success, do RAR-check
        if not par_error and cfg.enable_unrar():
            par_error = not try_rar_check(nzo, workdir, '')
            verified[''] = not par_error

    if re_add:
        # Re-queue the job to fetch the extra par files it asked for
        logging.info('Re-added %s to queue', filename)
        if nzo.priority != TOP_PRIORITY:
            nzo.priority = REPAIR_PRIORITY
        nzo.status = Status.FETCHING
        sabnzbd.nzbqueue.NzbQueue.do.add(nzo)
        sabnzbd.downloader.Downloader.do.resume_from_postproc()

    # Remember verification results for a possible next pass
    sabnzbd.save_data(verified, VERIFIED_FILE, nzo.workpath)
    logging.info('Verification and repair finished for %s', filename)
    return par_error, re_add
def parring(nzo, workdir):
    """ Perform par processing. Returns: (par_error, re_add)

    Verifies/repairs each par set of the job. When no par2 sets are known,
    scans the finished files for obfuscated par2 archives (renaming them so
    handle_par2() recognizes them as one set) and recurses; failing that it
    falls back to SFV checking and then a RAR-based check.

    Fix over original: the obfuscated-par2 scan used a bare `except:`,
    which also swallows KeyboardInterrupt/SystemExit; narrowed to
    `except Exception` while keeping the deliberate best-effort behavior.

    Returns (par_error, re_add):
        par_error -- True when verification/repair failed or was aborted
        re_add    -- True when the job was re-queued for more par blocks
    """
    filename = nzo.final_name
    notifier.send_notification(T('Post-processing'), filename, 'pp')
    logging.info('Starting verification and repair of %s', filename)

    # Get verification status of sets from a previous run (may be empty)
    verified = sabnzbd.load_data(VERIFIED_FILE, nzo.workpath, remove=False) or {}
    repair_sets = nzo.partable.keys()

    re_add = False
    par_error = False
    # Single-set jobs are flagged so par2_repair can act accordingly
    single = len(repair_sets) == 1

    if repair_sets:
        for setname in repair_sets:
            # Honor the ignore-samples setting
            if cfg.ignore_samples() and RE_SAMPLE.search(setname.lower()):
                continue
            # Only process sets not verified on a previous pass
            if not verified.get(setname, False):
                logging.info("Running verification and repair on set %s", setname)
                parfile_nzf = nzo.partable[setname]
                # Run if the par file still exists OR extra pars are known
                if os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)) or parfile_nzf.extrapars:
                    need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname, single=single)
                    # Was it aborted? Then flag an error and stop entirely
                    if not nzo.pp_active:
                        re_add = False
                        par_error = True
                        break
                    re_add = re_add or need_re_add
                    verified[setname] = res
                else:
                    continue
                par_error = par_error or not res
    else:
        # Obfuscated par2 check
        logging.info('No par2 sets found, running obfuscated check on %s', filename)

        # Get the NZF's and sort them based on size
        nzfs_sorted = sorted(nzo.finished_files, key=lambda x: x.bytes)

        # We will have to make 'fake' par files that are recognized
        par2_vol = 0
        par2_filename = None

        for nzf_try in nzfs_sorted:
            # Run through the list of files, looking for a par2 signature
            logging.debug("Checking par2 signature of %s", nzf_try.filename)
            try:
                nzf_path = os.path.join(workdir, nzf_try.filename)
                if is_parfile(nzf_path):
                    # We need 1 base-name so they are recognized as 1 set
                    if not par2_filename:
                        par2_filename = nzf_path
                    # Rename so handle_par2() picks it up
                    newpath = '%s.vol%d+%d.par2' % (par2_filename, par2_vol, par2_vol + 1)
                    renamer(nzf_path, newpath)
                    nzf_try.filename = os.path.split(newpath)[1]
                    # Let the magic happen
                    nzo.handle_par2(nzf_try, file_done=True)
                    par2_vol += 1
            except Exception:
                # Best-effort scan: unreadable/renaming failures are ignored
                pass
        if par2_vol > 0:
            # Pars found, we do it again
            par_error, re_add = parring(nzo, workdir)
        else:
            # We must not have found any par2..
            logging.info("No par2 sets for %s", filename)
            nzo.set_unpack_info('Repair', T('[%s] No par2 sets') % unicoder(filename))
            if cfg.sfv_check() and not verified.get('', False):
                par_error = not try_sfv_check(nzo, workdir, '')
                verified[''] = not par_error
            # If still no success, do RAR-check
            if not par_error and cfg.enable_unrar():
                par_error = not try_rar_check(nzo, workdir, '')
                verified[''] = not par_error

    if re_add:
        # Re-queue the job to fetch the extra par files it asked for
        logging.info('Re-added %s to queue', filename)
        if nzo.priority != TOP_PRIORITY:
            nzo.priority = REPAIR_PRIORITY
        sabnzbd.nzbqueue.add_nzo(nzo)
        sabnzbd.downloader.Downloader.do.resume_from_postproc()

    # Remember verification results for a possible next pass
    sabnzbd.save_data(verified, VERIFIED_FILE, nzo.workpath)
    logging.info('Verification and repair finished for %s', filename)
    return par_error, re_add
def parring(nzo, workdir):
    """ Perform par processing. Returns: (par_error, re_add)

    Verifies/repairs each par set, with SFV and RAR-check fallbacks per
    set. When no par2 sets are known, scans finished files for obfuscated
    par2 archives and recurses; failing that, falls back to SFV and
    RAR-based checking for the whole job.

    Fixes over original: removed the dead `if 0: assert ...` debug block,
    and narrowed the bare `except:` in the obfuscated-par2 scan to
    `except Exception` so KeyboardInterrupt/SystemExit are not swallowed.

    Returns (par_error, re_add):
        par_error -- True when verification/repair failed or was aborted
        re_add    -- True when the job was re-queued for more par blocks
    """
    filename = nzo.final_name
    notifier.send_notification(T('Post-processing'), filename, 'pp')
    logging.info('Starting verification and repair of %s', filename)

    # Get verification status of sets from a previous run (may be empty)
    verified = sabnzbd.load_data(VERIFIED_FILE, nzo.workpath, remove=False) or {}

    # Collect the par files; copy so the job's own table isn't mutated
    if nzo.partable:
        par_table = nzo.partable.copy()
    else:
        par_table = {}
    repair_sets = par_table.keys()

    re_add = False
    par_error = False
    # Single-set jobs are flagged so par2_repair can act accordingly
    single = len(repair_sets) == 1

    if repair_sets:
        for setname in repair_sets:
            # Honor the ignore-samples setting
            if cfg.ignore_samples() and 'sample' in setname.lower():
                continue
            # Only process sets not verified on a previous pass
            if not verified.get(setname, False):
                logging.info("Running verification and repair on set %s", setname)
                parfile_nzf = par_table[setname]
                # Run if the par file still exists OR extra pars are known
                if os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)) or parfile_nzf.extrapars:
                    need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname, single=single)
                    # Was it aborted? Then flag an error and stop entirely
                    if not nzo.pp_active:
                        re_add = False
                        par_error = True
                        break
                    re_add = re_add or need_re_add
                    # Repair failed, no re-queue requested: try SFV, then RAR
                    if not res and not need_re_add and cfg.sfv_check():
                        res = try_sfv_check(nzo, workdir, setname)
                    if not res and not need_re_add and cfg.enable_unrar():
                        res = try_rar_check(nzo, workdir, setname)
                    verified[setname] = res
                else:
                    continue
                par_error = par_error or not res
    else:
        # Obfuscated par2 check
        logging.info('No par2 sets found, running obfuscated check on %s', filename)

        # Get the NZF's and sort them based on size
        nzfs_sorted = sorted(nzo.finished_files, key=lambda x: x.bytes)

        # We will have to make 'fake' par files that are recognized
        par2_vol = 0
        par2_filename = None

        for nzf_try in nzfs_sorted:
            # Run through the list of files, looking for a par2 signature
            logging.debug("Checking par2 signature of %s", nzf_try.filename)
            try:
                nzf_path = os.path.join(workdir, nzf_try.filename)
                if is_parfile(nzf_path):
                    # We need 1 base-name so they are recognized as 1 set
                    if not par2_filename:
                        par2_filename = nzf_path
                    # Rename so handle_par2() picks it up
                    newpath = '%s.vol%d+%d.par2' % (par2_filename, par2_vol, par2_vol + 1)
                    renamer(nzf_path, newpath)
                    nzf_try.filename = os.path.split(newpath)[1]
                    # Let the magic happen
                    nzo.handle_par2(nzf_try, file_done=True)
                    par2_vol += 1
            except Exception:
                # Best-effort scan: unreadable/renaming failures are ignored
                pass
        if par2_vol > 0:
            # Pars found, we do it again
            par_error, re_add = parring(nzo, workdir)
        else:
            # We must not have found any par2..
            logging.info("No par2 sets for %s", filename)
            nzo.set_unpack_info('Repair', T('[%s] No par2 sets') % unicoder(filename))
            if cfg.sfv_check() and not verified.get('', False):
                par_error = not try_sfv_check(nzo, workdir, '')
                verified[''] = not par_error
            # If still no success, do RAR-check
            if not par_error and cfg.enable_unrar():
                par_error = not try_rar_check(nzo, workdir, '')
                verified[''] = not par_error

    if re_add:
        # Re-queue the job to fetch the extra par files it asked for
        logging.info('Re-added %s to queue', filename)
        if nzo.priority != TOP_PRIORITY:
            nzo.priority = REPAIR_PRIORITY
        sabnzbd.nzbqueue.add_nzo(nzo)
        sabnzbd.downloader.Downloader.do.resume_from_postproc()

    # Remember verification results for a possible next pass
    sabnzbd.save_data(verified, VERIFIED_FILE, nzo.workpath)
    logging.info('Verification and repair finished for %s', filename)
    return par_error, re_add
def parring(nzo, workdir):
    """ Perform par processing. Returns: (par_error, re_add)

    Verifies/repairs each par set listed in nzo.extrapars; without par2
    sets it falls back to SFV checking and then a RAR-based check.

    Returns (par_error, re_add):
        par_error -- True when verification/repair failed or was aborted
        re_add    -- True when the job was re-queued for more par blocks
    """
    filename = nzo.final_name
    notifier.send_notification(T('Post-processing'), filename, 'pp', nzo.cat)
    logging.info('Starting verification and repair of %s', filename)

    # Get verification status of sets from a previous run (may be empty)
    verified = sabnzbd.load_data(VERIFIED_FILE, nzo.workpath, remove=False) or {}
    repair_sets = nzo.extrapars.keys()

    re_add = False
    par_error = False
    # Single-set jobs are flagged so par2_repair can act accordingly
    single = len(repair_sets) == 1

    if repair_sets:
        for setname in repair_sets:
            # Honor the ignore-samples setting
            if cfg.ignore_samples() and RE_SAMPLE.search(setname.lower()):
                continue
            # Only process sets not verified on a previous pass
            if not verified.get(setname, False):
                logging.info("Running verification and repair on set %s", setname)
                parfile_nzf = nzo.partable[setname]
                # Check if file maybe wasn't deleted and if we maybe have more files in the parset
                if os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)) or nzo.extrapars[setname]:
                    need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname, single=single)
                    # Was it aborted? Then flag an error and stop entirely
                    if not nzo.pp_active:
                        re_add = False
                        par_error = True
                        break
                    re_add = re_add or need_re_add
                    verified[setname] = res
                else:
                    continue
                par_error = par_error or not res
    else:
        # We must not have found any par2..
        logging.info("No par2 sets for %s", filename)
        nzo.set_unpack_info('Repair', T('[%s] No par2 sets') % unicoder(filename))
        if cfg.sfv_check() and not verified.get('', False):
            par_error = not try_sfv_check(nzo, workdir, '')
            verified[''] = not par_error
        # If still no success, do RAR-check
        if not par_error and cfg.enable_unrar():
            par_error = not try_rar_check(nzo, workdir, '')
            verified[''] = not par_error

    if re_add:
        # Re-queue the job to fetch the extra par files it asked for
        logging.info('Re-added %s to queue', filename)
        if nzo.priority != TOP_PRIORITY:
            nzo.priority = REPAIR_PRIORITY
        nzo.status = Status.FETCHING
        sabnzbd.nzbqueue.NzbQueue.do.add(nzo)
        sabnzbd.downloader.Downloader.do.resume_from_postproc()

    # Remember verification results for a possible next pass
    sabnzbd.save_data(verified, VERIFIED_FILE, nzo.workpath)
    logging.info('Verification and repair finished for %s', filename)
    return par_error, re_add