def GetMD5Hashes(fname, force=False):
    """ Get the hash table from a PAR2 file
        Return as dictionary, indexed on names and True for utf8-encoded names
    """
    new_encoding = True
    table = {}
    if force or not flag_file(os.path.split(fname)[0], QCHECK_FILE):
        try:
            f = open(fname, 'rb')
        except:
            return table, new_encoding
        new_encoding = False
        try:
            header = f.read(8)
            while header:
                name, hash = ParseFilePacket(f, header)
                new_encoding |= is_utf8(name)
                if name:
                    table[name] = hash
                header = f.read(8)
        except (struct.error, IndexError):
            logging.info('Cannot use corrupt par2 file for QuickCheck, "%s"', fname)
            table = {}
        except:
            logging.debug('QuickCheck parser crashed in file %s', fname)
            logging.info('Traceback: ', exc_info=True)
            table = {}
        f.close()
    return table, new_encoding
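
# Illustrative sketch only (not part of the original module): one way the table
# returned by GetMD5Hashes could be consumed for a quick verification pass.
# It assumes the table maps file names to raw (16-byte) full-file MD5 digests
# taken from the PAR2 FileDesc packets; 'quick_check_file' and the 64 KB chunk
# size are hypothetical choices made here for the example.
import hashlib
import os

def quick_check_file(path, table):
    """ Return True if the MD5 of the file at 'path' matches its PAR2 table entry """
    name = os.path.basename(path)
    if name not in table:
        return False
    md5 = hashlib.md5()
    with open(path, 'rb') as fp:
        for chunk in iter(lambda: fp.read(65536), b''):
            md5.update(chunk)
    return md5.digest() == table[name]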
def repair_job(self, folder, new_nzb=None):
    """ Reconstruct admin for a single job folder, optionally with new NZB """
    name = os.path.basename(folder)
    path = os.path.join(folder, JOB_ADMIN)
    if hasattr(new_nzb, 'filename'):
        filename = new_nzb.filename
    else:
        filename = ''
    if not filename:
        if not flag_file(folder, VERIFIED_FILE):
            filename = globber(path, '*.gz')
        if len(filename) > 0:
            logging.debug('Repair job %s by reparsing stored NZB', latin1(name))
            sabnzbd.add_nzbfile(filename[0], pp=None, script=None, cat=None, priority=None,
                                nzbname=name, reuse=True)
        else:
            logging.debug('Repair job %s without stored NZB', latin1(name))
            nzo = NzbObject(name, 0, pp=None, script=None, nzb='', cat=None, priority=None,
                            nzbname=name, reuse=True)
            self.add(nzo)
    else:
        remove_all(path, '*.gz')
        logging.debug('Repair job %s with new NZB (%s)', latin1(name), latin1(filename))
        sabnzbd.add_nzbfile(new_nzb, pp=None, script=None, cat=None, priority=None,
                            nzbname=name, reuse=True)
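
# Illustrative sketch only: repair_job (and GetMD5Hashes above) relies on a
# flag_file() helper that is not shown in this section. A minimal version might
# just test for, or create, a small marker file inside the folder, as below;
# the real helper may differ (for instance by placing the marker under the
# job-admin subfolder). 'flag_file_sketch' is a hypothetical stand-in.
import os

def flag_file_sketch(path, flag, create=False):
    """ Return True if the marker file exists; optionally create it first """
    flag_path = os.path.join(path, flag)
    if create:
        try:
            with open(flag_path, 'w') as fp:
                fp.write('ok\n')
            return True
        except IOError:
            return False
    return os.path.exists(flag_path)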
def parring(nzo, workdir):
    """ Perform par processing. Returns: (par_error, re_add) """
    assert isinstance(nzo, sabnzbd.nzbstuff.NzbObject)
    filename = nzo.final_name
    growler.send_notification(T('Post-processing'), nzo.final_name, 'pp')
    logging.info('Par2 check starting on %s', filename)

    ## Collect the par files
    if nzo.partable:
        par_table = nzo.partable.copy()
    else:
        par_table = {}
    repair_sets = par_table.keys()

    re_add = False
    par_error = False

    if repair_sets:
        for set_ in repair_sets:
            logging.info("Running repair on set %s", set_)
            parfile_nzf = par_table[set_]
            need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, set_)
            if need_re_add:
                re_add = True
            par_error = par_error or not res

        if re_add:
            # Repair needs more blocks: requeue the job so extra par2 files can be fetched
            logging.info('Readded %s to queue', filename)
            nzo.priority = REPAIR_PRIORITY
            sabnzbd.nzbqueue.add_nzo(nzo)
            sabnzbd.downloader.Downloader.do.resume_from_postproc()

        logging.info('Par2 check finished on %s', filename)

    if (par_error and not re_add) or not repair_sets:
        # See if alternative SFV check is possible
        if cfg.sfv_check():
            sfvs = globber(workdir, '*.sfv')
        else:
            sfvs = None
        if sfvs:
            par_error = False
            nzo.set_unpack_info('Repair', T('Trying SFV verification'))
            for sfv in sfvs:
                failed = sfv_check(sfv)
                if failed:
                    msg = T('Some files failed to verify against "%s"') % unicoder(os.path.basename(sfv))
                    msg += '; '
                    msg += '; '.join(failed)
                    nzo.set_unpack_info('Repair', msg)
                    par_error = True
            if not par_error:
                nzo.set_unpack_info('Repair', T('Verified successfully using SFV files'))
        elif not repair_sets:
            logging.info("No par2 sets for %s", filename)
            nzo.set_unpack_info('Repair', T('[%s] No par2 sets') % unicoder(filename))

    if not par_error:
        # Mark the job folder as verified so a later pass can skip the par2 check
        flag_file(workdir, VERIFIED_FILE, create=True)
    return par_error, re_add
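
# Illustrative sketch only: parring() falls back to sfv_check(), which is not
# shown here. The .sfv format is plain text, one "filename CRC32-in-hex" pair
# per line, with ';' starting a comment. The sketch below returns the list of
# names that fail the CRC check, mirroring how parring() treats a non-empty
# result as a failure; 'sfv_check_sketch' is a hypothetical stand-in, not the
# original sfv_check implementation.
import os
import zlib

def sfv_check_sketch(sfv_path):
    """ Return a list of file names whose CRC32 does not match the .sfv entries """
    failed = []
    folder = os.path.dirname(sfv_path)
    with open(sfv_path, 'r') as fp:
        for line in fp:
            line = line.strip()
            if not line or line.startswith(';'):
                continue
            name, _, crc = line.rpartition(' ')
            name = name.strip()
            target = os.path.join(folder, name)
            try:
                checksum = 0
                with open(target, 'rb') as data:
                    for chunk in iter(lambda: data.read(65536), b''):
                        checksum = zlib.crc32(chunk, checksum)
                if '%08x' % (checksum & 0xffffffff) != crc.strip().lower():
                    failed.append(name)
            except IOError:
                failed.append(name)
    return failed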