def is_cloaked(nzo: NzbObject, path: str, names: List[str]) -> bool:
    """Return True if this is likely to be a cloaked encrypted post"""
    fname = os.path.splitext(get_filename(path.lower()))[0]
    for name in names:
        name = get_filename(name.lower())
        name, ext = os.path.splitext(name)
        if (
            ext == ".rar"
            and fname.startswith(name)
            and (len(fname) - len(name)) < 8
            and len(names) < 3
            and not RE_SUBS.search(fname)
        ):
            # Only warn once
            if nzo.encrypted == 0:
                logging.warning(
                    T('Job "%s" is probably encrypted due to RAR with same name inside this RAR'), nzo.final_name
                )
            nzo.encrypted = 1
            return True
        elif "password" in name and ext not in SAFE_EXTS:
            # Only warn once
            if nzo.encrypted == 0:
                logging.warning(T('Job "%s" is probably encrypted: "password" in filename "%s"'), nzo.final_name, name)
            nzo.encrypted = 1
            return True
    return False
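# Illustrative note (not part of the module): the cloaking heuristic above triggers when a
# downloaded RAR contains little more than another RAR whose name is (almost) the same as
# the archive itself, or any non-safe file with "password" in its name. For example,
# assuming an NzbObject instance `nzo`:
#
#   is_cloaked(nzo, "/incomplete/job/abc.xyz.rar", ["abc.xyz.rar"])         # True: single member with same stem
#   is_cloaked(nzo, "/incomplete/job/abc.xyz.rar", ["linux.iso", "a.nfo"])  # False: ordinary content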
def repair_job(self, folder, new_nzb=None, password=None):
    """Reconstruct admin for a single job folder, optionally with new NZB"""

    def all_verified(path):
        """Return True when all sets have been successfully verified"""
        verified = sabnzbd.load_data(VERIFIED_FILE, path, remove=False) or {'x': False}
        return all(verified[x] for x in verified)

    name = os.path.basename(folder)
    path = os.path.join(folder, JOB_ADMIN)
    if hasattr(new_nzb, 'filename'):
        filename = new_nzb.filename
    else:
        filename = ''

    if not filename:
        if not all_verified(path):
            filename = globber_full(path, '*.gz')
        if len(filename) > 0:
            logging.debug('Repair job %s by reparsing stored NZB', name)
            nzo_id = sabnzbd.add_nzbfile(filename[0], pp=None, script=None, cat=None, priority=None,
                                         nzbname=name, reuse=True, password=password)[1]
        else:
            logging.debug('Repair job %s without stored NZB', name)
            nzo = NzbObject(name, pp=None, script=None, nzb='', cat=None, priority=None,
                            nzbname=name, reuse=True)
            nzo.password = password
            self.add(nzo)
            nzo_id = nzo.nzo_id
    else:
        remove_all(path, '*.gz')
        logging.debug('Repair job %s with new NZB (%s)', name, filename)
        nzo_id = sabnzbd.add_nzbfile(new_nzb, pp=None, script=None, cat=None, priority=None,
                                     nzbname=name, reuse=True, password=password)[1]

    return nzo_id
def add(self, nzo: NzbObject, save=True, quiet=False) -> str:
    if not nzo.nzo_id:
        nzo.nzo_id = sabnzbd.get_new_id("nzo", nzo.admin_path, self.__nzo_table)

    # If no files are to be downloaded anymore, send to postproc
    if not nzo.files and not nzo.futuretype:
        self.end_job(nzo)
        return nzo.nzo_id

    # Reset try_lists, markers and evaluate the scheduling settings
    nzo.reset_try_list()
    nzo.deleted = False
    priority = nzo.priority
    if sabnzbd.Scheduler.analyse(False, priority):
        nzo.status = Status.PAUSED

    self.__nzo_table[nzo.nzo_id] = nzo
    if priority > HIGH_PRIORITY:
        # Top and repair priority items are added to the top of the queue
        self.__nzo_list.insert(0, nzo)
    elif priority == LOW_PRIORITY:
        self.__nzo_list.append(nzo)
    else:
        # for high priority we need to add the item at the bottom
        # of any other high priority items above the normal priority
        # for normal priority we need to add the item at the bottom
        # of the normal priority items above the low priority
        if self.__nzo_list:
            pos = 0
            added = False
            for position in self.__nzo_list:
                if position.priority < priority:
                    self.__nzo_list.insert(pos, nzo)
                    added = True
                    break
                pos += 1
            if not added:
                # if there are no other items classed as a lower priority
                # then it will be added to the bottom of the queue
                self.__nzo_list.append(nzo)
        else:
            # if the queue is empty then simply append the item to the bottom
            self.__nzo_list.append(nzo)

    if save:
        self.save(nzo)

    if not (quiet or nzo.status == Status.FETCHING):
        notifier.send_notification(T("NZB added to queue"), nzo.filename, "download", nzo.cat)

    if not quiet and cfg.auto_sort():
        try:
            field, direction = cfg.auto_sort().split()
            self.sort_queue(field, direction)
        except ValueError:
            pass

    return nzo.nzo_id
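# Minimal standalone sketch (an assumption, not part of the queue class) of the insertion
# rule used in add() above: forced/repair priorities jump to the front, LOW_PRIORITY goes
# to the back, and everything else lands just before the first queued item of strictly
# lower priority. The helper name and the numeric defaults are hypothetical.
def _sketch_insert_position(queue_priorities, new_priority, high_priority=1, low_priority=-1):
    if new_priority > high_priority:
        return 0
    if new_priority == low_priority:
        return len(queue_priorities)
    for pos, existing in enumerate(queue_priorities):
        if existing < new_priority:
            return pos
    return len(queue_priorities)

# _sketch_insert_position([2, 1, 0, -1], 0)  -> 3  (just before the LOW item)
# _sketch_insert_position([2, 1, 0, -1], 1)  -> 2  (below the existing HIGH item)
# _sketch_insert_position([2, 1, 0, -1], 3)  -> 0  (repair/forced items go on top)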
def end_job(self, nzo: NzbObject):
    """Send NZO to the post-processing queue"""
    # Notify assembler to call postprocessor
    if not nzo.deleted:
        logging.info("[%s] Ending job %s", caller_name(), nzo.final_name)
        nzo.deleted = True
        if nzo.precheck:
            nzo.save_to_disk()
            # Check result
            enough, _ = nzo.check_availability_ratio()
            if enough:
                # Enough data present, do real download
                self.send_back(nzo)
                return
            else:
                # Not enough data, let postprocessor show it as failed
                pass
        sabnzbd.Assembler.process(nzo)
def repair_job(self, repair_folder, new_nzb=None, password=None):
    """Reconstruct admin for a single job folder, optionally with new NZB"""
    # Check if folder exists
    if not repair_folder or not os.path.exists(repair_folder):
        return None
    name = os.path.basename(repair_folder)
    admin_path = os.path.join(repair_folder, JOB_ADMIN)

    # If Retry was used and a new NZB was uploaded
    if getattr(new_nzb, "filename", None):
        remove_all(admin_path, "*.gz", keep_folder=True)
        logging.debug("Repair job %s with new NZB (%s)", name, new_nzb.filename)
        _, nzo_ids = sabnzbd.add_nzbfile(new_nzb, nzbname=name, reuse=repair_folder, password=password)
        nzo_id = nzo_ids[0]
    else:
        # Was this file already post-processed?
        verified = sabnzbd.load_data(VERIFIED_FILE, admin_path, remove=False)
        filenames = []
        if not verified or not all(verified[x] for x in verified):
            filenames = globber_full(admin_path, "*.gz")

        if filenames:
            logging.debug("Repair job %s by re-parsing stored NZB", name)
            _, nzo_ids = sabnzbd.add_nzbfile(filenames[0], nzbname=name, reuse=repair_folder, password=password)
            nzo_id = nzo_ids[0]
        else:
            logging.debug("Repair job %s without stored NZB", name)
            nzo = NzbObject(name, nzbname=name, reuse=repair_folder)
            nzo.password = password
            self.add(nzo)
            nzo_id = nzo.nzo_id

    return nzo_id
def nzo_filtered_by_rating(nzo: NzbObject) -> Tuple[int, str]:
    if cfg.rating_enable() and cfg.rating_filter_enable() and (nzo.rating_filtered < 2):
        rating = sabnzbd.Rating.get_rating_by_nzo(nzo.nzo_id)
        if rating is not None:
            nzo.rating_filtered = 1
            reason = rating_filtered(rating, nzo.filename.lower(), True)
            if reason is not None:
                return 2, reason
            reason = rating_filtered(rating, nzo.filename.lower(), False)
            if reason is not None:
                return 1, reason
    return 0, ""
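# Return contract of nzo_filtered_by_rating(), as read from the code above:
#   (2, reason) -> rating_filtered(..., True) matched
#   (1, reason) -> only rating_filtered(..., False) matched
#   (0, "")     -> rating/filtering disabled, no rating available yet, or nothing matched
# Presumably (an assumption, not stated here) the caller treats 2 as abort-the-job and
# 1 as pause-the-job.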
def fail_to_history(nzo: NzbObject, url: str, msg="", content=False):
    """Create History entry for failed URL Fetch
    msg: message to be logged
    content: report in history that cause is a bad NZB file
    """
    # Remove the "Trying to fetch" part
    if url:
        nzo.filename = url
        nzo.final_name = url.strip()

    if content:
        # Bad content
        msg = T("Unusable NZB file")
    else:
        # Failed fetch
        msg = T("URL Fetching failed; %s") % msg

    # Mark as failed
    nzo.set_unpack_info("Source", msg)
    nzo.fail_msg = msg

    notifier.send_notification(T("URL Fetching failed; %s") % "", "%s\n%s" % (msg, url), "failed", nzo.cat)
    if cfg.email_endjob() > 0:
        emailer.badfetch_mail(msg, url)

    # Parse category to make sure script is set correctly after a grab
    nzo.cat, _, nzo.script, _ = misc.cat_to_opts(nzo.cat, script=nzo.script)

    # Add to history and run script if desired
    sabnzbd.NzbQueue.remove(nzo.nzo_id)
    sabnzbd.PostProcessor.process(nzo)
def try_sfv_check(nzo: NzbObject, workdir):
    """Attempt to verify set using SFV file
    Return None if no SFV-sets, True/False based on verification
    """
    # Get list of SFV names
    sfvs = globber_full(workdir, "*.sfv")

    # If no files named *.sfv, let's search for obfuscated SFV files
    if not sfvs:
        files = globber_full(workdir, "*")
        for file in files:
            if is_sfv_file(file):
                logging.debug("Found and will use obfuscated SFV file: %s", file)
                sfvs.append(file)
        if not sfvs:
            # Still no SFV, so:
            return None

    result = sfv_check(sfvs, nzo, workdir)
    if not result:
        print_sfv = [os.path.basename(sfv) for sfv in sfvs]
        fail_msg = T('Some files failed to verify against "%s"') % "; ".join(print_sfv)
        nzo.set_unpack_info("Repair", fail_msg)
        nzo.status = Status.FAILED
        nzo.fail_msg = fail_msg
        return False

    # Success
    nzo.set_unpack_info("Repair", T("Verified successfully using SFV files"))
    return True
def add(self, url: str, future_nzo: NzbObject, when: Optional[int] = None):
    """Add an URL to the URLGrabber queue, 'when' is seconds from now"""
    if future_nzo and when:
        # Always increase counter
        future_nzo.url_tries += 1

        # Too many tries? Cancel
        if future_nzo.url_tries > cfg.max_url_retries():
            self.fail_to_history(future_nzo, url, T("Maximum retries"))
            return

        future_nzo.url_wait = time.time() + when

    self.queue.put((url, future_nzo))
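# Illustrative usage (an assumption about the caller, not taken from the source): re-queue
# a failed fetch with a back-off so the grabber thread picks it up again later, e.g.
#
#   self.add(url, future_nzo, when=60)   # retry roughly a minute from now
#
# Entries whose url_wait still lies in the future are expected to be skipped until then.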
def process(self, nzo: NzbObject):
    """Push on finished job in the queue"""
    # Make sure we return the status "Waiting"
    nzo.status = Status.QUEUED

    if nzo not in self.history_queue:
        self.history_queue.append(nzo)

    # Fast-track if it has DirectUnpacked jobs or if it's still going
    if nzo.direct_unpacker and (nzo.direct_unpacker.success_sets or not nzo.direct_unpacker.killed):
        self.fast_queue.put(nzo)
    else:
        self.slow_queue.put(nzo)
    self.save()
    sabnzbd.history_updated()
def generate_future(self, msg, pp=None, script=None, cat=None, url=None, priority=DEFAULT_PRIORITY, nzbname=None):
    """Create and return a placeholder nzo object"""
    logging.debug("Creating placeholder NZO")
    future_nzo = NzbObject(
        filename=msg,
        pp=pp,
        script=script,
        futuretype=True,
        cat=cat,
        url=url,
        priority=priority,
        nzbname=nzbname,
        status=Status.GRABBING,
    )
    self.add(future_nzo)
    return future_nzo
def repair_job(self, folder, new_nzb=None):
    """Reconstruct admin for a single job folder, optionally with new NZB"""
    name = os.path.basename(folder)
    path = os.path.join(folder, JOB_ADMIN)
    if new_nzb is None or not new_nzb.filename:
        filename = globber(path, '*.gz')
        if len(filename) > 0:
            logging.debug('Repair job %s by reparsing stored NZB', latin1(name))
            sabnzbd.add_nzbfile(filename[0], pp=None, script=None, cat=None, priority=None,
                                nzbname=name, reuse=True)
        else:
            logging.debug('Repair job %s without stored NZB', latin1(name))
            nzo = NzbObject(name, 0, pp=None, script=None, nzb='', cat=None, priority=None,
                            nzbname=name, reuse=True)
            self.add(nzo)
    else:
        remove_all(path, '*.gz')
        logging.debug('Repair job %s with new NZB (%s)', latin1(name), latin1(new_nzb.filename))
        sabnzbd.add_nzbfile(new_nzb, pp=None, script=None, cat=None, priority=None,
                            nzbname=name, reuse=True)
def __init__(self, nzo: NzbObject):
    super().__init__()

    self.nzo: NzbObject = nzo
    self.active_instance: Optional[subprocess.Popen] = None
    self.killed = False
    self.next_file_lock = threading.Condition(threading.RLock())

    self.unpack_dir_info = None
    self.rarfile_nzf: Optional[NzbFile] = None
    self.cur_setname = None
    self.cur_volume = 0
    self.total_volumes = {}
    self.unpack_time = 0.0

    self.success_sets = {}
    self.next_sets = []

    self.duplicate_lines = 0

    nzo.direct_unpacker = self
def generate_future(self, msg, pp=None, script=None, cat=None, url=None, priority=NORMAL_PRIORITY, nzbname=None):
    """Create and return a placeholder nzo object"""
    future_nzo = NzbObject(msg, pp, script, None, True, cat=cat, url=url, priority=priority,
                           nzbname=nzbname, status=Status.GRABBING)
    self.add(future_nzo)
    return future_nzo
def rar_renamer(nzo: NzbObject, workdir):
    """Deobfuscate rar file names: use header and content information to give RAR-files decent names"""
    nzo.status = Status.VERIFYING
    nzo.set_unpack_info("Repair", T("Trying RAR renamer"))
    nzo.set_action_line(T("Trying RAR renamer"), "...")

    renamed_files = 0

    # This is the most important datastructure (in case of mixed obfuscated rarsets)
    rarvolnr = {}
    # rarvolnr will contain per rar vol number the rarfilenames and their respective contents
    # (and maybe other characteristics, like filesizes).
    # for example: rarvolnr[6]['somerandomfilename.rar']={'readme.txt', 'linux.iso'},
    # which means 'somerandomfilename.rar' has rarvolnumber 6, and contents 'readme.txt' and 'linux.iso'
    # if we find a rarfile with rarvolnumber 7, and 'linux.iso' in it, we have a match!

    # The volume number and real extension of a (obfuscated) rar file
    # so volnrext['dfakjldfalkjdfl.blabla'] = (14, 'part014.rar') or (2, 'r000')
    # Not really needed, but handy to avoid a second lookup at the renaming
    volnrext = {}

    # Scan rar files in workdir, but not subdirs
    workdir_files = os.listdir(workdir)
    for file_to_check in workdir_files:
        file_to_check = os.path.join(workdir, file_to_check)

        # We only want files:
        if not os.path.isfile(file_to_check):
            continue

        # The function will check if it's a RAR-file
        # We do a sanity-check for the returned number
        rar_vol, new_extension = rarvolinfo.get_rar_extension(file_to_check)
        if 0 < rar_vol < 1000:
            logging.debug("Detected volume-number %s from RAR-header: %s ", rar_vol, file_to_check)
            volnrext[file_to_check] = (rar_vol, new_extension)
            # The files inside the rar file
            rar_contents = rarfile.RarFile(os.path.join(workdir, file_to_check), single_file_check=True).filelist()
            try:
                rarvolnr[rar_vol]
            except:
                # does not yet exist, so create:
                rarvolnr[rar_vol] = {}
            rarvolnr[rar_vol][file_to_check] = rar_contents  # store them for matching (if needed)
        else:
            logging.debug("No RAR-volume-number found in %s", file_to_check)

    logging.debug("Deobfuscate: rarvolnr is: %s", rarvolnr)
    logging.debug("Deobfuscate: volnrext is: %s", volnrext)

    # Could be that there are no rar-files, we stop
    if not len(rarvolnr):
        return renamed_files

    # this can probably be done with a max-key-lambda oneliner, but ... how?
    numberofrarsets = 0
    for mykey in rarvolnr.keys():
        numberofrarsets = max(numberofrarsets, len(rarvolnr[mykey]))
    logging.debug("Number of rarsets is %s", numberofrarsets)

    if numberofrarsets == 1:
        # Just one obfuscated rarset ... that's easy
        logging.debug("Deobfuscate: Just one obfuscated rarset")
        for filename in volnrext:
            new_rar_name = "%s.%s" % (nzo.final_name, volnrext[filename][1])
            new_rar_name = os.path.join(workdir, new_rar_name)
            new_rar_name = get_unique_filename(new_rar_name)
            logging.debug("Deobfuscate: Renaming %s to %s" % (filename, new_rar_name))
            renamer(filename, new_rar_name)
            renamed_files += 1
        return renamed_files

    # numberofrarsets bigger than 1, so a mixed rar set, so we need pre-checking

    # Sanity check of the rar set
    # Get the highest rar part number (that's the upper limit):
    highest_rar = sorted(rarvolnr.keys())[-1]
    # A staircase check: the number of rarsets should not go up, but stay the same or go down
    how_many_previous = 1000  # 1000 rarsets mixed ... should be enough ... typical is 1, 2 or maybe 3
    # Start at part001.rar and go up to the highest
    for rar_set_number in range(1, highest_rar + 1):
        try:
            how_many_here = len(rarvolnr[rar_set_number])
        except:
            # rarset does not exist at all
            logging.warning("rarset %s is missing completely, so I can't deobfuscate.", rar_set_number)
            return 0
        # OK, it exists, now let's check it's not higher
        if how_many_here > how_many_previous:
            # this should not happen: higher number of rarsets than the previous number of rarsets
            logging.warning("no staircase! rarset %s is higher than previous, so I can't deobfuscate.", rar_set_number)
            return 0
        how_many_previous = how_many_here

    # OK, that looked OK (a declining staircase), so we can safely proceed

    # More than one obfuscated rarset, so we must do matching based on the files inside the rar files

    # Assign (random) rar set names, on a first come, first served basis
    rarsetname = {}  # in which rar set it should be, so rar set 'A', or 'B', or ...
    mychar = "A"
    # First things first: assign a rarsetname to the rar files which have volume number 1
    for base_obfuscated_filename in rarvolnr[1]:
        rarsetname[base_obfuscated_filename] = mychar + "--" + nzo.final_name
        mychar = chr(ord(mychar) + 1)
    logging.debug("Deobfuscate: rarsetname %s", rarsetname)

    # Do the matching, layer by layer (read: rarvolnumber)
    # So, all rar files with rarvolnr 1, find the contents (files inside the rar),
    # and match with rarfiles with rarvolnr 2, and put them in the correct rarset.
    # And so on, until the highest rarvolnr minus 1 matched against highest rarvolnr
    for n in range(1, len(rarvolnr)):
        logging.debug("Deobfuscate: Finding matches between rar sets %s and %s" % (n, n + 1))
        for base_obfuscated_filename in rarvolnr[n]:
            matchcounter = 0
            for next_obfuscated_filename in rarvolnr[n + 1]:
                # A less strict alternative would be a set() intersection:
                # set(rarvolnr[n][base_obfuscated_filename]).intersection(set(rarvolnr[n + 1][next_obfuscated_filename]))
                # Check if the last filename inside the existing rar matches the first filename in the following rar
                if rarvolnr[n][base_obfuscated_filename][-1] == rarvolnr[n + 1][next_obfuscated_filename][0]:
                    try:
                        rarsetname[next_obfuscated_filename] = rarsetname[base_obfuscated_filename]
                        matchcounter += 1
                    except KeyError:
                        logging.warning(T("No matching earlier rar file for %s"), next_obfuscated_filename)
            if matchcounter > 1:
                logging.info("Deobfuscate: more than one match, so risk of false positive matching.")

    # Do the renaming:
    for filename in rarsetname:
        new_rar_name = "%s.%s" % (rarsetname[filename], volnrext[filename][1])
        new_rar_name = os.path.join(workdir, new_rar_name)
        new_rar_name = get_unique_filename(new_rar_name)
        logging.debug("Deobfuscate: Renaming %s to %s" % (filename, new_rar_name))
        renamer(filename, new_rar_name)
        renamed_files += 1

    # Done: the obfuscated rar files have now been renamed to regularly formatted filenames
    return renamed_files
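# Illustrative sketch (an assumption, not part of the module) of the rarvolnr layout the
# matching loop above resolves for two mixed, obfuscated sets:
#
#   rarvolnr[1] = {"qqqq.bin": ["a.iso"],          "zzzz.bin": ["b.mkv"]}
#   rarvolnr[2] = {"rrrr.bin": ["a.iso", "a.nfo"], "yyyy.bin": ["b.mkv"]}
#
# "rrrr.bin" gets chained to "qqqq.bin" because the last file inside that volume 1
# ("a.iso") equals the first file inside volume 2, so both end up in the same rar set;
# "yyyy.bin" is chained to "zzzz.bin" in the same way.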
def process_job(nzo: NzbObject):
    """Process one job"""
    start = time.time()

    # keep track of whether we can continue
    all_ok = True
    # keep track of par problems
    par_error = False
    # keep track of any unpacking errors
    unpack_error = False
    # Signal empty download, for when 'empty_postproc' is enabled
    empty = False
    nzb_list = []
    # These need to be initialized in case of a crash
    workdir_complete = ""
    script_log = ""
    script_line = ""

    # Get the job flags
    nzo.save_attribs()
    flag_repair, flag_unpack, flag_delete = nzo.repair_opts
    # Normalize PP
    if flag_delete:
        flag_unpack = True
    if flag_unpack:
        flag_repair = True

    # Get the NZB name
    filename = nzo.final_name

    # Download-processes can mark job as failed, skip all steps
    if nzo.fail_msg:
        all_ok = False
        par_error = True
        unpack_error = 1

    try:
        # Get the folder containing the download result
        workdir = nzo.download_path
        tmp_workdir_complete = None

        # if no files are present (except __admin__), fail the job
        if all_ok and len(globber(workdir)) < 2:
            if nzo.precheck:
                _, ratio = nzo.check_availability_ratio()
                emsg = T("Download might fail, only %s of required %s available") % (ratio, cfg.req_completion_rate())
            else:
                emsg = T("Download failed - Not on your server(s)")
                empty = True
            emsg += " - https://sabnzbd.org/not-complete"
            nzo.fail_msg = emsg
            nzo.set_unpack_info("Download", emsg)
            nzo.status = Status.FAILED
            # do not run unpacking or parity verification
            flag_repair = flag_unpack = False
            all_ok = cfg.empty_postproc() and empty
            if not all_ok:
                par_error = True
                unpack_error = 1

        script = nzo.script
        logging.info(
            "Starting Post-Processing on %s => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s",
            filename,
            flag_repair,
            flag_unpack,
            flag_delete,
            script,
            nzo.cat,
        )

        # Set complete dir to workdir in case we need to abort
        workdir_complete = workdir

        # Send post-processing notification
        notifier.send_notification(T("Post-processing"), nzo.final_name, "pp", nzo.cat)

        # Par processing, if enabled
        if all_ok and flag_repair:
            par_error, re_add = parring(nzo, workdir)
            if re_add:
                # Try to get more par files
                return False

        # If we don't need extra par2, we can disconnect
        if sabnzbd.NzbQueue.actives(grabs=False) == 0 and cfg.autodisconnect():
            # This was the last job, close server connections
            sabnzbd.Downloader.disconnect()

        # Sanitize the resulting files
        if sabnzbd.WIN32:
            sanitize_files_in_folder(workdir)

        # Check if user allows unsafe post-processing
        if flag_repair and cfg.safe_postproc():
            all_ok = all_ok and not par_error

        if all_ok:
            # Fix encodings
            fix_unix_encoding(workdir)

            # Use dirs generated by direct-unpacker
            if nzo.direct_unpacker and nzo.direct_unpacker.unpack_dir_info:
                (
                    tmp_workdir_complete,
                    workdir_complete,
                    file_sorter,
                    one_folder,
                    marker_file,
                ) = nzo.direct_unpacker.unpack_dir_info
            else:
                # Generate extraction path
                tmp_workdir_complete, workdir_complete, file_sorter, one_folder, marker_file = prepare_extraction_path(
                    nzo
                )

            newfiles = []
            # Run Stage 2: Unpack
            if flag_unpack:
                # Set the current nzo status to "Extracting...". Used in History
                nzo.status = Status.EXTRACTING
                logging.info("Running unpack_magic on %s", filename)
                unpack_error, newfiles = unpack_magic(
                    nzo, workdir, tmp_workdir_complete, flag_delete, one_folder, (), (), (), (), ()
                )
                logging.info("Unpacked files %s", newfiles)

                if sabnzbd.WIN32:
                    # Sanitize the resulting files
                    newfiles = sanitize_files_in_folder(tmp_workdir_complete)
                logging.info("Finished unpack_magic on %s", filename)

            if cfg.safe_postproc():
                all_ok = all_ok and not unpack_error

            if all_ok:
                # Move any (left-over) files to destination
                nzo.status = Status.MOVING
                nzo.set_action_line(T("Moving"), "...")
                for root, _dirs, files in os.walk(workdir):
                    if not root.endswith(JOB_ADMIN):
                        for file_ in files:
                            path = os.path.join(root, file_)
                            new_path = path.replace(workdir, tmp_workdir_complete)
                            ok, new_path = move_to_path(path, new_path)
                            if new_path:
                                newfiles.append(new_path)
                            if not ok:
                                nzo.set_unpack_info("Unpack", T("Failed moving %s to %s") % (path, new_path))
                                all_ok = False
                                break

            # Set permissions right
            set_permissions(tmp_workdir_complete)

            if all_ok and marker_file:
                del_marker(os.path.join(tmp_workdir_complete, marker_file))
                remove_from_list(marker_file, newfiles)

            if all_ok:
                # Remove files matching the cleanup list
                cleanup_list(tmp_workdir_complete, skip_nzb=True)

                # Check if this is an NZB-only download, if so redirect to queue
                # except when PP was Download-only
                if flag_repair:
                    nzb_list = nzb_redirect(tmp_workdir_complete, nzo.final_name, nzo.pp, script, nzo.cat, nzo.priority)
                else:
                    nzb_list = None
                if nzb_list:
                    nzo.set_unpack_info("Download", T("Sent %s to queue") % nzb_list)
                    cleanup_empty_directories(tmp_workdir_complete)
                else:
                    # Full cleanup including nzb's
                    cleanup_list(tmp_workdir_complete, skip_nzb=False)

        script_output = ""
        script_ret = 0
        if not nzb_list:
            # Give destination its final name
            if cfg.folder_rename() and tmp_workdir_complete and not one_folder:
                if not all_ok:
                    # Rename failed folders so they are easy to recognize
                    workdir_complete = tmp_workdir_complete.replace("_UNPACK_", "_FAILED_")
                    workdir_complete = get_unique_path(workdir_complete, create_dir=False)

                try:
                    newfiles = rename_and_collapse_folder(tmp_workdir_complete, workdir_complete, newfiles)
                except:
                    logging.error(
                        T('Error renaming "%s" to "%s"'),
                        clip_path(tmp_workdir_complete),
                        clip_path(workdir_complete),
                    )
                    logging.info("Traceback: ", exc_info=True)
                    # Better disable sorting because filenames are all off now
                    file_sorter.sort_file = None

            if empty:
                job_result = -1
            else:
                job_result = int(par_error) + int(bool(unpack_error)) * 2

            if cfg.ignore_samples():
                remove_samples(workdir_complete)

            # TV/Movie/Date Renaming code part 2 - rename and move files to parent folder
            if all_ok and file_sorter.sort_file:
                if newfiles:
                    file_sorter.rename(newfiles, workdir_complete)
                    workdir_complete, ok = file_sorter.move(workdir_complete)
                else:
                    workdir_complete, ok = file_sorter.rename_with_ext(workdir_complete)
                if not ok:
                    nzo.set_unpack_info("Unpack", T("Failed to move files"))
                    all_ok = False

            if cfg.deobfuscate_final_filenames() and all_ok and not nzb_list:
                # Deobfuscate the filenames
                logging.info("Running deobfuscate")
                deobfuscate.deobfuscate_list(newfiles, nzo.final_name)

            # Run the user script
            script_path = make_script_path(script)
            if (all_ok or not cfg.safe_postproc()) and (not nzb_list) and script_path:
                # Set the current nzo status to "Ext Script...". Used in History
                nzo.status = Status.RUNNING
                nzo.set_action_line(T("Running script"), script)
                nzo.set_unpack_info("Script", T("Running user script %s") % script, unique=True)
                script_log, script_ret = external_processing(
                    script_path, nzo, clip_path(workdir_complete), nzo.final_name, job_result
                )
                script_line = get_last_line(script_log)
                if script_log:
                    script_output = nzo.nzo_id
                if script_line:
                    nzo.set_unpack_info("Script", script_line, unique=True)
                else:
                    nzo.set_unpack_info("Script", T("Ran %s") % script, unique=True)
            else:
                script = ""
                script_line = ""
                script_ret = 0

        # Maybe bad script result should fail job
        if script_ret and cfg.script_can_fail():
            script_error = True
            all_ok = False
            nzo.fail_msg = T("Script exit code is %s") % script_ret
        else:
            script_error = False

        # Email the results
        if (not nzb_list) and cfg.email_endjob():
            if (cfg.email_endjob() == 1) or (cfg.email_endjob() == 2 and (unpack_error or par_error or script_error)):
                emailer.endjob(
                    nzo.final_name,
                    nzo.cat,
                    all_ok,
                    workdir_complete,
                    nzo.bytes_downloaded,
                    nzo.fail_msg,
                    nzo.unpack_info,
                    script,
                    script_log,
                    script_ret,
                )

        if script_output:
            # Can do this only now, otherwise it would show up in the email
            if script_ret:
                script_ret = "Exit(%s) " % script_ret
            else:
                script_ret = ""
            if len(script_log.rstrip().split("\n")) > 1:
                nzo.set_unpack_info(
                    "Script",
                    '%s%s <a href="./scriptlog?name=%s">(%s)</a>'
                    % (script_ret, script_line, encoding.xml_name(script_output), T("More")),
                    unique=True,
                )
            else:
                # No '(more)' button needed
                nzo.set_unpack_info("Script", "%s%s " % (script_ret, script_line), unique=True)

        # Cleanup again, including NZB files
        if all_ok:
            cleanup_list(workdir_complete, False)

        # Force error for empty result
        all_ok = all_ok and not empty

        # Update indexer with results
        if cfg.rating_enable():
            if nzo.encrypted > 0:
                sabnzbd.Rating.update_auto_flag(nzo.nzo_id, sabnzbd.Rating.FLAG_ENCRYPTED)
            if empty:
                hosts = [s.host for s in sabnzbd.Downloader.nzo_servers(nzo)]
                if not hosts:
                    hosts = [None]
                for host in hosts:
                    sabnzbd.Rating.update_auto_flag(nzo.nzo_id, sabnzbd.Rating.FLAG_EXPIRED, host)

    except:
        logging.error(T("Post Processing Failed for %s (%s)"), filename, T("see logfile"))
        logging.info("Traceback: ", exc_info=True)
        nzo.fail_msg = T("Post-processing was aborted")
        notifier.send_notification(T("Download Failed"), filename, "failed", nzo.cat)
        nzo.status = Status.FAILED
        par_error = True
        all_ok = False

        if cfg.email_endjob():
            emailer.endjob(
                nzo.final_name,
                nzo.cat,
                all_ok,
                clip_path(workdir_complete),
                nzo.bytes_downloaded,
                nzo.fail_msg,
                nzo.unpack_info,
                "",
                "",
                0,
            )

    if all_ok:
        # If the folder only contains one file OR folder, have that as the path
        # Be aware that series/generic/date sorting may move a single file into a folder containing other files
        workdir_complete = one_file_or_folder(workdir_complete)
        workdir_complete = os.path.normpath(workdir_complete)

    # Clean up the NZO data
    try:
        nzo.purge_data(delete_all_data=all_ok)
    except:
        logging.error(T("Cleanup of %s failed."), nzo.final_name)
        logging.info("Traceback: ", exc_info=True)

    # Use automatic retry link on par2 errors and encrypted/bad RARs
    if par_error or unpack_error in (2, 3):
        try_alt_nzb(nzo)

    # Check if it was aborted
    if not nzo.pp_active:
        nzo.fail_msg = T("Post-processing was aborted")
        all_ok = False

    # Show final status in history
    if all_ok:
        notifier.send_notification(T("Download Completed"), filename, "complete", nzo.cat)
        nzo.status = Status.COMPLETED
    else:
        notifier.send_notification(T("Download Failed"), filename, "failed", nzo.cat)
        nzo.status = Status.FAILED

    # Log the overall time taken for postprocessing
    postproc_time = int(time.time() - start)

    with database.HistoryDB() as history_db:
        # Add the nzo to the database. Only the path, script and time taken is passed
        # Other information is obtained from the nzo
        history_db.add_history_db(nzo, workdir_complete, postproc_time, script_log, script_line)
        # Purge items
        history_db.auto_history_purge()

    sabnzbd.history_updated()
    return True
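# For reference, the job_result value handed to the user script in process_job() above
# encodes the outcome as computed there: 0 = success, 1 = par repair failed,
# 2 = unpack failed, 3 = both failed, -1 = the download was empty.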
def parring(nzo: NzbObject, workdir: str):
    """Perform par processing. Returns: (par_error, re_add)"""
    logging.info("Starting verification and repair of %s", nzo.final_name)
    par_error = False
    re_add = False

    # Get verification status of sets
    verified = sabnzbd.load_data(VERIFIED_FILE, nzo.admin_path, remove=False) or {}

    # If all were verified successfully, we skip the rest of the checks
    if verified and all(verified.values()):
        logging.info("Skipping repair, all sets previously verified: %s", verified)
        return par_error, re_add

    if nzo.extrapars:
        # Need to make a copy because it can change during iteration
        single = len(nzo.extrapars) == 1
        for setname in list(nzo.extrapars):
            if cfg.ignore_samples() and RE_SAMPLE.search(setname.lower()):
                continue
            # Skip sets that were already tried
            if not verified.get(setname, False):
                logging.info("Running verification and repair on set %s", setname)
                parfile_nzf = nzo.partable[setname]

                # Check if file maybe wasn't deleted and if we maybe have more files in the parset
                if os.path.exists(os.path.join(nzo.download_path, parfile_nzf.filename)) or nzo.extrapars[setname]:
                    need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname, single=single)
                    re_add = re_add or need_re_add
                    verified[setname] = res
                else:
                    continue
                par_error = par_error or not res
    elif not verified.get("", False):
        # No par2-sets found, skipped if already tried before
        logging.info("No par2 sets for %s", nzo.final_name)
        nzo.set_unpack_info("Repair", T("[%s] No par2 sets") % nzo.final_name)

        # Try SFV-based verification and rename
        sfv_check_result = None
        if cfg.sfv_check() and not verified.get("", False):
            sfv_check_result = try_sfv_check(nzo, workdir)
            par_error = sfv_check_result is False

        # If no luck with SFV, do RAR-check or RAR-rename
        if sfv_check_result is None and cfg.enable_unrar():
            # Check for RAR's with a sensible extension
            _, _, rars, _, _ = build_filelists(workdir, check_rar=False)
            # If there's no RAR's, they might be super-obfuscated
            if not rars:
                # Returns number of renamed RAR's
                if rar_renamer(nzo, workdir):
                    # Re-parse the files so we can do RAR-check
                    _, _, rars, _, _ = build_filelists(workdir)
            if rars:
                par_error = not try_rar_check(nzo, rars)

        # Save that we already tried SFV/RAR-verification
        verified[""] = not par_error

    if re_add:
        logging.info("Re-added %s to queue", nzo.final_name)
        if nzo.priority != FORCE_PRIORITY:
            nzo.priority = REPAIR_PRIORITY
        nzo.status = Status.FETCHING
        sabnzbd.NzbQueue.add(nzo)
        sabnzbd.Downloader.resume_from_postproc()

    sabnzbd.save_data(verified, VERIFIED_FILE, nzo.admin_path)

    logging.info("Verification and repair finished for %s", nzo.final_name)
    return par_error, re_add
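# Shape of the verification state persisted above (inferred from how the code reads and
# writes it, not from separate documentation):
#
#   verified = {"setname1": True, "setname2": False}  # per par2 set
#   verified[""] = True                               # SFV/RAR fallback already tried and passed
#
# sabnzbd.save_data(verified, VERIFIED_FILE, nzo.admin_path) stores it in the job's admin
# folder, so a later Retry can skip sets that were already verified successfully.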
def try_rar_check(nzo: NzbObject, rars):
    """Attempt to verify the set using the RAR files
    Return True if verified, False when failed
    If no RAR's are found, returns True
    """
    # Sort for better processing
    rars.sort(key=functools.cmp_to_key(rar_sort))

    # Test
    if rars:
        setname = setname_from_path(rars[0])
        nzo.status = Status.VERIFYING
        nzo.set_unpack_info("Repair", T("Trying RAR-based verification"), setname)
        nzo.set_action_line(T("Trying RAR-based verification"), "...")
        try:
            # Set path to unrar and open the file
            # Requires de-unicode for RarFile to work!
            rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
            zf = rarfile.RarFile(rars[0])

            # Skip if it's encrypted
            if zf.needs_password():
                msg = T("[%s] RAR-based verification failed: %s") % (setname, T("Passworded"))
                nzo.set_unpack_info("Repair", msg)
                return True

            # Will throw exception if something is wrong
            zf.testrar()
            # Success!
            msg = T("RAR files verified successfully")
            nzo.set_unpack_info("Repair", msg, setname)
            logging.info(msg)
            return True
        except rarfile.Error as e:
            nzo.fail_msg = T("RAR files failed to verify")
            msg = T("[%s] RAR-based verification failed: %s") % (setname, e)
            nzo.set_unpack_info("Repair", msg, setname)
            logging.info(msg)
            return False
    else:
        # No rar-files, so just continue
        return True
def check_encrypted_and_unwanted_files(nzo: NzbObject, filepath: str) -> Tuple[bool, Optional[str]]:
    """Combines check for unwanted and encrypted files to save on CPU and IO"""
    encrypted = False
    unwanted = None

    if (cfg.unwanted_extensions() and cfg.action_on_unwanted_extensions()) or (
        nzo.encrypted == 0 and cfg.pause_on_pwrar()
    ):
        # These checks should not break the assembler
        try:
            # Rarfile freezes on Windows special names, so don't try those!
            if sabnzbd.WIN32 and has_win_device(filepath):
                return encrypted, unwanted

            # Is it even a rarfile?
            if rarfile.is_rarfile(filepath):
                # Open the rar
                rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
                zf = rarfile.RarFile(filepath, single_file_check=True)

                # Check for encryption
                if (
                    nzo.encrypted == 0
                    and cfg.pause_on_pwrar()
                    and (zf.needs_password() or is_cloaked(nzo, filepath, zf.namelist()))
                ):
                    # Load all passwords
                    passwords = get_all_passwords(nzo)

                    # Cloaked job?
                    if is_cloaked(nzo, filepath, zf.namelist()):
                        encrypted = True
                    elif not passwords:
                        # Only error when no password was set
                        nzo.encrypted = 1
                        encrypted = True
                    else:
                        # Let's test if any of the passwords work
                        password_hit = False
                        for password in passwords:
                            if password:
                                logging.info('Trying password "%s" on job "%s"', password, nzo.final_name)
                                try:
                                    zf.setpassword(password)
                                except rarfile.Error:
                                    # On weird passwords the setpassword() will fail
                                    # but the actual testrar() will work
                                    pass
                                try:
                                    zf.testrar()
                                    password_hit = password
                                    break
                                except rarfile.RarWrongPassword:
                                    # This one really didn't work
                                    pass
                                except rarfile.RarCRCError as e:
                                    # CRC errors can be thrown for wrong password or
                                    # missing the next volume (with correct password)
                                    if "cannot find volume" in str(e).lower():
                                        # We assume this one worked!
                                        password_hit = password
                                        break
                                    # This one didn't work
                                    pass
                                except:
                                    # All the other errors we skip, they might be fixable in post-proc.
                                    # For example starting from the wrong volume, or damaged files
                                    # This will cause the check to be performed again for the next rar, might
                                    # be disk-intensive! Could be removed later and just accept the password.
                                    return encrypted, unwanted

                        # Did any work?
                        if password_hit:
                            # We always trust the user's input
                            if not nzo.password:
                                nzo.password = password_hit
                            # Don't check other files
                            logging.info('Password "%s" matches for job "%s"', password_hit, nzo.final_name)
                            nzo.encrypted = -1
                            encrypted = False
                        else:
                            # Encrypted and none of them worked
                            nzo.encrypted = 1
                            encrypted = True

                # Check for unwanted extensions
                if cfg.unwanted_extensions() and cfg.action_on_unwanted_extensions():
                    for somefile in zf.namelist():
                        logging.debug("File contains: %s", somefile)
                        if get_ext(somefile).replace(".", "").lower() in cfg.unwanted_extensions():
                            logging.debug("Unwanted file %s", somefile)
                            unwanted = somefile
                zf.close()
                del zf
        except:
            logging.info("Error during inspection of RAR-file %s", filepath)
            logging.debug("Traceback: ", exc_info=True)

    return encrypted, unwanted
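# Return contract, as read from the code above: a tuple (encrypted, unwanted) where
# encrypted is True when the archive looks cloaked or needs a password that none of the
# configured passwords unlock, and unwanted holds an archive member (the last one
# encountered) whose extension is on the unwanted-extensions list, else None.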