def process_job(nzo):
    """ Process one job: run the full post-processing chain for a finished NZO.

    Stages (each gated on the running `all_ok` flag): completeness check,
    par2 repair, unpack, move left-over files, sorting/renaming, user
    script, email notification, rating updates and history-DB bookkeeping.

    Returns False when more par2 blocks were requested (job is re-queued),
    True when post-processing ran to the end (successfully or not).
    """
    start = time.time()

    # keep track of whether we can continue
    all_ok = True
    # keep track of par problems
    par_error = False
    # keep track of any unpacking errors (int error code or False)
    unpack_error = False
    # Signal empty download, for when 'empty_postproc' is enabled
    empty = False
    nzb_list = []
    # These need to be initialized in case of a crash
    workdir_complete = ''
    script_log = ''
    script_line = ''

    # Get the job flags
    nzo.save_attribs()
    flag_repair, flag_unpack, flag_delete = nzo.repair_opts
    # Normalize PP: Delete implies Unpack, Unpack implies Repair
    if flag_delete:
        flag_unpack = True
    if flag_unpack:
        flag_repair = True

    # Get the NZB name
    filename = nzo.final_name

    if nzo.fail_msg:
        # Special case: aborted due to too many missing data
        nzo.status = Status.FAILED
        nzo.save_attribs()
        all_ok = False
        par_error = True
        unpack_error = 1

    try:
        # Get the folder containing the download result
        workdir = nzo.downpath
        tmp_workdir_complete = None

        # if no files are present (except __admin__), fail the job
        if all_ok and len(globber(workdir)) < 2:
            if nzo.precheck:
                _enough, ratio = nzo.check_quality()
                req_ratio = float(cfg.req_completion_rate()) / 100.0
                # Make sure that rounded ratio doesn't equal required ratio
                # when it is actually below required
                if (ratio < req_ratio) and (req_ratio - ratio) < 0.001:
                    ratio = req_ratio - 0.001
                emsg = '%.1f%%' % (ratio * 100.0)
                emsg2 = '%.1f%%' % float(cfg.req_completion_rate())
                emsg = T('Download might fail, only %s of required %s available') % (emsg, emsg2)
            else:
                emsg = T('Download failed - Not on your server(s)')
                empty = True
            emsg += ' - https://sabnzbd.org/not-complete'
            nzo.fail_msg = emsg
            nzo.set_unpack_info('Fail', emsg)
            nzo.status = Status.FAILED

            # do not run unpacking or parity verification
            flag_repair = flag_unpack = False
            # Empty downloads may still continue when 'empty_postproc' is set
            all_ok = cfg.empty_postproc() and empty
            if not all_ok:
                par_error = True
                unpack_error = 1

        script = nzo.script
        logging.info('Starting Post-Processing on %s' +
                     ' => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s',
                     filename, flag_repair, flag_unpack, flag_delete, script, nzo.cat)

        # Set complete dir to workdir in case we need to abort
        workdir_complete = workdir

        # Par processing, if enabled
        if all_ok and flag_repair:
            par_error, re_add = parring(nzo, workdir)
            if re_add:
                # Try to get more par files
                return False

        # If we don't need extra par2, we can disconnect
        if sabnzbd.nzbqueue.NzbQueue.do.actives(grabs=False) == 0 and cfg.autodisconnect():
            # This was the last job, close server connections
            sabnzbd.downloader.Downloader.do.disconnect()

        # Sanitize the resulting files
        if sabnzbd.WIN32:
            sanitize_files_in_folder(workdir)

        # Check if user allows unsafe post-processing
        if flag_repair and cfg.safe_postproc():
            all_ok = all_ok and not par_error

        if all_ok:
            # Fix encodings
            fix_unix_encoding(workdir)

            # Use dirs generated by direct-unpacker
            if nzo.direct_unpacker and nzo.direct_unpacker.unpack_dir_info:
                tmp_workdir_complete, workdir_complete, file_sorter, one_folder, marker_file = nzo.direct_unpacker.unpack_dir_info
            else:
                # Generate extraction path
                tmp_workdir_complete, workdir_complete, file_sorter, one_folder, marker_file = prepare_extraction_path(nzo)

        newfiles = []
        # Run Stage 2: Unpack
        if flag_unpack:
            # set the current nzo status to "Extracting...". Used in History
            nzo.status = Status.EXTRACTING
            logging.info("Running unpack_magic on %s", filename)
            unpack_error, newfiles = unpack_magic(nzo, workdir, tmp_workdir_complete, flag_delete, one_folder, (), (), (), (), ())
            logging.info("Unpacked files %s", newfiles)

            if sabnzbd.WIN32:
                # Sanitize the resulting files
                newfiles = sanitize_files_in_folder(tmp_workdir_complete)
            logging.info("Finished unpack_magic on %s", filename)

        if cfg.safe_postproc():
            all_ok = all_ok and not unpack_error

        if all_ok:
            # Move any (left-over) files to destination
            nzo.status = Status.MOVING
            nzo.set_action_line(T('Moving'), '...')
            for root, _dirs, files in os.walk(workdir):
                if not root.endswith(JOB_ADMIN):
                    for file_ in files:
                        path = os.path.join(root, file_)
                        new_path = path.replace(workdir, tmp_workdir_complete)
                        ok, new_path = move_to_path(path, new_path)
                        if new_path:
                            newfiles.append(new_path)
                        if not ok:
                            nzo.set_unpack_info('Unpack', T('Failed moving %s to %s') % (unicoder(path), unicoder(new_path)))
                            all_ok = False
                            break

        # Set permissions right
        set_permissions(tmp_workdir_complete)

        if all_ok and marker_file:
            del_marker(os.path.join(tmp_workdir_complete, marker_file))
            remove_from_list(marker_file, newfiles)

        if all_ok:
            # Remove files matching the cleanup list
            cleanup_list(tmp_workdir_complete, True)

            # Check if this is an NZB-only download, if so redirect to queue
            # except when PP was Download-only
            if flag_repair:
                nzb_list = nzb_redirect(tmp_workdir_complete, nzo.final_name, nzo.pp, script, nzo.cat, priority=nzo.priority)
            else:
                nzb_list = None
            if nzb_list:
                nzo.set_unpack_info('Download', T('Sent %s to queue') % unicoder(nzb_list))
                cleanup_empty_directories(tmp_workdir_complete)
            else:
                cleanup_list(tmp_workdir_complete, False)

        script_output = ''
        script_ret = 0
        if not nzb_list:
            # Give destination its final name
            if cfg.folder_rename() and tmp_workdir_complete and not one_folder:
                if all_ok:
                    try:
                        newfiles = rename_and_collapse_folder(tmp_workdir_complete, workdir_complete, newfiles)
                    except:
                        logging.error(T('Error renaming "%s" to "%s"'), clip_path(tmp_workdir_complete), clip_path(workdir_complete))
                        logging.info('Traceback: ', exc_info=True)
                        # Better disable sorting because filenames are all off now
                        file_sorter.sort_file = None
                else:
                    workdir_complete = tmp_workdir_complete.replace('_UNPACK_', '_FAILED_')
                    workdir_complete = get_unique_path(workdir_complete, n=0, create_dir=False)

            # job_result encodes the outcome for the user script:
            # -1 = empty, bit 0 = par error, bit 1 = unpack error
            if empty:
                job_result = -1
            else:
                job_result = int(par_error) + int(bool(unpack_error)) * 2

            if cfg.ignore_samples():
                remove_samples(workdir_complete)

            # TV/Movie/Date Renaming code part 2 - rename and move files to parent folder
            if all_ok and file_sorter.sort_file:
                if newfiles:
                    file_sorter.rename(newfiles, workdir_complete)
                    workdir_complete, ok = file_sorter.move(workdir_complete)
                else:
                    workdir_complete, ok = file_sorter.rename_with_ext(workdir_complete)
                if not ok:
                    nzo.set_unpack_info('Unpack', T('Failed to move files'))
                    all_ok = False

            # Run the user script
            script_path = make_script_path(script)
            if (all_ok or not cfg.safe_postproc()) and (not nzb_list) and script_path:
                # Set the current nzo status to "Ext Script...". Used in History
                nzo.status = Status.RUNNING
                nzo.set_action_line(T('Running script'), unicoder(script))
                nzo.set_unpack_info('Script', T('Running user script %s') % unicoder(script), unique=True)
                script_log, script_ret = external_processing(script_path, nzo, clip_path(workdir_complete), nzo.final_name, job_result)
                script_line = get_last_line(script_log)
                if script_log:
                    script_output = nzo.nzo_id
                if script_line:
                    nzo.set_unpack_info('Script', unicoder(script_line), unique=True)
                else:
                    nzo.set_unpack_info('Script', T('Ran %s') % unicoder(script), unique=True)
            else:
                script = ""
                script_line = ""
                script_ret = 0

        # Maybe bad script result should fail job
        if script_ret and cfg.script_can_fail():
            script_error = True
            all_ok = False
            nzo.fail_msg = T('Script exit code is %s') % script_ret
        else:
            script_error = False

        # Email the results
        if (not nzb_list) and cfg.email_endjob():
            if (cfg.email_endjob() == 1) or (cfg.email_endjob() == 2 and (unpack_error or par_error or script_error)):
                emailer.endjob(nzo.final_name, nzo.cat, all_ok, workdir_complete, nzo.bytes_downloaded,
                               nzo.fail_msg, nzo.unpack_info, script, TRANS(script_log), script_ret)

        if script_output:
            # Can do this only now, otherwise it would show up in the email
            if script_ret:
                script_ret = 'Exit(%s) ' % script_ret
            else:
                script_ret = ''
            if len(script_log.rstrip().split('\n')) > 1:
                nzo.set_unpack_info('Script',
                                    u'%s%s <a href="./scriptlog?name=%s">(%s)</a>' % (script_ret, script_line,
                                    xml.sax.saxutils.escape(script_output), T('More')), unique=True)
            else:
                # No '(more)' button needed
                nzo.set_unpack_info('Script', u'%s%s ' % (script_ret, script_line), unique=True)

        # Cleanup again, including NZB files
        if all_ok:
            cleanup_list(workdir_complete, False)

        # Force error for empty result
        all_ok = all_ok and not empty

        # Update indexer with results
        if cfg.rating_enable():
            if nzo.encrypted > 0:
                Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_ENCRYPTED)
            if empty:
                hosts = map(lambda s: s.host, sabnzbd.downloader.Downloader.do.nzo_servers(nzo))
                if not hosts:
                    hosts = [None]
                for host in hosts:
                    Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_EXPIRED, host)

    except:
        # Catch-all: any unexpected failure above marks the job failed but
        # still lets the cleanup/history code below run
        logging.error(T('Post Processing Failed for %s (%s)'), filename, T('see logfile'))
        logging.info("Traceback: ", exc_info=True)
        nzo.fail_msg = T('PostProcessing was aborted (%s)') % T('see logfile')
        notifier.send_notification(T('Download Failed'), filename, 'failed', nzo.cat)
        nzo.status = Status.FAILED
        par_error = True
        all_ok = False

        if cfg.email_endjob():
            emailer.endjob(nzo.final_name, nzo.cat, all_ok, clip_path(workdir_complete), nzo.bytes_downloaded,
                           nzo.fail_msg, nzo.unpack_info, '', '', 0)

    if all_ok:
        # If the folder only contains one file OR folder, have that as the path
        # Be aware that series/generic/date sorting may move a single file into a folder containing other files
        workdir_complete = one_file_or_folder(workdir_complete)
        workdir_complete = os.path.normpath(workdir_complete)

    # Clean up the NZO
    try:
        logging.info('Cleaning up %s (keep_basic=%s)', filename, str(not all_ok))
        sabnzbd.nzbqueue.NzbQueue.do.cleanup_nzo(nzo, keep_basic=not all_ok)
    except:
        logging.error(T('Cleanup of %s failed.'), nzo.final_name)
        logging.info("Traceback: ", exc_info=True)

    # Remove download folder
    if all_ok:
        try:
            if os.path.exists(workdir):
                logging.debug('Removing workdir %s', workdir)
                remove_all(workdir, recursive=True)
        except:
            logging.error(T('Error removing workdir (%s)'), clip_path(workdir))
            logging.info("Traceback: ", exc_info=True)

    # Use automatic retry link on par2 errors and encrypted/bad RARs
    if par_error or unpack_error in (2, 3):
        try_alt_nzb(nzo)

    # Show final status in history
    if all_ok:
        notifier.send_notification(T('Download Completed'), filename, 'complete', nzo.cat)
        nzo.status = Status.COMPLETED
    else:
        notifier.send_notification(T('Download Failed'), filename, 'failed', nzo.cat)
        nzo.status = Status.FAILED

    # Log the overall time taken for postprocessing
    postproc_time = int(time.time() - start)

    # Create the history DB instance
    history_db = database.HistoryDB()
    # Add the nzo to the database. Only the path, script and time taken is passed
    # Other information is obtained from the nzo
    history_db.add_history_db(nzo, clip_path(workdir_complete), nzo.downpath, postproc_time, script_log, script_line)
    # Purge items
    history_db.auto_history_purge()
    # The connection is only used once, so close it here
    history_db.close()
    sabnzbd.history_updated()
    return True
def run(self):
    """Follow the output of the active unrar instance character by character.

    Parses unrar's interactive output to track which volumes were read and
    which files were extracted, answers unrar's "[C]ontinue, [Q]uit" prompt
    when the next volume has been downloaded, and aborts on any of the
    known error messages (PP will then handle the failure).
    """
    # Input and output
    linebuf = ''
    last_volume_linebuf = ''
    unrar_log = []
    rarfiles = []
    extracted = []
    start_time = time.time()

    # Need to read char-by-char because there's no newline after new-disk message
    while 1:
        if not self.active_instance:
            break
        char = self.active_instance.stdout.read(1)
        linebuf += char

        if not char:
            # End of program
            break

        # Error? Let PP-handle it
        # NOTE: fixed a missing comma between the last two entries; the
        # original implicitly concatenated 'start extraction from a previous
        # volume' and 'Unexpected end of archive' into one string that could
        # never match, so truncated archives were not detected here.
        if linebuf.endswith(('ERROR: ', 'Cannot create', 'in the encrypted file', 'CRC failed',
                             'checksum failed', 'You need to start extraction from a previous volume',
                             'password is incorrect', 'Write error', 'checksum error',
                             'start extraction from a previous volume',
                             'Unexpected end of archive')):
            logging.info('Error in DirectUnpack of %s: %s', self.cur_setname, linebuf.strip())
            self.abort()

        if linebuf.endswith('\n'):
            # List files we used
            if linebuf.startswith('Extracting from'):
                filename = TRANS((re.search(EXTRACTFROM_RE, linebuf.strip()).group(1)))
                if filename not in rarfiles:
                    rarfiles.append(filename)

            # List files we extracted
            m = re.search(EXTRACTED_RE, linebuf)
            if m:
                # In case of flat-unpack, UnRar still prints the whole path (?!)
                unpacked_file = TRANS(m.group(2))
                if cfg.flat_unpack():
                    unpacked_file = os.path.basename(unpacked_file)
                extracted.append(real_path(self.unpack_dir_info[0], unpacked_file))

        # Did we reach the end?
        if linebuf.endswith('All OK'):
            # Stop timer and finish
            self.unpack_time += time.time() - start_time
            ACTIVE_UNPACKERS.remove(self)

            # Add to success
            rarfile_path = os.path.join(self.nzo.downpath, self.rarfile_nzf.filename)
            self.success_sets[self.cur_setname] = (rar_volumelist(rarfile_path, self.nzo.password, rarfiles), extracted)
            logging.info('DirectUnpack completed for %s', self.cur_setname)
            self.nzo.set_action_line(T('Direct Unpack'), T('Completed'))

            # List success in history-info
            msg = T('Unpacked %s files/folders in %s') % (len(extracted), format_time_string(self.unpack_time))
            msg = '%s - %s' % (T('Direct Unpack'), msg)
            self.nzo.set_unpack_info('Unpack', '[%s] %s' % (unicoder(self.cur_setname), msg))

            # Write current log and clear
            unrar_log.append(linebuf.strip())
            linebuf = ''
            last_volume_linebuf = ''
            logging.debug('DirectUnpack Unrar output %s', '\n'.join(unrar_log))
            unrar_log = []
            rarfiles = []
            extracted = []

            # Are there more files left?
            while self.nzo.files and not self.next_sets:
                with self.next_file_lock:
                    self.next_file_lock.wait()

            # Is there another set to do?
            if self.next_sets:
                # Start new instance
                nzf = self.next_sets.pop(0)
                self.reset_active()
                self.cur_setname = nzf.setname
                # Wait for the 1st volume to appear
                self.wait_for_next_volume()
                self.create_unrar_instance()
                start_time = time.time()
            else:
                self.killed = True
                break

        if linebuf.endswith('[C]ontinue, [Q]uit '):
            # Stop timer
            self.unpack_time += time.time() - start_time

            # Wait for the next one..
            self.wait_for_next_volume()

            # Possible that the instance was deleted while locked
            if not self.killed:
                # If unrar stopped or is killed somehow, writing will cause a crash
                try:
                    # Give unrar some time to do it's thing
                    self.active_instance.stdin.write('C\n')
                    start_time = time.time()
                    time.sleep(0.1)
                except IOError:
                    self.abort()
                    break

            # Did we unpack a new volume? Sometimes UnRar hangs on 1 volume
            if not last_volume_linebuf or last_volume_linebuf != linebuf:
                # Next volume
                self.cur_volume += 1
                self.nzo.set_action_line(T('Direct Unpack'), self.get_formatted_stats())
                logging.info('DirectUnpacked volume %s for %s', self.cur_volume, self.cur_setname)

            # If lines did not change and we don't have the next volume, this download is missing files!
            # In rare occasions we can get stuck forever with repeating lines
            if last_volume_linebuf == linebuf:
                if not self.have_next_volume() or self.duplicate_lines > 10:
                    logging.info('DirectUnpack failed due to missing files %s', self.cur_setname)
                    self.abort()
                else:
                    logging.debug('Duplicate output line detected: "%s"', last_volume_linebuf)
                    self.duplicate_lines += 1
            else:
                self.duplicate_lines = 0
            last_volume_linebuf = linebuf

        # Show the log
        if linebuf.endswith('\n'):
            unrar_log.append(linebuf.strip())
            linebuf = ''

    # Add last line
    unrar_log.append(linebuf.strip())
    logging.debug('DirectUnpack Unrar output %s', '\n'.join(unrar_log))

    # Make more space
    self.reset_active()
    if self in ACTIVE_UNPACKERS:
        ACTIVE_UNPACKERS.remove(self)

    # Set the thread to killed so it never gets restarted by accident
    self.killed = True
def run(self):
    """Legacy variant of the DirectUnpack output-follower thread.

    Reads the active unrar instance's stdout character by character,
    collects the rar volumes used, answers the "[C]ontinue, [Q]uit"
    prompt when the next volume is available, and aborts on known
    error messages. Unlike the newer variant above, it does not track
    individual extracted files.
    """
    # Input and output
    linebuf = ''
    last_volume_linebuf = ''
    unrar_log = []
    rarfiles = []
    start_time = time.time()

    # Need to read char-by-char because there's no newline after new-disk message
    while 1:
        if not self.active_instance:
            break
        char = self.active_instance.stdout.read(1)
        linebuf += char

        if not char:
            # End of program
            break

        # Error? Let PP-handle it
        if linebuf.endswith(('ERROR: ', 'Cannot create', 'in the encrypted file', 'CRC failed',
                             'checksum failed', 'You need to start extraction from a previous volume',
                             'password is incorrect', 'Write error', 'checksum error',
                             'start extraction from a previous volume')):
            logging.info('Error in DirectUnpack of %s', self.cur_setname)
            self.abort()

        # List the rar volumes we read from
        if linebuf.startswith('Extracting from') and linebuf.endswith('\n'):
            filename = TRANS((re.search(EXTRACTFROM_RE, linebuf.strip()).group(1)))
            if filename not in rarfiles:
                rarfiles.append(filename)

        # Did we reach the end?
        if linebuf.endswith('All OK'):
            # Stop timer and finish
            self.unpack_time += time.time() - start_time
            ACTIVE_UNPACKERS.remove(self)

            # Add to success
            rarfile_path = os.path.join(self.nzo.downpath, self.rarfile_nzf.filename)
            self.success_sets[self.cur_setname] = rar_volumelist(rarfile_path, self.nzo.password, rarfiles)
            logging.info('DirectUnpack completed for %s', self.cur_setname)
            self.nzo.set_action_line(T('Direct Unpack'), T('Completed'))

            # Write current log and clear
            unrar_log.append(linebuf.strip())
            linebuf = ''
            logging.debug('DirectUnpack Unrar output %s', '\n'.join(unrar_log))
            unrar_log = []
            rarfiles = []

            # Are there more files left?
            while self.nzo.files and not self.next_sets:
                with self.next_file_lock:
                    self.next_file_lock.wait()

            # Is there another set to do?
            if self.next_sets:
                # Start new instance
                nzf = self.next_sets.pop(0)
                self.reset_active()
                self.cur_setname = nzf.setname
                # Wait for the 1st volume to appear
                self.wait_for_next_volume()
                self.create_unrar_instance()
                start_time = time.time()
            else:
                self.killed = True
                break

        if linebuf.endswith('[C]ontinue, [Q]uit '):
            # Stop timer
            self.unpack_time += time.time() - start_time

            # Wait for the next one..
            self.wait_for_next_volume()

            # Possible that the instance was deleted while locked
            if not self.killed:
                # Give unrar some time to do it's thing
                # NOTE(review): no try/except here — if unrar already exited,
                # this write can raise; the newer variant guards with IOError
                self.active_instance.stdin.write('\n')
                start_time = time.time()
                time.sleep(0.1)

            # Did we unpack a new volume? Sometimes UnRar hangs on 1 volume
            if not last_volume_linebuf or last_volume_linebuf != linebuf:
                # Next volume
                self.cur_volume += 1
                self.nzo.set_action_line(T('Direct Unpack'), self.get_formatted_stats())
                logging.info('DirectUnpacked volume %s for %s', self.cur_volume, self.cur_setname)

            # If lines did not change and we don't have the next volume, this download is missing files!
            if last_volume_linebuf == linebuf and not self.have_next_volume():
                logging.info('DirectUnpack failed due to missing files %s', self.cur_setname)
                self.abort()

            last_volume_linebuf = linebuf

        # Show the log
        if linebuf.endswith('\n'):
            unrar_log.append(linebuf.strip())
            linebuf = ''

    # Add last line
    unrar_log.append(linebuf.strip())
    logging.debug('DirectUnpack Unrar output %s', '\n'.join(unrar_log))

    # Save information if success
    if self.success_sets:
        # The number is wrong if one_folder, just leave empty
        nr_files = '' if self.unpack_dir_info[3] else len(globber(self.unpack_dir_info[0]))
        msg = T('Unpacked %s files/folders in %s') % (nr_files, format_time_string(self.unpack_time))
        msg = '%s - %s' % (T('Direct Unpack'), msg)
        self.nzo.set_unpack_info('Unpack', '[%s] %s' % (unicoder(self.cur_setname), msg))

    # Make more space
    self.reset_active()
    if self in ACTIVE_UNPACKERS:
        ACTIVE_UNPACKERS.remove(self)

    # Set the thread to killed so it never gets restarted by accident
    self.killed = True
def process_job(nzo):
    """ Process one job (legacy newzbin-era variant).

    Same overall chain as the newer process_job: completeness check,
    par2 repair, unpack, move, sorting, user script, email, rating
    updates and history DB. Differences: supports msgid/Bookmarks and
    streaming mode, uses growler notifications and Ta() for log strings.

    Returns False when more par2 blocks were requested (job re-queued),
    True when post-processing ran to the end.
    """
    assert isinstance(nzo, sabnzbd.nzbstuff.NzbObject)
    start = time.time()

    # keep track of whether we can continue
    all_ok = True
    # keep track of par problems
    par_error = False
    # keep track of any unpacking errors
    unpack_error = False
    # Signal empty download, for when 'empty_postproc' is enabled
    empty = False
    nzb_list = []
    # These need to be initialised incase of a crash
    workdir_complete = ''
    postproc_time = 0
    script_log = ''
    script_line = ''
    crash_msg = ''

    ## Get the job flags
    nzo.save_attribs()
    flag_repair, flag_unpack, flag_delete = nzo.repair_opts
    # Normalize PP: Delete implies Unpack, Unpack implies Repair
    if flag_delete:
        flag_unpack = True
    if flag_unpack:
        flag_repair = True

    # Get the NZB name
    filename = nzo.final_name
    msgid = nzo.msgid

    if cfg.allow_streaming() and not (flag_repair or flag_unpack or flag_delete):
        # After streaming, force +D
        nzo.set_pp(3)
        nzo.status = Status.FAILED
        nzo.save_attribs()
        all_ok = False

    if nzo.fail_msg:
        # Special case: aborted due to too many missing data
        nzo.status = Status.FAILED
        nzo.save_attribs()
        all_ok = False
        par_error = unpack_error = True

    try:
        # Get the folder containing the download result
        workdir = nzo.downpath
        tmp_workdir_complete = None

        # if no files are present (except __admin__), fail the job
        if all_ok and len(globber(workdir)) < 2:
            if nzo.precheck:
                enough, ratio = nzo.check_quality()
                req_ratio = float(cfg.req_completion_rate()) / 100.0
                # Make sure that rounded ratio doesn't equal required ratio
                # when it is actually below required
                if (ratio < req_ratio) and (req_ratio - ratio) < 0.001:
                    ratio = req_ratio - 0.001
                emsg = '%.1f%%' % (ratio * 100.0)
                emsg2 = '%.1f%%' % float(cfg.req_completion_rate())
                emsg = T('Download might fail, only %s of required %s available') % (emsg, emsg2)
            else:
                emsg = T('Download failed - Out of your server\'s retention?')
                empty = True
            nzo.fail_msg = emsg
            nzo.set_unpack_info('Fail', emsg)
            nzo.status = Status.FAILED
            # do not run unpacking or parity verification
            flag_repair = flag_unpack = False
            # Empty downloads may still continue when 'empty_postproc' is set
            all_ok = cfg.empty_postproc() and empty
            if not all_ok:
                par_error = unpack_error = True

        script = nzo.script
        cat = nzo.cat

        logging.info('Starting PostProcessing on %s' +
                     ' => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s',
                     filename, flag_repair, flag_unpack, flag_delete, script, cat)

        ## Par processing, if enabled
        if all_ok and flag_repair:
            par_error, re_add = parring(nzo, workdir)
            if re_add:
                # Try to get more par files
                return False

        ## Check if user allows unsafe post-processing
        if flag_repair and cfg.safe_postproc():
            all_ok = all_ok and not par_error

        # Set complete dir to workdir in case we need to abort
        workdir_complete = workdir
        dirname = nzo.final_name
        marker_file = None

        if all_ok:
            one_folder = False
            ## Determine class directory
            if cfg.create_group_folders():
                complete_dir = addPrefixes(cfg.complete_dir.get_path(), nzo.dirprefix)
                complete_dir = create_dirs(complete_dir)
            else:
                catdir = config.get_categories(cat).dir()
                if catdir.endswith('*'):
                    # Trailing '*' means: no job subfolder, unpack straight into category dir
                    catdir = catdir.strip('*')
                    one_folder = True
                complete_dir = real_path(cfg.complete_dir.get_path(), catdir)

            ## TV/Movie/Date Renaming code part 1 - detect and construct paths
            if cfg.enable_meta():
                file_sorter = Sorter(nzo, cat)
            else:
                file_sorter = Sorter(None, cat)
            complete_dir = file_sorter.detect(dirname, complete_dir)
            if file_sorter.sort_file:
                one_folder = False

            if one_folder:
                workdir_complete = create_dirs(complete_dir)
            else:
                workdir_complete = get_unique_path(os.path.join(complete_dir, dirname), create_dir=True)
                marker_file = set_marker(workdir_complete)

            if not workdir_complete or not os.path.exists(workdir_complete):
                crash_msg = T('Cannot create final folder %s') % unicoder(os.path.join(complete_dir, dirname))
                raise IOError

            if cfg.folder_rename() and not one_folder:
                # Work in a '_UNPACK_' prefixed dir, renamed to final name at the end
                tmp_workdir_complete = prefix(workdir_complete, '_UNPACK_')
                try:
                    renamer(workdir_complete, tmp_workdir_complete)
                except:
                    pass # On failure, just use the original name
            else:
                tmp_workdir_complete = workdir_complete

        newfiles = []
        ## Run Stage 2: Unpack
        if flag_unpack:
            if all_ok:
                #set the current nzo status to "Extracting...". Used in History
                nzo.status = Status.EXTRACTING
                logging.info("Running unpack_magic on %s", filename)
                unpack_error, newfiles = unpack_magic(nzo, workdir, tmp_workdir_complete, flag_delete, one_folder, (), (), (), ())
                logging.info("unpack_magic finished on %s", filename)
            else:
                nzo.set_unpack_info('Unpack', T('No post-processing because of failed verification'))

        if cfg.safe_postproc():
            all_ok = all_ok and not unpack_error

        if all_ok:
            ## Move any (left-over) files to destination
            nzo.status = Status.MOVING
            nzo.set_action_line(T('Moving'), '...')
            for root, dirs, files in os.walk(workdir):
                if not root.endswith(JOB_ADMIN):
                    for file_ in files:
                        path = os.path.join(root, file_)
                        new_path = path.replace(workdir, tmp_workdir_complete)
                        ok, new_path = move_to_path(path, new_path)
                        newfiles.append(new_path)
                        if not ok:
                            nzo.set_unpack_info('Unpack', T('Failed moving %s to %s') % (unicoder(path), unicoder(new_path)))
                            all_ok = False
                            break

        ## Set permissions right
        set_permissions(tmp_workdir_complete)

        if all_ok and marker_file:
            del_marker(os.path.join(tmp_workdir_complete, marker_file))
            remove_from_list(marker_file, newfiles)

        if all_ok:
            ## Remove files matching the cleanup list
            cleanup_list(tmp_workdir_complete, True)

            ## Check if this is an NZB-only download, if so redirect to queue
            ## except when PP was Download-only
            if flag_repair:
                nzb_list = nzb_redirect(tmp_workdir_complete, nzo.final_name, nzo.pp, script, cat, priority=nzo.priority)
            else:
                nzb_list = None
            if nzb_list:
                nzo.set_unpack_info('Download', T('Sent %s to queue') % unicoder(nzb_list))
                cleanup_empty_directories(tmp_workdir_complete)
            else:
                cleanup_list(tmp_workdir_complete, False)

        script_output = ''
        script_ret = 0
        if not nzb_list:
            ## Give destination its final name
            if cfg.folder_rename() and tmp_workdir_complete and not one_folder:
                if all_ok:
                    try:
                        newfiles = rename_and_collapse_folder(tmp_workdir_complete, workdir_complete, newfiles)
                    except:
                        logging.error(Ta('Error renaming "%s" to "%s"'), tmp_workdir_complete, workdir_complete)
                        logging.info('Traceback: ', exc_info=True)
                        # Better disable sorting because filenames are all off now
                        file_sorter.sort_file = None
                else:
                    workdir_complete = tmp_workdir_complete.replace('_UNPACK_', '_FAILED_')
                    workdir_complete = get_unique_path(workdir_complete, n=0, create_dir=False)

            # job_result encodes the outcome for the user script:
            # -1 = empty, bit 0 = par error, bit 1 = unpack error
            if empty:
                job_result = -1
            else:
                job_result = int(par_error) + int(unpack_error) * 2

            if cfg.ignore_samples() > 0:
                remove_samples(workdir_complete)

            ## TV/Movie/Date Renaming code part 2 - rename and move files to parent folder
            if all_ok and file_sorter.sort_file:
                if newfiles:
                    file_sorter.rename(newfiles, workdir_complete)
                    workdir_complete, ok = file_sorter.move(workdir_complete)
                else:
                    workdir_complete, ok = file_sorter.rename_with_ext(workdir_complete)
                if not ok:
                    nzo.set_unpack_info('Unpack', T('Failed to move files'))
                    all_ok = False

            ## Run the user script
            script_path = make_script_path(script)
            if (all_ok or not cfg.safe_postproc()) and (not nzb_list) and script_path:
                #set the current nzo status to "Ext Script...". Used in History
                nzo.status = Status.RUNNING
                nzo.set_action_line(T('Running script'), unicoder(script))
                nzo.set_unpack_info('Script', T('Running user script %s') % unicoder(script), unique=True)
                script_log, script_ret = external_processing(script_path, workdir_complete, nzo.filename,
                                                             msgid, dirname, cat, nzo.group, job_result,
                                                             nzo.nzo_info.get('failure', ''))
                script_line = get_last_line(script_log)
                if script_log:
                    script_output = nzo.nzo_id
                if script_line:
                    nzo.set_unpack_info('Script', unicoder(script_line), unique=True)
                else:
                    nzo.set_unpack_info('Script', T('Ran %s') % unicoder(script), unique=True)
            else:
                script = ""
                script_line = ""
                script_ret = 0

        ## Email the results
        if (not nzb_list) and cfg.email_endjob():
            if (cfg.email_endjob() == 1) or (cfg.email_endjob() == 2 and (unpack_error or par_error)):
                emailer.endjob(dirname, msgid, cat, all_ok, workdir_complete, nzo.bytes_downloaded,
                               nzo.fail_msg, nzo.unpack_info, script, TRANS(script_log), script_ret)

        if script_output:
            # Can do this only now, otherwise it would show up in the email
            if script_ret:
                script_ret = 'Exit(%s) ' % script_ret
            else:
                script_ret = ''
            if script_line:
                nzo.set_unpack_info('Script',
                                    u'%s%s <a href="./scriptlog?name=%s">(%s)</a>' % (script_ret, unicoder(script_line), urllib.quote(script_output), T('More')),
                                    unique=True)
            else:
                nzo.set_unpack_info('Script',
                                    u'%s<a href="./scriptlog?name=%s">%s</a>' % (script_ret, urllib.quote(script_output), T('View script output')),
                                    unique=True)

        ## Cleanup again, including NZB files
        if all_ok:
            cleanup_list(workdir_complete, False)

        ## Remove newzbin bookmark, if any
        if msgid and all_ok:
            Bookmarks.do.del_bookmark(msgid)
        elif all_ok and isinstance(nzo.url, str):
            sabnzbd.proxy_rm_bookmark(nzo.url)

        ## Force error for empty result
        all_ok = all_ok and not empty

        ## Update indexer with results
        if nzo.encrypted > 0:
            Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_ENCRYPTED)
        if empty:
            hosts = map(lambda s: s.host, sabnzbd.downloader.Downloader.do.nzo_servers(nzo))
            if not hosts:
                hosts = [None]
            for host in hosts:
                Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_EXPIRED, host)

        ## Show final status in history
        if all_ok:
            growler.send_notification(T('Download Completed'), filename, 'complete')
            nzo.status = Status.COMPLETED
        else:
            growler.send_notification(T('Download Failed'), filename, 'complete')
            nzo.status = Status.FAILED

    except:
        # Catch-all: mark the job failed but still run the cleanup below
        logging.error(Ta('Post Processing Failed for %s (%s)'), filename, crash_msg)
        if not crash_msg:
            logging.info("Traceback: ", exc_info=True)
            crash_msg = T('see logfile')
        nzo.fail_msg = T('PostProcessing was aborted (%s)') % unicoder(crash_msg)
        growler.send_notification(T('Download Failed'), filename, 'complete')
        nzo.status = Status.FAILED
        par_error = True
        all_ok = False

        if cfg.email_endjob():
            emailer.endjob(dirname, msgid, cat, all_ok, workdir_complete, nzo.bytes_downloaded,
                           nzo.fail_msg, nzo.unpack_info, '', '', 0)

    if all_ok:
        # If the folder only contains one file OR folder, have that as the path
        # Be aware that series/generic/date sorting may move a single file into a folder containing other files
        workdir_complete = one_file_or_folder(workdir_complete)
        workdir_complete = os.path.normpath(workdir_complete)

    # Log the overall time taken for postprocessing
    postproc_time = int(time.time() - start)

    # Create the history DB instance
    history_db = database.get_history_handle()
    # Add the nzo to the database. Only the path, script and time taken is passed
    # Other information is obtained from the nzo
    history_db.add_history_db(nzo, workdir_complete, nzo.downpath, postproc_time, script_log, script_line)
    # The connection is only used once, so close it here
    history_db.close()

    ## Clean up the NZO
    try:
        logging.info('Cleaning up %s (keep_basic=%s)', filename, str(not all_ok))
        sabnzbd.nzbqueue.NzbQueue.do.cleanup_nzo(nzo, keep_basic=not all_ok)
    except:
        logging.error(Ta('Cleanup of %s failed.'), nzo.final_name)
        logging.info("Traceback: ", exc_info=True)

    ## Remove download folder
    if all_ok:
        try:
            if os.path.exists(workdir):
                logging.debug('Removing workdir %s', workdir)
                remove_all(workdir, recursive=True)
        except:
            logging.error(Ta('Error removing workdir (%s)'), workdir)
            logging.info("Traceback: ", exc_info=True)

    return True
def process_job(nzo):
    """ Run the complete post-processing pipeline for a single job.

    Stages (in order): optional par2 repair, unpack, move to the final
    folder, cleanup-list filtering, user script, email notification and
    history registration. Always returns True when the job is finished
    (successfully or not); returns False only when more par files were
    queued and the job must be retried later.
    """
    assert isinstance(nzo, sabnzbd.nzbstuff.NzbObject)
    start = time.time()

    # Overall success flag; any failed stage may clear it
    all_ok = True
    # Did par2 verification/repair report a problem?
    par_error = False
    # Did unpacking report a problem?
    unpack_error = False
    nzb_list = []
    # Pre-initialise everything the crash handler / epilogue reads,
    # so an early exception cannot raise NameError
    workdir_complete = ''
    postproc_time = 0
    script_log = ''
    script_line = ''
    crash_msg = ''

    # Job flags (persist attribs first so a crash keeps them on disk)
    nzo.save_attribs()
    flag_repair, flag_unpack, flag_delete = nzo.repair_opts
    # Normalize PP: delete implies unpack, unpack implies repair
    if flag_delete:
        flag_unpack = True
    if flag_unpack:
        flag_repair = True

    # Job identification
    filename = nzo.final_name
    msgid = nzo.msgid

    if cfg.allow_streaming() and not (flag_repair or flag_unpack or flag_delete):
        # After streaming, force +D
        nzo.set_pp(3)
        nzo.status = 'Failed'
        nzo.save_attribs()
        all_ok = False

    try:
        # Folder holding the downloaded articles
        workdir = nzo.downpath
        tmp_workdir_complete = None

        # If nothing besides the __admin__ folder arrived, the job failed
        if len(globber(workdir)) < 2:
            emsg = T('Download failed - Out of your server\'s retention?')
            nzo.fail_msg = emsg
            nzo.status = 'Failed'
            # Skip unpacking and parity verification entirely
            flag_repair = flag_unpack = False
            par_error = unpack_error = True
            all_ok = False

        script = nzo.script
        cat = nzo.cat

        logging.info('Starting PostProcessing on %s'
                     ' => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s',
                     filename, flag_repair, flag_unpack, flag_delete, script, cat)

        # Stage 1: par2 verification/repair, if enabled
        if flag_repair:
            par_error, re_add = parring(nzo, workdir)
            if re_add:
                # More par files were requested; retry this job later
                return False

        # Honour the "safe post-processing" setting after repair
        if flag_repair and cfg.safe_postproc():
            all_ok = all_ok and not par_error

        # Fall back to workdir as the final path in case we must abort
        workdir_complete = workdir
        dirname = nzo.final_name

        if all_ok:
            one_folder = False
            # Determine the destination (class) directory
            if cfg.create_group_folders():
                complete_dir = addPrefixes(cfg.complete_dir.get_path(), nzo.dirprefix)
                complete_dir = create_dirs(complete_dir)
            else:
                catdir = config.get_categories(cat).dir()
                if catdir.endswith('*'):
                    # Trailing '*' means: dump directly into the category folder
                    catdir = catdir.strip('*')
                    one_folder = True
                complete_dir = real_path(cfg.complete_dir.get_path(), catdir)

            # TV/Movie/Date renaming, part 1: detect and construct paths
            file_sorter = Sorter(cat)
            complete_dir = file_sorter.detect(dirname, complete_dir)
            if file_sorter.is_sortfile():
                one_folder = False

            if one_folder:
                workdir_complete = create_dirs(complete_dir)
            else:
                workdir_complete = get_unique_path(os.path.join(complete_dir, dirname),
                                                   create_dir=True)
            if not workdir_complete or not os.path.exists(workdir_complete):
                crash_msg = T('Cannot create final folder %s') % unicoder(
                    os.path.join(complete_dir, dirname))
                raise IOError

            if cfg.folder_rename() and not one_folder:
                # Work under an _UNPACK_ prefix until post-processing succeeds
                tmp_workdir_complete = prefix(workdir_complete, '_UNPACK_')
                try:
                    renamer(workdir_complete, tmp_workdir_complete)
                except:
                    pass  # On failure, just use the original name
            else:
                tmp_workdir_complete = workdir_complete

        newfiles = []
        # Stage 2: unpack
        if flag_unpack:
            if all_ok:
                # Status shown as "Extracting..." in History
                nzo.status = 'Extracting'
                logging.info("Running unpack_magic on %s", filename)
                unpack_error, newfiles = unpack_magic(
                    nzo, workdir, tmp_workdir_complete, flag_delete, one_folder,
                    (), (), (), ())
                logging.info("unpack_magic finished on %s", filename)
            else:
                nzo.set_unpack_info(
                    'Unpack', T('No post-processing because of failed verification'))

        if cfg.safe_postproc():
            all_ok = all_ok and not unpack_error

        if all_ok:
            # Move any left-over files to the destination
            nzo.status = 'Moving'
            nzo.set_action_line(T('Moving'), '...')
            for root, dirs, files in os.walk(workdir):
                if not root.endswith(JOB_ADMIN):
                    for file_ in files:
                        path = os.path.join(root, file_)
                        new_path = path.replace(workdir, tmp_workdir_complete)
                        new_path = get_unique_filename(new_path)
                        move_to_path(path, new_path, unique=False)

        # Fix up permissions (POSIX only)
        if not sabnzbd.WIN32:
            perm_script(tmp_workdir_complete, cfg.umask())

        if all_ok:
            # Remove files matching the cleanup list
            cleanup_list(tmp_workdir_complete, True)

            # NZB-only download? Redirect the NZBs back into the queue,
            # except when PP was Download-only
            if flag_repair:
                nzb_list = nzb_redirect(tmp_workdir_complete, nzo.final_name,
                                        nzo.pp, script, cat, priority=nzo.priority)
            else:
                nzb_list = None
            if nzb_list:
                nzo.set_unpack_info('Download',
                                    T('Sent %s to queue') % unicoder(nzb_list))
                try:
                    remove_dir(tmp_workdir_complete)
                except:
                    pass
            else:
                cleanup_list(tmp_workdir_complete, False)

        script_output = ''
        script_ret = 0
        if not nzb_list:
            # Give the destination its final name
            if cfg.folder_rename() and tmp_workdir_complete and not one_folder:
                if not all_ok:
                    # Mark a failed job's folder as _FAILED_ instead
                    workdir_complete = tmp_workdir_complete.replace('_UNPACK_', '_FAILED_')
                    workdir_complete = get_unique_path(workdir_complete, n=0,
                                                       create_dir=False)
                try:
                    collapse_folder(tmp_workdir_complete, workdir_complete)
                except:
                    logging.error(Ta('Error renaming "%s" to "%s"'),
                                  tmp_workdir_complete, workdir_complete)
                    logging.info("Traceback: ", exc_info=True)

            # Encode stage results for the user script: bit 0 = par, bit 1 = unpack
            job_result = int(par_error) + int(unpack_error) * 2

            if cfg.ignore_samples() > 0:
                remove_samples(workdir_complete)

            # TV/Movie/Date renaming, part 2: rename and move files to parent folder
            if all_ok:
                if newfiles and file_sorter.is_sortfile():
                    file_sorter.rename(newfiles, workdir_complete)
                    workdir_complete = file_sorter.move(workdir_complete)

            # Stage 3: run the user script, if configured
            script_path = make_script_path(script)
            if all_ok and (not nzb_list) and script_path:
                # Status shown as "Ext Script..." in History
                nzo.status = 'Running'
                nzo.set_action_line(T('Running script'), unicoder(script))
                nzo.set_unpack_info('Script',
                                    T('Running user script %s') % unicoder(script),
                                    unique=True)
                script_log, script_ret = external_processing(
                    script_path, workdir_complete, nzo.filename,
                    msgid, dirname, cat, nzo.group, job_result)
                script_line = get_last_line(script_log)
                if script_log:
                    script_output = nzo.nzo_id
                if script_line:
                    nzo.set_unpack_info('Script', unicoder(script_line), unique=True)
                else:
                    nzo.set_unpack_info('Script', T('Ran %s') % unicoder(script),
                                        unique=True)
            else:
                script = ""
                script_line = ""
                script_ret = 0

        # Stage 4: email the results, honouring the email_endjob mode
        if (not nzb_list) and cfg.email_endjob():
            if (cfg.email_endjob() == 1) or (cfg.email_endjob() == 2 and
               (unpack_error or par_error)):
                emailer.endjob(dirname, msgid, cat, all_ok, workdir_complete,
                               nzo.bytes_downloaded, nzo.unpack_info,
                               script, TRANS(script_log), script_ret)

        if script_output:
            # Can do this only now, otherwise it would show up in the email
            if script_ret:
                script_ret = 'Exit(%s) ' % script_ret
            else:
                script_ret = ''
            if script_line:
                nzo.set_unpack_info(
                    'Script',
                    u'%s%s <a href="./scriptlog?name=%s">(%s)</a>' % (
                        script_ret, unicoder(script_line),
                        urllib.quote(script_output), T('More')),
                    unique=True)
            else:
                nzo.set_unpack_info(
                    'Script',
                    u'%s<a href="./scriptlog?name=%s">%s</a>' % (
                        script_ret, urllib.quote(script_output),
                        T('View script output')),
                    unique=True)

        # Cleanup again, including NZB files
        cleanup_list(workdir_complete, False)

        # Remove newzbin bookmark, if any
        if msgid and all_ok:
            Bookmarks.do.del_bookmark(msgid)
        elif all_ok:
            sabnzbd.proxy_rm_bookmark(nzo.url)

        # Show the final status in history
        if all_ok:
            osx.sendGrowlMsg(T('Download Completed'), filename,
                             osx.NOTIFICATION['complete'])
            nzo.status = 'Completed'
        else:
            osx.sendGrowlMsg(T('Download Failed'), filename,
                             osx.NOTIFICATION['complete'])
            nzo.status = 'Failed'

    except:
        # Catch-all: post-processing must never take the whole program down
        logging.error(Ta('Post Processing Failed for %s (%s)'), filename, crash_msg)
        if not crash_msg:
            logging.info("Traceback: ", exc_info=True)
            crash_msg = T('see logfile')
        nzo.fail_msg = T('PostProcessing was aborted (%s)') % unicoder(crash_msg)
        osx.sendGrowlMsg(T('Download Failed'), filename,
                         osx.NOTIFICATION['complete'])
        nzo.status = 'Failed'
        par_error = True
        all_ok = False

    if all_ok:
        # If the folder only contains one file OR folder, have that as the path.
        # Be aware that series/generic/date sorting may move a single file
        # into a folder containing other files
        workdir_complete = one_file_or_folder(workdir_complete)
        workdir_complete = os.path.normpath(workdir_complete)

    # Overall time taken for post-processing
    postproc_time = int(time.time() - start)

    # Register the job in the history database.
    # Only the path, script and time taken are passed;
    # everything else is obtained from the nzo itself
    history_db = database.get_history_handle()
    history_db.add_history_db(nzo, workdir_complete, nzo.downpath,
                              postproc_time, script_log, script_line)
    # The connection is only used once, so close it here
    history_db.close()

    # Clean up the NZO (keep the basic admin data when the job failed)
    try:
        logging.info('Cleaning up %s (keep_basic=%s)', filename, str(not all_ok))
        sabnzbd.nzbqueue.NzbQueue.do.cleanup_nzo(nzo, keep_basic=not all_ok)
    except:
        logging.error(Ta('Cleanup of %s failed.'), nzo.final_name)
        logging.info("Traceback: ", exc_info=True)

    # Remove the download folder, but only on full success
    if all_ok:
        try:
            if os.path.exists(workdir):
                logging.debug('Removing workdir %s', workdir)
                remove_all(workdir, recursive=True)
        except:
            logging.error(Ta('Error removing workdir (%s)'), workdir)
            logging.info("Traceback: ", exc_info=True)

    return True