def run(self):
    while 1:
        job = self.queue.get()
        if not job:
            logging.info("Shutting down")
            break

        nzo, nzf = job

        if nzf:
            # Check if enough disk space is free, if not pause downloader and send email
            if diskspace(force=True)['download_dir'][1] < (cfg.download_free.get_float() + nzf.bytes) / GIGI:
                # Only warn and email once
                if not sabnzbd.downloader.Downloader.do.paused:
                    logging.warning(T('Too little diskspace forcing PAUSE'))
                    # Pause downloader, but don't save, since the disk is almost full!
                    sabnzbd.downloader.Downloader.do.pause()
                    sabnzbd.emailer.diskfull()

                # Abort all direct unpackers, just to be sure
                sabnzbd.directunpacker.abort_all()

                # Place job back in queue and wait 30 seconds to hope it gets resolved
                self.process(job)
                sleep(30)
                continue

            # Prepare filename
            nzo.verify_nzf_filename(nzf)
            nzf.filename = sanitize_filename(nzf.filename)
            filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, nzf.filename)
            nzf.filename = get_filename(filepath)

            if filepath:
                logging.info('Decoding %s %s', filepath, nzf.type)
                try:
                    filepath = self.assemble(nzf, filepath)
                except IOError, (errno, strerror):
                    # If job was deleted or in active post-processing, ignore error
                    if not nzo.deleted and not nzo.is_gone() and not nzo.pp_active:
                        # 28 == disk full => pause downloader
                        if errno == 28:
                            logging.error(T('Disk full! Forcing Pause'))
                        else:
                            logging.error(T('Disk error on creating file %s'), clip_path(filepath))
                            # Log traceback
                            logging.info('Traceback: ', exc_info=True)
                        # Pause without saving
                        sabnzbd.downloader.Downloader.do.pause()
                    continue
                except:
                    logging.error(T('Fatal error in Assembler'), exc_info=True)
                    break
def prepare_extraction_path(nzo):
    """ Based on the information that we have, generate
        the extraction path and create the directory.
        Separated so it can be called from DirectUnpacker
    """
    one_folder = False
    marker_file = None

    # Determine class directory
    catdir = config.get_categories(nzo.cat).dir()
    if catdir.endswith('*'):
        catdir = catdir.strip('*')
        one_folder = True
    complete_dir = real_path(cfg.complete_dir.get_path(), catdir)
    complete_dir = long_path(complete_dir)

    # TV/Movie/Date Renaming code part 1 - detect and construct paths
    if cfg.enable_meta():
        file_sorter = Sorter(nzo, nzo.cat)
    else:
        file_sorter = Sorter(None, nzo.cat)
    complete_dir = file_sorter.detect(nzo.final_name, complete_dir)
    if file_sorter.sort_file:
        one_folder = False

    complete_dir = sanitize_and_trim_path(complete_dir)

    if one_folder:
        workdir_complete = create_dirs(complete_dir)
    else:
        workdir_complete = get_unique_path(os.path.join(complete_dir, nzo.final_name), create_dir=True)
        marker_file = set_marker(workdir_complete)

    if not workdir_complete or not os.path.exists(workdir_complete):
        logging.error(T('Cannot create final folder %s') % unicoder(os.path.join(complete_dir, nzo.final_name)))
        raise IOError

    if cfg.folder_rename() and not one_folder:
        prefixed_path = prefix(workdir_complete, '_UNPACK_')
        tmp_workdir_complete = get_unique_path(prefix(workdir_complete, '_UNPACK_'), create_dir=False)

        try:
            renamer(workdir_complete, tmp_workdir_complete)
        except:
            pass  # On failure, just use the original name

        # Is the unique path different? Then we also need to modify the final path
        if prefixed_path != tmp_workdir_complete:
            workdir_complete = workdir_complete + os.path.splitext(tmp_workdir_complete)[1]
    else:
        tmp_workdir_complete = workdir_complete

    return tmp_workdir_complete, workdir_complete, file_sorter, one_folder, marker_file
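# Editorial note (hedged, not part of the original source): callers unpack the tuple
# returned above as (tmp_workdir_complete, workdir_complete, file_sorter, one_folder,
# marker_file). Extraction happens into the temporary '_UNPACK_'-prefixed folder, which
# is renamed to the final workdir_complete once post-processing succeeds, and the marker
# file created by set_marker() is removed again; the same pattern is visible in
# process_job further below.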
def create_unrar_instance(self):
    """ Start the unrar instance using the user's options """
    # Generate extraction path and save for post-proc
    if not self.unpack_dir_info:
        try:
            self.unpack_dir_info = prepare_extraction_path(self.nzo)
        except:
            # Prevent fatal crash if directory creation fails
            self.abort()
            return

    # Get the information
    extraction_path, _, _, one_folder, _ = self.unpack_dir_info

    # Set options
    if self.nzo.password:
        password_command = '-p%s' % self.nzo.password
    else:
        password_command = '-p-'

    if one_folder or cfg.flat_unpack():
        action = 'e'
    else:
        action = 'x'

    # The first NZF
    self.rarfile_nzf = self.have_next_volume()

    # Generate command
    rarfile_path = os.path.join(self.nzo.downpath, self.rarfile_nzf.filename)
    if sabnzbd.WIN32:
        # For Unrar to support long-path, we need to circumvent Python's list2cmdline
        # See: https://github.com/sabnzbd/sabnzbd/issues/1043
        command = ['%s' % sabnzbd.newsunpack.RAR_COMMAND, action, '-vp', '-idp', '-o+', '-ai', password_command,
                   '%s' % clip_path(rarfile_path), '%s\\' % long_path(extraction_path)]
    else:
        # Don't use "-ai" (not needed for non-Windows)
        command = ['%s' % sabnzbd.newsunpack.RAR_COMMAND, action, '-vp', '-idp', '-o+', password_command,
                   '%s' % rarfile_path, '%s/' % extraction_path]

    if cfg.ignore_unrar_dates():
        command.insert(3, '-tsm-')

    # Let's start from the first one!
    self.cur_volume = 1
    stup, need_shell, command, creationflags = build_command(command, flatten_command=True)
    logging.debug('Running unrar for DirectUnpack %s', command)
    self.active_instance = Popen(command, shell=False, stdin=subprocess.PIPE,
                                 stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                 startupinfo=stup, creationflags=creationflags)

    # Add to runners
    ACTIVE_UNPACKERS.append(self)

    # Doing the first
    logging.info('DirectUnpacked volume %s for %s', self.cur_volume, self.cur_setname)
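# Editorial note (hedged, not part of the original source): with hypothetical paths and a
# hypothetical password 'secret', the command list built above would flatten to roughly
#
#   unrar x -vp -idp -o+ -psecret /incomplete/Example.Job/example.part001.rar /complete/Example.Job/
#
# on a non-Windows system. '-vp' makes unrar pause before opening each next volume, which
# is what lets DirectUnpacker feed .rar volumes one at a time as they finish downloading;
# '-idp' hides the percentage indicator and '-o+' overwrites existing files.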
def run(self):
    import sabnzbd.nzbqueue
    while 1:
        job = self.queue.get()
        if not job:
            logging.info("Shutting down")
            break

        nzo, nzf = job

        if nzf:
            sabnzbd.CheckFreeSpace()
            filename = sanitize_filename(nzf.filename)
            nzf.filename = filename

            dupe = nzo.check_for_dupe(nzf)

            filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, filename)

            if filepath:
                logging.info('Decoding %s %s', filepath, nzf.type)
                try:
                    filepath = _assemble(nzf, filepath, dupe)
                except IOError, (errno, strerror):
                    if nzo.deleted:
                        # Job was deleted, ignore error
                        pass
                    else:
                        # 28 == disk full => pause downloader
                        if errno == 28:
                            logging.error(T('Disk full! Forcing Pause'))
                        else:
                            logging.error(T('Disk error on creating file %s'), clip_path(filepath))
                        # Pause without saving
                        sabnzbd.downloader.Downloader.do.pause(save=False)
                except:
                    logging.error(T('Fatal error in Assembler'), exc_info=True)
                    break
def run(self):
    while 1:
        job = self.queue.get()
        if not job:
            logging.info("Shutting down")
            break

        nzo, nzf = job

        if nzf:
            sabnzbd.CheckFreeSpace()

            # We allow win_devices because otherwise par2cmdline fails to repair
            filename = sanitize_filename(nzf.filename, allow_win_devices=True)
            nzf.filename = filename

            dupe = nzo.check_for_dupe(nzf)

            filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, filename)

            if filepath:
                logging.info('Decoding %s %s', filepath, nzf.type)
                try:
                    filepath = _assemble(nzf, filepath, dupe)
                except IOError, (errno, strerror):
                    # If job was deleted, ignore error
                    if not nzo.is_gone():
                        # 28 == disk full => pause downloader
                        if errno == 28:
                            logging.error(T('Disk full! Forcing Pause'))
                        else:
                            logging.error(T('Disk error on creating file %s'), clip_path(filepath))
                        # Pause without saving
                        sabnzbd.downloader.Downloader.do.pause(save=False)
                    continue
                except:
                    logging.error(T('Fatal error in Assembler'), exc_info=True)
                    break
def run(self):
    import sabnzbd.nzbqueue
    while 1:
        job = self.queue.get()
        if not job:
            logging.info("Shutting down")
            break

        nzo, nzf = job

        if nzf:
            sabnzbd.CheckFreeSpace()
            filename = sanitize_filename(nzf.filename)
            nzf.filename = filename

            dupe = nzo.check_for_dupe(nzf)

            filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, filename)

            if filepath:
                logging.info("Decoding %s %s", filepath, nzf.type)
                try:
                    filepath = _assemble(nzf, filepath, dupe)
                except IOError, (errno, strerror):
                    if nzo.is_gone():
                        # Job was deleted, ignore error
                        pass
                    else:
                        # 28 == disk full => pause downloader
                        if errno == 28:
                            logging.error(T("Disk full! Forcing Pause"))
                        else:
                            logging.error(T("Disk error on creating file %s"), clip_path(filepath))
                        # Pause without saving
                        sabnzbd.downloader.Downloader.do.pause(save=False)
                except:
                    logging.error(T("Fatal error in Assembler"), exc_info=True)
                    break
def process_job(nzo):
    """ Process one job """
    if 0:
        assert isinstance(nzo, sabnzbd.nzbstuff.NzbObject)  # Assert only for debug purposes
    start = time.time()

    # keep track of whether we can continue
    all_ok = True
    # keep track of par problems
    par_error = False
    # keep track of any unpacking errors
    unpack_error = False
    # Signal empty download, for when 'empty_postproc' is enabled
    empty = False
    nzb_list = []
    # These need to be initialized in case of a crash
    workdir_complete = ''
    postproc_time = 0  # @UnusedVariable -- pep8 bug?
    script_log = ''
    script_line = ''
    crash_msg = ''

    # Get the job flags
    nzo.save_attribs()
    flag_repair, flag_unpack, flag_delete = nzo.repair_opts
    # Normalize PP
    if flag_delete:
        flag_unpack = True
    if flag_unpack:
        flag_repair = True

    # Get the NZB name
    filename = nzo.final_name

    if cfg.allow_streaming() and not (flag_repair or flag_unpack or flag_delete):
        # After streaming, force +D
        nzo.set_pp(3)
        nzo.status = Status.FAILED
        nzo.save_attribs()
        all_ok = False

    if nzo.fail_msg:  # Special case: aborted due to too many missing data
        nzo.status = Status.FAILED
        nzo.save_attribs()
        all_ok = False
        par_error = True
        unpack_error = 1

    try:
        # Get the folder containing the download result
        workdir = nzo.downpath
        tmp_workdir_complete = None

        # if no files are present (except __admin__), fail the job
        if all_ok and len(globber(workdir)) < 2:
            if nzo.precheck:
                _enough, ratio = nzo.check_quality()
                req_ratio = float(cfg.req_completion_rate()) / 100.0
                # Make sure that rounded ratio doesn't equal required ratio
                # when it is actually below required
                if (ratio < req_ratio) and (req_ratio - ratio) < 0.001:
                    ratio = req_ratio - 0.001
                emsg = '%.1f%%' % (ratio * 100.0)
                emsg2 = '%.1f%%' % float(cfg.req_completion_rate())
                emsg = T('Download might fail, only %s of required %s available') % (emsg, emsg2)
            else:
                emsg = T('Download failed - Not on your server(s)')
                empty = True
            nzo.fail_msg = emsg
            nzo.set_unpack_info('Fail', emsg)
            nzo.status = Status.FAILED
            # do not run unpacking or parity verification
            flag_repair = flag_unpack = False
            all_ok = cfg.empty_postproc() and empty
            if not all_ok:
                par_error = True
                unpack_error = 1

        script = nzo.script
        cat = nzo.cat

        logging.info('Starting Post-Processing on %s' +
                     ' => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s',
                     filename, flag_repair, flag_unpack, flag_delete, script, cat)

        # Set complete dir to workdir in case we need to abort
        workdir_complete = workdir
        dirname = nzo.final_name
        marker_file = None

        # Par processing, if enabled
        if all_ok and flag_repair:
            if not check_win_maxpath(workdir):
                crash_msg = T('Path exceeds 260, repair by "par2" is not possible')
                raise WindowsError
            par_error, re_add = parring(nzo, workdir)
            if re_add:
                # Try to get more par files
                return False

        # Check if user allows unsafe post-processing
        if flag_repair and cfg.safe_postproc():
            all_ok = all_ok and not par_error

        if all_ok:
            fix_unix_encoding(workdir)
            one_folder = False
            # Determine class directory
            if cfg.create_group_folders():
                complete_dir = addPrefixes(cfg.complete_dir.get_path(), nzo.dirprefix)
                complete_dir = create_dirs(complete_dir)
            else:
                catdir = config.get_categories(cat).dir()
                if catdir.endswith('*'):
                    catdir = catdir.strip('*')
                    one_folder = True
                complete_dir = real_path(cfg.complete_dir.get_path(), catdir)
                complete_dir = long_path(complete_dir)

            # TV/Movie/Date Renaming code part 1 - detect and construct paths
            if cfg.enable_meta():
                file_sorter = Sorter(nzo, cat)
            else:
                file_sorter = Sorter(None, cat)
            complete_dir = file_sorter.detect(dirname, complete_dir)
            if file_sorter.sort_file:
                one_folder = False

            complete_dir = sanitize_and_trim_path(complete_dir)

            if one_folder:
                workdir_complete = create_dirs(complete_dir)
            else:
                workdir_complete = get_unique_path(os.path.join(complete_dir, dirname), create_dir=True)
                marker_file = set_marker(workdir_complete)

            if not workdir_complete or not os.path.exists(workdir_complete):
                crash_msg = T('Cannot create final folder %s') % unicoder(os.path.join(complete_dir, dirname))
                raise IOError

            if cfg.folder_rename() and not one_folder:
                tmp_workdir_complete = prefix(workdir_complete, '_UNPACK_')
                try:
                    renamer(workdir_complete, tmp_workdir_complete)
                except:
                    pass  # On failure, just use the original name
            else:
                tmp_workdir_complete = workdir_complete

            newfiles = []
            # Run Stage 2: Unpack
            if flag_unpack:
                if all_ok:
                    # set the current nzo status to "Extracting...". Used in History
                    nzo.status = Status.EXTRACTING
                    logging.info("Running unpack_magic on %s", filename)
                    short_complete = short_path(tmp_workdir_complete)
                    unpack_error, newfiles = unpack_magic(nzo, short_path(workdir), short_complete,
                                                          flag_delete, one_folder, (), (), (), (), ())
                    if short_complete != tmp_workdir_complete:
                        newfiles = [f.replace(short_complete, tmp_workdir_complete) for f in newfiles]
                    logging.info("unpack_magic finished on %s", filename)
                else:
                    nzo.set_unpack_info('Unpack', T('No post-processing because of failed verification'))

            if cfg.safe_postproc():
                all_ok = all_ok and not unpack_error

            if all_ok:
                # Move any (left-over) files to destination
                nzo.status = Status.MOVING
                nzo.set_action_line(T('Moving'), '...')
                for root, _dirs, files in os.walk(workdir):
                    if not root.endswith(JOB_ADMIN):
                        for file_ in files:
                            path = os.path.join(root, file_)
                            new_path = path.replace(workdir, tmp_workdir_complete)
                            ok, new_path = move_to_path(path, new_path)
                            newfiles.append(new_path)
                            if not ok:
                                nzo.set_unpack_info('Unpack', T('Failed moving %s to %s') % (unicoder(path), unicoder(new_path)))
                                all_ok = False
                                break

            # Set permissions right
            set_permissions(tmp_workdir_complete)

            if all_ok and marker_file:
                del_marker(os.path.join(tmp_workdir_complete, marker_file))
                remove_from_list(marker_file, newfiles)

            if all_ok:
                # Remove files matching the cleanup list
                cleanup_list(tmp_workdir_complete, True)

                # Check if this is an NZB-only download, if so redirect to queue
                # except when PP was Download-only
                if flag_repair:
                    nzb_list = nzb_redirect(tmp_workdir_complete, nzo.final_name, nzo.pp, script, cat, priority=nzo.priority)
                else:
                    nzb_list = None
                if nzb_list:
                    nzo.set_unpack_info('Download', T('Sent %s to queue') % unicoder(nzb_list))
                    cleanup_empty_directories(tmp_workdir_complete)
                else:
                    cleanup_list(tmp_workdir_complete, False)

        script_output = ''
        script_ret = 0
        if not nzb_list:
            # Give destination its final name
            if cfg.folder_rename() and tmp_workdir_complete and not one_folder:
                if all_ok:
                    try:
                        newfiles = rename_and_collapse_folder(tmp_workdir_complete, workdir_complete, newfiles)
                    except:
                        logging.error(T('Error renaming "%s" to "%s"'), clip_path(tmp_workdir_complete), clip_path(workdir_complete))
                        logging.info('Traceback: ', exc_info=True)
                        # Better disable sorting because filenames are all off now
                        file_sorter.sort_file = None
                else:
                    workdir_complete = tmp_workdir_complete.replace('_UNPACK_', '_FAILED_')
                    workdir_complete = get_unique_path(workdir_complete, n=0, create_dir=False)
                    workdir_complete = workdir_complete

            if empty:
                job_result = -1
            else:
                job_result = int(par_error) + int(bool(unpack_error)) * 2

            if cfg.ignore_samples():
                remove_samples(workdir_complete)

            # TV/Movie/Date Renaming code part 2 - rename and move files to parent folder
            if all_ok and file_sorter.sort_file:
                if newfiles:
                    file_sorter.rename(newfiles, workdir_complete)
                    workdir_complete, ok = file_sorter.move(workdir_complete)
                else:
                    workdir_complete, ok = file_sorter.rename_with_ext(workdir_complete)
                if not ok:
                    nzo.set_unpack_info('Unpack', T('Failed to move files'))
                    all_ok = False

            # Run the user script
            script_path = make_script_path(script)
            if (all_ok or not cfg.safe_postproc()) and (not nzb_list) and script_path:
                # set the current nzo status to "Ext Script...". Used in History
                nzo.status = Status.RUNNING
                nzo.set_action_line(T('Running script'), unicoder(script))
                nzo.set_unpack_info('Script', T('Running user script %s') % unicoder(script), unique=True)
                script_log, script_ret = external_processing(short_path(script_path, False), short_path(workdir_complete, False),
                                                             nzo.filename, dirname, cat, nzo.group, job_result,
                                                             nzo.nzo_info.get('failure', ''))
                script_line = get_last_line(script_log)
                if script_log:
                    script_output = nzo.nzo_id
                if script_line:
                    nzo.set_unpack_info('Script', unicoder(script_line), unique=True)
                else:
                    nzo.set_unpack_info('Script', T('Ran %s') % unicoder(script), unique=True)
            else:
                script = ""
                script_line = ""
                script_ret = 0

        # Maybe bad script result should fail job
        if script_ret and cfg.script_can_fail():
            script_error = True
            all_ok = False
            nzo.fail_msg = T('Script exit code is %s') % script_ret
        else:
            script_error = False

        # Email the results
        if (not nzb_list) and cfg.email_endjob():
            if (cfg.email_endjob() == 1) or (cfg.email_endjob() == 2 and (unpack_error or par_error or script_error)):
                emailer.endjob(dirname, cat, all_ok, workdir_complete, nzo.bytes_downloaded,
                               nzo.fail_msg, nzo.unpack_info, script, TRANS(script_log), script_ret)

        if script_output:
            # Can do this only now, otherwise it would show up in the email
            if script_ret:
                script_ret = 'Exit(%s) ' % script_ret
            else:
                script_ret = ''
            if len(script_log.rstrip().split('\n')) > 1:
                nzo.set_unpack_info('Script',
                                    u'%s%s <a href="./scriptlog?name=%s">(%s)</a>' % (script_ret,
                                                                                       xml.sax.saxutils.escape(script_line),
                                                                                       xml.sax.saxutils.escape(script_output),
                                                                                       T('More')),
                                    unique=True)
            else:
                # No '(more)' button needed
                nzo.set_unpack_info('Script', u'%s%s ' % (script_ret, xml.sax.saxutils.escape(script_line)), unique=True)

        # Cleanup again, including NZB files
        if all_ok:
            cleanup_list(workdir_complete, False)

        # Force error for empty result
        all_ok = all_ok and not empty

        # Update indexer with results
        if cfg.rating_enable():
            if nzo.encrypted > 0:
                Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_ENCRYPTED)
            if empty:
                hosts = map(lambda s: s.host, sabnzbd.downloader.Downloader.do.nzo_servers(nzo))
                if not hosts:
                    hosts = [None]
                for host in hosts:
                    Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_EXPIRED, host)

        # Show final status in history
        if all_ok:
            notifier.send_notification(T('Download Completed'), filename, 'complete')
            nzo.status = Status.COMPLETED
        else:
            notifier.send_notification(T('Download Failed'), filename, 'failed')
            nzo.status = Status.FAILED

    except:
        logging.error(T('Post Processing Failed for %s (%s)'), filename, crash_msg)
        if not crash_msg:
            logging.info("Traceback: ", exc_info=True)
            crash_msg = T('see logfile')
        nzo.fail_msg = T('PostProcessing was aborted (%s)') % unicoder(crash_msg)
        notifier.send_notification(T('Download Failed'), filename, 'failed')
        nzo.status = Status.FAILED
        par_error = True
        all_ok = False

        if cfg.email_endjob():
            emailer.endjob(dirname, cat, all_ok, clip_path(workdir_complete), nzo.bytes_downloaded,
                           nzo.fail_msg, nzo.unpack_info, '', '', 0)

    if all_ok:
        # If the folder only contains one file OR folder, have that as the path
        # Be aware that series/generic/date sorting may move a single file into a folder containing other files
        workdir_complete = one_file_or_folder(workdir_complete)
        workdir_complete = os.path.normpath(workdir_complete)

    # Log the overall time taken for postprocessing
    postproc_time = int(time.time() - start)

    # Create the history DB instance
    history_db = database.HistoryDB()
    # Add the nzo to the database. Only the path, script and time taken is passed
    # Other information is obtained from the nzo
    history_db.add_history_db(nzo, clip_path(workdir_complete), nzo.downpath, postproc_time, script_log, script_line)
    # The connection is only used once, so close it here
    history_db.close()

    # Clean up the NZO
    try:
        logging.info('Cleaning up %s (keep_basic=%s)', filename, str(not all_ok))
        sabnzbd.nzbqueue.NzbQueue.do.cleanup_nzo(nzo, keep_basic=not all_ok)
    except:
        logging.error(T('Cleanup of %s failed.'), nzo.final_name)
        logging.info("Traceback: ", exc_info=True)

    # Remove download folder
    if all_ok:
        try:
            if os.path.exists(workdir):
                logging.debug('Removing workdir %s', workdir)
                remove_all(workdir, recursive=True)
        except:
            logging.error(T('Error removing workdir (%s)'), clip_path(workdir))
            logging.info("Traceback: ", exc_info=True)

    # Use automatic retry link on par2 errors and encrypted/bad RARs
    if par_error or unpack_error in (2, 3):
        try_alt_nzb(nzo)

    # Update the last check time
    sabnzbd.LAST_HISTORY_UPDATE = time.time()

    return True