def rename_similar(folder, skip_ext, name, skipped_files):
    """ Rename all other files in the 'folder' hierarchy after 'name'
        and move them to the root of 'folder'.
        Files having extension 'skip_ext' will be moved, but not renamed.
        Don't touch files in list `skipped_files`
    """
    logging.debug('Give files in set "%s" matching names.', name)
    folder = os.path.normpath(folder)
    skip_ext = skip_ext.lower()

    for root, dirs, files in os.walk(folder):
        for f in files:
            path = os.path.join(root, f)
            if path in skipped_files:
                # Caller asked us to leave this one alone
                continue
            org, ext = os.path.splitext(f)
            if ext.lower() == skip_ext:
                # Move file, but do not rename
                newpath = os.path.join(folder, f)
            else:
                # Move file and rename after the '%fn'-expanded set name
                newname = "%s%s" % (name, ext)
                newname = newname.replace('%fn', org)
                newpath = os.path.join(folder, newname)
            # Only act when the file actually ends up somewhere else;
            # get_unique_filename avoids clobbering an existing target
            if path != newpath:
                newpath = get_unique_filename(newpath)
                try:
                    logging.debug("Rename: %s to %s", path, newpath)
                    renamer(path, newpath)
                except:
                    logging.error(T('Failed to rename similar file: %s to %s'), clip_path(path), clip_path(newpath))
                    logging.info("Traceback: ", exc_info=True)
    # Sub-folders emptied by the moves above are removed here
    cleanup_empty_directories(folder)
def rename(self, files, current_path):
    """ Rename the master Date file after the constructed filename set
        and hand related files to rename_similar().
        Side effect: stores the original (extension-less) name in self.fname.
    """
    logging.debug("Renaming Date file")
    size_limit = cfg.movie_rename_limit.get_int()

    # find the master file to rename: first big enough non-sample file
    for candidate in files:
        # Resolve to an absolute, normalized location
        if is_full_path(candidate):
            candidate_path = os.path.normpath(candidate)
        else:
            candidate_path = os.path.normpath(os.path.join(current_path, candidate))

        # Guard clauses: skip missing, too-small and sample files
        if not os.path.exists(candidate_path):
            continue
        if os.stat(candidate_path).st_size <= size_limit:
            continue
        if 'sample' in candidate:
            continue

        self.fname, ext = os.path.splitext(os.path.split(candidate)[1])
        target_name = ("%s%s" % (self.filename_set, ext)).replace('%fn', self.fname)
        target_path = os.path.join(current_path, target_name)
        if not os.path.exists(target_path):
            try:
                logging.debug("Rename: %s to %s", candidate_path, target_path)
                renamer(candidate_path, target_path)
            except:
                logging.error(T('Failed to rename: %s to %s'), clip_path(current_path), clip_path(target_path))
                logging.info("Traceback: ", exc_info=True)
            rename_similar(current_path, ext, self.filename_set, ())
            break
def rename(self, files, current_path):
    """ Rename for Series: pick the largest non-sample file and rename it
        after the filename set; related files follow via rename_similar().
        Side effect: stores original name/extension in self.fname/self.ext.
    """
    logging.debug("Renaming Series")
    # (file, filepath, size) of the biggest acceptable file seen so far
    largest = (None, None, 0)

    def to_filepath(f, current_path):
        # Resolve relative names against the job folder
        if is_full_path(f):
            filepath = os.path.normpath(f)
        else:
            filepath = os.path.normpath(os.path.join(current_path, f))
        return filepath

    # Create a generator of filepaths, ignore sample files and excluded files (vobs ect)
    filepaths = ((file, to_filepath(file, current_path)) for file in files
                 if not RE_SAMPLE.search(file) and get_ext(file) not in EXCLUDED_FILE_EXTS)

    # Find the largest existing file
    for file, fp in filepaths:
        # If for some reason the file no longer exists, skip
        if not os.path.exists(fp):
            continue
        size = os.stat(fp).st_size
        f_file, f_fp, f_size = largest
        if size > f_size:
            largest = (file, fp, size)

    file, filepath, size = largest
    # >20MB: only rename a file that is plausibly the real episode
    if filepath and size > 20971520:
        self.fname, self.ext = os.path.splitext(os.path.split(file)[1])
        newname = "%s%s" % (self.filename_set, self.ext)
        # Replace %fn with the original filename
        newname = newname.replace('%fn', self.fname)
        newpath = os.path.join(current_path, newname)
        # Replace %ext with extension
        newpath = newpath.replace('%ext', self.ext)
        try:
            logging.debug("Rename: %s to %s", filepath, newpath)
            renamer(filepath, newpath)
        except:
            logging.error(T('Failed to rename: %s to %s'), clip_path(current_path), clip_path(newpath))
            logging.info("Traceback: ", exc_info=True)
        rename_similar(current_path, self.ext, self.filename_set, ())
    else:
        logging.debug('Nothing to rename, %s', files)
def cleanup_list(wdir, skip_nzb):
    """ Remove all files whose extension matches the cleanup list,
        optionally ignoring the nzb extension.
        Recurses into sub-directories; finally attempts to remove 'wdir'
        itself, which only succeeds once it has become empty.
    """
    if cfg.cleanup_list():
        try:
            files = os.listdir(wdir)
        except:
            # Unreadable or vanished folder: nothing to clean
            files = ()
        for filename in files:
            path = os.path.join(wdir, filename)
            if os.path.isdir(path):
                # Depth-first so sub-folders empty out before their parent
                cleanup_list(path, skip_nzb)
            else:
                if on_cleanup_list(filename, skip_nzb):
                    try:
                        logging.info("Removing unwanted file %s", path)
                        remove_file(path)
                    except:
                        logging.error(T('Removing %s failed'), clip_path(path))
                        logging.info("Traceback: ", exc_info=True)
        if files:
            try:
                # Best-effort removal; fails (silently) while non-empty
                remove_dir(wdir)
            except:
                pass
def move(self, workdir_complete):
    """ Move the sorted result to its parent folder (honoring the
        movie 'extra folder' setting) and expand '%fn' in the folder name.
        Returns (workdir_complete, ok).
    """
    ok = True
    if self.type == 'movie':
        move_to_parent = True
        # check if we should leave the files inside an extra folder
        if cfg.movie_extra_folders():
            # if there is a folder in the download, leave it in an extra folder
            move_to_parent = not check_for_folder(workdir_complete)
        if move_to_parent:
            workdir_complete, ok = move_to_parent_folder(workdir_complete)
    else:
        workdir_complete, ok = move_to_parent_folder(workdir_complete)
    if not ok:
        return workdir_complete, False

    path, part = os.path.split(workdir_complete)
    if '%fn' in part and self.sorter.fname:
        old = workdir_complete
        workdir_complete = os.path.join(path, part.replace('%fn', self.sorter.fname))
        # Avoid clashing with an existing folder of the same name
        workdir_complete = get_unique_path(workdir_complete, create_dir=False)
        try:
            renamer(old, workdir_complete)
        except:
            # Keep the old folder on failure so the job result stays reachable
            logging.error(T('Cannot create directory %s'), clip_path(workdir_complete))
            workdir_complete = old
            ok = False
    return workdir_complete, ok
def rename(self, files, current_path):
    """ Renaming Date file: find the first big enough non-sample file,
        rename it after the filename set, then handle related files via
        rename_similar(). Stores the original name in self.fname.
    """
    logging.debug("Renaming Date file")
    # find the master file to rename
    for file in files:
        if is_full_path(file):
            filepath = os.path.normpath(file)
        else:
            filepath = os.path.normpath(os.path.join(current_path, file))

        if os.path.exists(filepath):
            size = os.stat(filepath).st_size
            # Only rename files above the configured size limit
            if size > cfg.movie_rename_limit.get_int():
                if 'sample' not in file:
                    self.fname, ext = os.path.splitext(os.path.split(file)[1])
                    newname = "%s%s" % (self.filename_set, ext)
                    newname = newname.replace('%fn', self.fname)
                    newpath = os.path.join(current_path, newname)
                    # Never overwrite an existing target
                    if not os.path.exists(newpath):
                        try:
                            logging.debug("Rename: %s to %s", filepath, newpath)
                            renamer(filepath, newpath)
                        except:
                            logging.error(T('Failed to rename: %s to %s'), clip_path(current_path), clip_path(newpath))
                            logging.info("Traceback: ", exc_info=True)
                        rename_similar(current_path, ext, self.filename_set, ())
                        break
def cleanup_list(wdir, skip_nzb):
    """ Remove all files whose extension matches the cleanup list,
        optionally ignoring the nzb extension.
        Recurses into sub-directories; finally attempts to remove 'wdir'
        itself, which only succeeds once it has become empty.
    """
    if cfg.cleanup_list():
        try:
            files = os.listdir(wdir)
        except:
            # Unreadable or vanished folder: nothing to clean
            files = ()
        for filename in files:
            path = os.path.join(wdir, filename)
            if os.path.isdir(path):
                # Depth-first so sub-folders empty out before their parent
                cleanup_list(path, skip_nzb)
            else:
                if on_cleanup_list(filename, skip_nzb):
                    try:
                        logging.info("Removing unwanted file %s", path)
                        # Use the project helper instead of os.remove, matching
                        # the sibling cleanup_list/remove_samples code paths
                        # (handles long/Windows paths consistently)
                        remove_file(path)
                    except:
                        logging.error(T('Removing %s failed'), clip_path(path))
                        logging.info("Traceback: ", exc_info=True)
        if files:
            try:
                # Best-effort removal; fails (silently) while non-empty
                remove_dir(wdir)
            except:
                pass
def move(self, workdir_complete):
    """ Move the sorted result to its parent folder (honoring the
        movie 'extra folder' setting) and expand '%fn' in the folder name.
        Returns (workdir_complete, ok).
    """
    ok = True
    if self.type == 'movie':
        # Movies may keep an extra folder, but only when the option is on
        # AND the download actually contains a folder
        keep_extra_folder = cfg.movie_extra_folders() and check_for_folder(workdir_complete)
        if not keep_extra_folder:
            workdir_complete, ok = move_to_parent_folder(workdir_complete)
    else:
        workdir_complete, ok = move_to_parent_folder(workdir_complete)
    if not ok:
        return workdir_complete, False

    parent_dir, leaf = os.path.split(workdir_complete)
    if '%fn' in leaf and self.sorter.fname:
        previous = workdir_complete
        expanded = os.path.join(parent_dir, leaf.replace('%fn', self.sorter.fname))
        # Avoid clashing with an existing folder of the same name
        workdir_complete = get_unique_path(expanded, create_dir=False)
        try:
            renamer(previous, workdir_complete)
        except:
            # Keep the old folder on failure so the job result stays reachable
            logging.error(T('Cannot create directory %s'), clip_path(workdir_complete))
            workdir_complete = previous
            ok = False
    return workdir_complete, ok
def run(self):
    """ Assembler main loop: pull (nzo, nzf) jobs from the queue, check
        free disk space, then assemble the article data into a file.
        A falsy job is the shutdown sentinel. (Python 2 syntax.)
    """
    while 1:
        job = self.queue.get()
        if not job:
            logging.info("Shutting down")
            break

        nzo, nzf = job
        if nzf:
            # Check if enough disk space is free, if not pause downloader and send email
            if diskspace(force=True)['download_dir'][1] < (cfg.download_free.get_float() + nzf.bytes) / GIGI:
                # Only warn and email once
                if not sabnzbd.downloader.Downloader.do.paused:
                    logging.warning(T('Too little diskspace forcing PAUSE'))
                    # Pause downloader, but don't save, since the disk is almost full!
                    sabnzbd.downloader.Downloader.do.pause()
                    sabnzbd.emailer.diskfull()
                    # Abort all direct unpackers, just to be sure
                    sabnzbd.directunpacker.abort_all()
                # Place job back in queue and wait 30 seconds to hope it gets resolved
                self.process(job)
                sleep(30)
                continue

            # Prepare filename
            nzo.verify_nzf_filename(nzf)
            nzf.filename = sanitize_filename(nzf.filename)
            filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, nzf.filename)
            nzf.filename = get_filename(filepath)

            if filepath:
                logging.info('Decoding %s %s', filepath, nzf.type)
                try:
                    filepath = self.assemble(nzf, filepath)
                except IOError, (errno, strerror):
                    # If job was deleted or in active post-processing, ignore error
                    if not nzo.deleted and not nzo.is_gone() and not nzo.pp_active:
                        # 28 == disk full => pause downloader
                        if errno == 28:
                            logging.error(T('Disk full! Forcing Pause'))
                        else:
                            logging.error(T('Disk error on creating file %s'), clip_path(filepath))
                        # Log traceback
                        logging.info('Traceback: ', exc_info=True)
                        # Pause without saving
                        sabnzbd.downloader.Downloader.do.pause()
                    continue
                except:
                    # Any non-IO failure here is fatal for the Assembler thread
                    logging.error(T('Fatal error in Assembler'), exc_info=True)
                    break
def create_unrar_instance(self): """ Start the unrar instance using the user's options """ # Generate extraction path and save for post-proc if not self.unpack_dir_info: try: self.unpack_dir_info = prepare_extraction_path(self.nzo) except: # Prevent fatal crash if directory creation fails self.abort() return # Get the information extraction_path, _, _, one_folder, _ = self.unpack_dir_info # Set options if self.nzo.password: password_command = '-p%s' % self.nzo.password else: password_command = '-p-' if one_folder or cfg.flat_unpack(): action = 'e' else: action = 'x' # The first NZF self.rarfile_nzf = self.have_next_volume() # Generate command rarfile_path = os.path.join(self.nzo.downpath, self.rarfile_nzf.filename) if sabnzbd.WIN32: # For Unrar to support long-path, we need to cricumvent Python's list2cmdline # See: https://github.com/sabnzbd/sabnzbd/issues/1043 command = ['%s' % sabnzbd.newsunpack.RAR_COMMAND, action, '-vp', '-idp', '-o+', '-ai', password_command, '%s' % clip_path(rarfile_path), '%s\\' % long_path(extraction_path)] else: # Don't use "-ai" (not needed for non-Windows) command = ['%s' % sabnzbd.newsunpack.RAR_COMMAND, action, '-vp', '-idp', '-o+', password_command, '%s' % rarfile_path, '%s/' % extraction_path] if cfg.ignore_unrar_dates(): command.insert(3, '-tsm-') # Let's start from the first one! self.cur_volume = 1 stup, need_shell, command, creationflags = build_command(command, flatten_command=True) logging.debug('Running unrar for DirectUnpack %s', command) self.active_instance = Popen(command, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, startupinfo=stup, creationflags=creationflags) # Add to runners ACTIVE_UNPACKERS.append(self) # Doing the first logging.info('DirectUnpacked volume %s for %s', self.cur_volume, self.cur_setname)
def remove_samples(path):
    """ Remove all files that match the sample pattern """
    for root, _dirs, files in os.walk(path):
        # Collect matches first, then delete; avoids shadowing 'path'
        sample_names = [name for name in files if RE_SAMPLE.search(name)]
        for name in sample_names:
            sample_path = os.path.join(root, name)
            try:
                logging.info("Removing unwanted sample file %s", sample_path)
                remove_file(sample_path)
            except:
                logging.error(T('Removing %s failed'), clip_path(sample_path))
                logging.info("Traceback: ", exc_info=True)
def run(self):
    """ Assembler main loop: pull (nzo, nzf) jobs from the queue, check
        free disk space, then assemble the article data into a file.
        A falsy job is the shutdown sentinel. (Python 2 syntax.)
    """
    while 1:
        job = self.queue.get()
        if not job:
            logging.info("Shutting down")
            break

        nzo, nzf = job
        if nzf:
            # Check if enough disk space is free, if not pause downloader and send email
            if diskspace(force=True)['download_dir'][1] < (cfg.download_free.get_float() + nzf.bytes) / GIGI:
                # Only warn and email once
                if not sabnzbd.downloader.Downloader.do.paused:
                    logging.warning(T('Too little diskspace forcing PAUSE'))
                    # Pause downloader, but don't save, since the disk is almost full!
                    sabnzbd.downloader.Downloader.do.pause()
                    sabnzbd.emailer.diskfull()
                    # Abort all direct unpackers, just to be sure
                    sabnzbd.directunpacker.abort_all()
                # Place job back in queue and wait 30 seconds to hope it gets resolved
                self.process(job)
                sleep(30)
                continue

            # Prepare filename
            nzo.verify_nzf_filename(nzf)
            nzf.filename = sanitize_filename(nzf.filename)
            filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, nzf.filename)
            nzf.filename = get_filename(filepath)

            if filepath:
                logging.info('Decoding %s %s', filepath, nzf.type)
                try:
                    filepath = self.assemble(nzf, filepath)
                except IOError, (errno, strerror):
                    # If job was deleted or in active post-processing, ignore error
                    if not nzo.deleted and not nzo.is_gone() and not nzo.pp_active:
                        # 28 == disk full => pause downloader
                        if errno == 28:
                            logging.error(T('Disk full! Forcing Pause'))
                        else:
                            logging.error(T('Disk error on creating file %s'), clip_path(filepath))
                        # Log traceback
                        logging.info('Traceback: ', exc_info=True)
                        # Pause without saving
                        sabnzbd.downloader.Downloader.do.pause()
                    continue
                except:
                    # Any non-IO failure here is fatal for the Assembler thread
                    logging.error(T('Fatal error in Assembler'), exc_info=True)
                    break
def run(self):
    """ Assembler main loop (older variant): pull (nzo, nzf) jobs,
        sanitize the filename, check for duplicates and assemble.
        A falsy job is the shutdown sentinel. (Python 2 syntax.)
    """
    while 1:
        job = self.queue.get()
        if not job:
            logging.info("Shutting down")
            break

        nzo, nzf = job
        if nzf:
            sabnzbd.CheckFreeSpace()
            # We allow win_devices because otherwise par2cmdline fails to repair
            filename = sanitize_filename(nzf.filename, allow_win_devices=True)
            nzf.filename = filename
            dupe = nzo.check_for_dupe(nzf)
            filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, filename)

            if filepath:
                logging.info('Decoding %s %s', filepath, nzf.type)
                try:
                    filepath = _assemble(nzf, filepath, dupe)
                except IOError, (errno, strerror):
                    # If job was deleted, ignore error
                    if not nzo.is_gone():
                        # 28 == disk full => pause downloader
                        if errno == 28:
                            logging.error(T('Disk full! Forcing Pause'))
                        else:
                            logging.error(T('Disk error on creating file %s'), clip_path(filepath))
                        # Pause without saving
                        sabnzbd.downloader.Downloader.do.pause(save=False)
                    continue
                except:
                    # Any non-IO failure here is fatal for the Assembler thread
                    logging.error(T('Fatal error in Assembler'), exc_info=True)
                    break
def run(self):
    """ Assembler main loop (oldest variant): pull (nzo, nzf) jobs,
        sanitize the filename, check for duplicates and assemble.
        A falsy job is the shutdown sentinel. (Python 2 syntax.)
    """
    # Local import, presumably to avoid a circular import at module load — TODO confirm
    import sabnzbd.nzbqueue
    while 1:
        job = self.queue.get()
        if not job:
            logging.info("Shutting down")
            break

        nzo, nzf = job
        if nzf:
            sabnzbd.CheckFreeSpace()
            filename = sanitize_filename(nzf.filename)
            nzf.filename = filename
            dupe = nzo.check_for_dupe(nzf)
            filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, filename)

            if filepath:
                logging.info('Decoding %s %s', filepath, nzf.type)
                try:
                    filepath = _assemble(nzf, filepath, dupe)
                except IOError, (errno, strerror):
                    if nzo.deleted:
                        # Job was deleted, ignore error
                        pass
                    else:
                        # 28 == disk full => pause downloader
                        if errno == 28:
                            logging.error(T('Disk full! Forcing Pause'))
                        else:
                            logging.error(T('Disk error on creating file %s'), clip_path(filepath))
                        # Pause without saving
                        sabnzbd.downloader.Downloader.do.pause(save=False)
                except:
                    # Any non-IO failure here is fatal for the Assembler thread
                    logging.error(T('Fatal error in Assembler'), exc_info=True)
                    break
def get_values(self):
    """ Collect and construct all the values needed for path replacement.
        Returns True on success; on any failure logs the error against
        self.original_dirname and returns False.
    """
    try:
        # Each collector fills in one group of substitution values:
        # show name, season, episode number, episode name (in this order)
        for collect in (self.get_shownames, self.get_seasons,
                        self.get_episodes, self.get_showdescriptions):
            collect()
        return True
    except:
        logging.error(T('Error getting TV info (%s)'), clip_path(self.original_dirname))
        logging.info("Traceback: ", exc_info=True)
        return False
def get_values(self):
    """ Collect and construct all the values needed for path replacement.
        Returns True on success; on any failure logs the error against
        self.original_job_name and returns False.
    """
    try:
        # Fill the substitution values in fixed order:
        # show name, then season, then episode number, then episode name
        collectors = (self.get_shownames, self.get_seasons,
                      self.get_episodes, self.get_showdescriptions)
        for collector in collectors:
            collector()
    except:
        logging.error(T('Error getting TV info (%s)'), clip_path(self.original_job_name))
        logging.info("Traceback: ", exc_info=True)
        return False
    return True
def run(self):
    """ Assembler main loop (variant): pull (nzo, nzf) jobs, sanitize the
        filename, check for duplicates and assemble.
        A falsy job is the shutdown sentinel. (Python 2 syntax.)
    """
    # Local import, presumably to avoid a circular import at module load — TODO confirm
    import sabnzbd.nzbqueue
    while 1:
        job = self.queue.get()
        if not job:
            logging.info("Shutting down")
            break

        nzo, nzf = job
        if nzf:
            sabnzbd.CheckFreeSpace()
            filename = sanitize_filename(nzf.filename)
            nzf.filename = filename
            dupe = nzo.check_for_dupe(nzf)
            filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, filename)

            if filepath:
                logging.info("Decoding %s %s", filepath, nzf.type)
                try:
                    filepath = _assemble(nzf, filepath, dupe)
                except IOError, (errno, strerror):
                    if nzo.is_gone():
                        # Job was deleted, ignore error
                        pass
                    else:
                        # 28 == disk full => pause downloader
                        if errno == 28:
                            logging.error(T("Disk full! Forcing Pause"))
                        else:
                            logging.error(T("Disk error on creating file %s"), clip_path(filepath))
                        # Pause without saving
                        sabnzbd.downloader.Downloader.do.pause(save=False)
                except:
                    # Any non-IO failure here is fatal for the Assembler thread
                    logging.error(T("Fatal error in Assembler"), exc_info=True)
                    break
def remove_samples(path):
    """ Remove all files that match the sample pattern
        Skip deleting if it matches all files or there is only 1 file """
    sample_paths = []
    total_count = 0
    for root, _dirs, files in os.walk(path):
        for candidate in files:
            total_count += 1
            if RE_SAMPLE.search(candidate):
                sample_paths.append(os.path.join(root, candidate))

    # A single hit, or a hit on every file, smells like a false-positive
    matched = len(sample_paths)
    if matched <= 1 or matched >= total_count:
        logging.info("Skipping sample-removal, false-positive")
        return

    for sample in sample_paths:
        try:
            logging.info("Removing unwanted sample file %s", sample)
            remove_file(sample)
        except:
            logging.error(T('Removing %s failed'), clip_path(sample))
            logging.info("Traceback: ", exc_info=True)
def run(self):
    """ Assembler main loop (variant): pull (nzo, nzf) jobs, sanitize the
        filename, check for duplicates and assemble.
        A falsy job is the shutdown sentinel. (Python 2 syntax.)
    """
    while 1:
        job = self.queue.get()
        if not job:
            logging.info("Shutting down")
            break

        nzo, nzf = job
        if nzf:
            sabnzbd.CheckFreeSpace()
            # We allow win_devices because otherwise par2cmdline fails to repair
            filename = sanitize_filename(nzf.filename, allow_win_devices=True)
            nzf.filename = filename
            dupe = nzo.check_for_dupe(nzf)
            filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, filename)

            if filepath:
                logging.info('Decoding %s %s', filepath, nzf.type)
                try:
                    filepath = _assemble(nzf, filepath, dupe)
                except IOError, (errno, strerror):
                    # If job was deleted, ignore error
                    if not nzo.is_gone():
                        # 28 == disk full => pause downloader
                        if errno == 28:
                            logging.error(T('Disk full! Forcing Pause'))
                        else:
                            logging.error(T('Disk error on creating file %s'), clip_path(filepath))
                        # Pause without saving
                        sabnzbd.downloader.Downloader.do.pause(save=False)
                    continue
                except:
                    # Any non-IO failure here is fatal for the Assembler thread
                    logging.error(T('Fatal error in Assembler'), exc_info=True)
                    break
def ProcessSingleFile(filename, path, pp=None, script=None, cat=None, catdir=None, keep=False,
                      priority=None, nzbname=None, reuse=False, nzo_info=None, dup_check=True,
                      url='', password=None, nzo_id=None):
    """ Analyze file and create a job from it
        Supports NZB, NZB.BZ2, NZB.GZ and GZ.NZB-in-disguise
        returns (status, nzo_ids)
            status: -2==Error/retry, -1==Error, 0==OK, 1==OK-but-ignorecannot-delete
    """
    nzo_ids = []
    if catdir is None:
        catdir = cat

    try:
        # Sniff the first two bytes to detect compression regardless of extension
        f = open(path, 'rb')
        b1 = f.read(1)
        b2 = f.read(1)
        f.close()

        if b1 == '\x1f' and b2 == '\x8b':
            # gzip file or gzip in disguise
            name = filename.replace('.nzb.gz', '.nzb')
            f = gzip.GzipFile(path, 'rb')
        elif b1 == 'B' and b2 == 'Z':
            # bz2 file or bz2 in disguise
            name = filename.replace('.nzb.bz2', '.nzb')
            f = bz2.BZ2File(path, 'rb')
        else:
            name = filename
            f = open(path, 'rb')
        data = f.read()
        f.close()
    except:
        logging.warning(T('Cannot read %s'), misc.clip_path(path))
        logging.info("Traceback: ", exc_info=True)
        # -2: caller may retry later
        return -2, nzo_ids

    if name:
        name, cat = name_to_cat(name, catdir)
        # The name is used as the name of the folder, so sanitize it using folder specific santization
        if not nzbname:
            # Prevent embedded password from being damaged by sanitize and trimming
            nzbname = os.path.split(name)[1]

    try:
        nzo = nzbstuff.NzbObject(name, pp, script, data, cat=cat, priority=priority, nzbname=nzbname,
                                 nzo_info=nzo_info, url=url, reuse=reuse, dup_check=dup_check)
        if not nzo.password:
            nzo.password = password
    except TypeError:
        # Duplicate, ignore
        if nzo_id:
            sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
        nzo = None
    except ValueError:
        # Empty, but correct file
        return -1, nzo_ids
    except:
        # Opening "<nzb" without a closing "</nzb" suggests a truncated download
        if data.find("<nzb") >= 0 > data.find("</nzb"):
            # Looks like an incomplete file, retry
            return -2, nzo_ids
        else:
            # Something else is wrong, show error
            logging.error(T('Error while adding %s, removing'), name, exc_info=True)
            return -1, nzo_ids

    if nzo:
        if nzo_id:
            # Re-use existing nzo_id, when a "future" job gets it payload
            sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
            nzo.nzo_id = nzo_id
        nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo, quiet=reuse))
        nzo.update_rating()

    try:
        if not keep:
            misc.remove_file(path)
    except:
        logging.error(T('Error removing %s'), misc.clip_path(path))
        logging.info("Traceback: ", exc_info=True)
        # 1: job was added fine, but the source file could not be deleted
        return 1, nzo_ids

    return 0, nzo_ids
def run_dir(folder, catdir):
    """ Scan one watched folder: import stable NZB/archive files, tracking
        unreadable ones in self.suspected and rejects in self.ignored.
        Closure over 'self' of the enclosing scanner. (Python 2: xrange.)
    """
    try:
        files = os.listdir(folder)
    except:
        # Report an unreadable watched folder only once
        if not self.error_reported and not catdir:
            logging.error(T('Cannot read Watched Folder %s'), misc.clip_path(folder))
            self.error_reported = True
        files = []

    for filename in files:
        path = os.path.join(folder, platform_encode(filename))
        # Skip directories, already-ignored entries and hidden files
        if os.path.isdir(path) or path in self.ignored or filename[0] == '.':
            continue

        ext = os.path.splitext(path)[1].lower()
        candidate = ext in VALID_NZB_FILES + VALID_ARCHIVES
        if candidate:
            try:
                stat_tuple = os.stat(path)
            except:
                continue
        else:
            self.ignored[path] = 1

        if path in self.suspected:
            if CompareStat(self.suspected[path], stat_tuple):
                # Suspected file still has the same attributes
                continue
            else:
                del self.suspected[path]

        if candidate and stat_tuple.st_size > 0:
            logging.info('Trying to import %s', path)

            # Wait until the attributes are stable for 1 second
            # but give up after 3 sec
            stable = False
            for n in xrange(3):
                time.sleep(1.0)
                try:
                    stat_tuple_tmp = os.stat(path)
                except:
                    continue
                if CompareStat(stat_tuple, stat_tuple_tmp):
                    stable = True
                    break
                else:
                    stat_tuple = stat_tuple_tmp

            if not stable:
                continue

            # Handle archive files, but only when containing just NZB files
            if ext in VALID_ARCHIVES:
                res, nzo_ids = ProcessArchiveFile(filename, path, catdir=catdir, url=path)
                if res == -1:
                    self.suspected[path] = stat_tuple
                elif res == 0:
                    self.error_reported = False
                else:
                    self.ignored[path] = 1

            # Handle .nzb, .nzb.gz or gzip-disguised-as-nzb or .bz2
            elif ext == '.nzb' or filename.lower().endswith('.nzb.gz') or filename.lower().endswith('.nzb.bz2'):
                res, nzo_id = ProcessSingleFile(filename, path, catdir=catdir, url=path)
                if res < 0:
                    self.suspected[path] = stat_tuple
                elif res == 0:
                    self.error_reported = False
                else:
                    self.ignored[path] = 1

            else:
                self.ignored[path] = 1

    # Drop bookkeeping entries for files that disappeared from the folder
    CleanList(self.ignored, folder, files)
    CleanList(self.suspected, folder, files)
def process_job(nzo):
    """ Process one job: par-repair, unpack, move, sort/rename, run the
        user script, email results and record the outcome in history.
        Returns False when more par files must be fetched first, True otherwise.
    """
    start = time.time()

    # keep track of whether we can continue
    all_ok = True
    # keep track of par problems
    par_error = False
    # keep track of any unpacking errors
    unpack_error = False
    # Signal empty download, for when 'empty_postproc' is enabled
    empty = False
    nzb_list = []
    # These need to be initialized in case of a crash
    workdir_complete = ''
    script_log = ''
    script_line = ''

    # Get the job flags
    nzo.save_attribs()
    flag_repair, flag_unpack, flag_delete = nzo.repair_opts
    # Normalize PP: delete implies unpack, unpack implies repair
    if flag_delete:
        flag_unpack = True
    if flag_unpack:
        flag_repair = True

    # Get the NZB name
    filename = nzo.final_name

    if nzo.fail_msg:  # Special case: aborted due to too many missing data
        nzo.status = Status.FAILED
        nzo.save_attribs()
        all_ok = False
        par_error = True
        unpack_error = 1

    try:
        # Get the folder containing the download result
        workdir = nzo.downpath
        tmp_workdir_complete = None

        # if no files are present (except __admin__), fail the job
        if all_ok and len(globber(workdir)) < 2:
            if nzo.precheck:
                _enough, ratio = nzo.check_quality()
                req_ratio = float(cfg.req_completion_rate()) / 100.0
                # Make sure that rounded ratio doesn't equal required ratio
                # when it is actually below required
                if (ratio < req_ratio) and (req_ratio - ratio) < 0.001:
                    ratio = req_ratio - 0.001
                emsg = '%.1f%%' % (ratio * 100.0)
                emsg2 = '%.1f%%' % float(cfg.req_completion_rate())
                emsg = T('Download might fail, only %s of required %s available') % (emsg, emsg2)
            else:
                emsg = T('Download failed - Not on your server(s)')
                empty = True
            emsg += ' - https://sabnzbd.org/not-complete'
            nzo.fail_msg = emsg
            nzo.set_unpack_info('Fail', emsg)
            nzo.status = Status.FAILED

            # do not run unpacking or parity verification
            flag_repair = flag_unpack = False
            all_ok = cfg.empty_postproc() and empty
            if not all_ok:
                par_error = True
                unpack_error = 1

        script = nzo.script
        logging.info('Starting Post-Processing on %s' + ' => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s',
                     filename, flag_repair, flag_unpack, flag_delete, script, nzo.cat)

        # Set complete dir to workdir in case we need to abort
        workdir_complete = workdir

        # Par processing, if enabled
        if all_ok and flag_repair:
            par_error, re_add = parring(nzo, workdir)
            if re_add:
                # Try to get more par files
                return False

        # If we don't need extra par2, we can disconnect
        if sabnzbd.nzbqueue.NzbQueue.do.actives(grabs=False) == 0 and cfg.autodisconnect():
            # This was the last job, close server connections
            sabnzbd.downloader.Downloader.do.disconnect()

        # Sanitize the resulting files
        if sabnzbd.WIN32:
            sanitize_files_in_folder(workdir)

        # Check if user allows unsafe post-processing
        if flag_repair and cfg.safe_postproc():
            all_ok = all_ok and not par_error

        if all_ok:
            # Fix encodings
            fix_unix_encoding(workdir)

            # Use dirs generated by direct-unpacker
            if nzo.direct_unpacker and nzo.direct_unpacker.unpack_dir_info:
                tmp_workdir_complete, workdir_complete, file_sorter, one_folder, marker_file = nzo.direct_unpacker.unpack_dir_info
            else:
                # Generate extraction path
                tmp_workdir_complete, workdir_complete, file_sorter, one_folder, marker_file = prepare_extraction_path(nzo)

            newfiles = []
            # Run Stage 2: Unpack
            if flag_unpack:
                # set the current nzo status to "Extracting...". Used in History
                nzo.status = Status.EXTRACTING
                logging.info("Running unpack_magic on %s", filename)
                unpack_error, newfiles = unpack_magic(nzo, workdir, tmp_workdir_complete, flag_delete, one_folder, (), (), (), (), ())
                logging.info("Unpacked files %s", newfiles)

                if sabnzbd.WIN32:
                    # Sanitize the resulting files
                    newfiles = sanitize_files_in_folder(tmp_workdir_complete)
                logging.info("Finished unpack_magic on %s", filename)

            if cfg.safe_postproc():
                all_ok = all_ok and not unpack_error

            if all_ok:
                # Move any (left-over) files to destination
                nzo.status = Status.MOVING
                nzo.set_action_line(T('Moving'), '...')
                for root, _dirs, files in os.walk(workdir):
                    if not root.endswith(JOB_ADMIN):
                        for file_ in files:
                            path = os.path.join(root, file_)
                            new_path = path.replace(workdir, tmp_workdir_complete)
                            ok, new_path = move_to_path(path, new_path)
                            if new_path:
                                newfiles.append(new_path)
                            if not ok:
                                nzo.set_unpack_info('Unpack', T('Failed moving %s to %s') % (unicoder(path), unicoder(new_path)))
                                all_ok = False
                                break

            # Set permissions right
            set_permissions(tmp_workdir_complete)

            if all_ok and marker_file:
                del_marker(os.path.join(tmp_workdir_complete, marker_file))
                remove_from_list(marker_file, newfiles)

            if all_ok:
                # Remove files matching the cleanup list
                cleanup_list(tmp_workdir_complete, True)

                # Check if this is an NZB-only download, if so redirect to queue
                # except when PP was Download-only
                if flag_repair:
                    nzb_list = nzb_redirect(tmp_workdir_complete, nzo.final_name, nzo.pp, script, nzo.cat, priority=nzo.priority)
                else:
                    nzb_list = None
                if nzb_list:
                    nzo.set_unpack_info('Download', T('Sent %s to queue') % unicoder(nzb_list))
                    cleanup_empty_directories(tmp_workdir_complete)
                else:
                    cleanup_list(tmp_workdir_complete, False)

        script_output = ''
        script_ret = 0
        if not nzb_list:
            # Give destination its final name
            if cfg.folder_rename() and tmp_workdir_complete and not one_folder:
                if all_ok:
                    try:
                        newfiles = rename_and_collapse_folder(tmp_workdir_complete, workdir_complete, newfiles)
                    except:
                        logging.error(T('Error renaming "%s" to "%s"'), clip_path(tmp_workdir_complete), clip_path(workdir_complete))
                        logging.info('Traceback: ', exc_info=True)
                        # Better disable sorting because filenames are all off now
                        file_sorter.sort_file = None
                else:
                    # Failed job keeps a clearly marked folder name
                    workdir_complete = tmp_workdir_complete.replace('_UNPACK_', '_FAILED_')
                    workdir_complete = get_unique_path(workdir_complete, n=0, create_dir=False)

            if empty:
                job_result = -1
            else:
                # Bit-style result: 1 = par error, 2 = unpack error
                job_result = int(par_error) + int(bool(unpack_error)) * 2

            if cfg.ignore_samples():
                remove_samples(workdir_complete)

            # TV/Movie/Date Renaming code part 2 - rename and move files to parent folder
            if all_ok and file_sorter.sort_file:
                if newfiles:
                    file_sorter.rename(newfiles, workdir_complete)
                    workdir_complete, ok = file_sorter.move(workdir_complete)
                else:
                    workdir_complete, ok = file_sorter.rename_with_ext(workdir_complete)
                if not ok:
                    nzo.set_unpack_info('Unpack', T('Failed to move files'))
                    all_ok = False

            # Run the user script
            script_path = make_script_path(script)
            if (all_ok or not cfg.safe_postproc()) and (not nzb_list) and script_path:
                # Set the current nzo status to "Ext Script...". Used in History
                nzo.status = Status.RUNNING
                nzo.set_action_line(T('Running script'), unicoder(script))
                nzo.set_unpack_info('Script', T('Running user script %s') % unicoder(script), unique=True)
                script_log, script_ret = external_processing(script_path, nzo, clip_path(workdir_complete), nzo.final_name, job_result)
                script_line = get_last_line(script_log)
                if script_log:
                    script_output = nzo.nzo_id
                if script_line:
                    nzo.set_unpack_info('Script', unicoder(script_line), unique=True)
                else:
                    nzo.set_unpack_info('Script', T('Ran %s') % unicoder(script), unique=True)
            else:
                script = ""
                script_line = ""
                script_ret = 0

        # Maybe bad script result should fail job
        if script_ret and cfg.script_can_fail():
            script_error = True
            all_ok = False
            nzo.fail_msg = T('Script exit code is %s') % script_ret
        else:
            script_error = False

        # Email the results
        if (not nzb_list) and cfg.email_endjob():
            if (cfg.email_endjob() == 1) or (cfg.email_endjob() == 2 and (unpack_error or par_error or script_error)):
                emailer.endjob(nzo.final_name, nzo.cat, all_ok, workdir_complete, nzo.bytes_downloaded,
                               nzo.fail_msg, nzo.unpack_info, script, TRANS(script_log), script_ret)

        if script_output:
            # Can do this only now, otherwise it would show up in the email
            if script_ret:
                script_ret = 'Exit(%s) ' % script_ret
            else:
                script_ret = ''
            if len(script_log.rstrip().split('\n')) > 1:
                nzo.set_unpack_info('Script',
                                    u'%s%s <a href="./scriptlog?name=%s">(%s)</a>' % (script_ret, script_line, xml.sax.saxutils.escape(script_output), T('More')),
                                    unique=True)
            else:
                # No '(more)' button needed
                nzo.set_unpack_info('Script', u'%s%s ' % (script_ret, script_line), unique=True)

        # Cleanup again, including NZB files
        if all_ok:
            cleanup_list(workdir_complete, False)

        # Force error for empty result
        all_ok = all_ok and not empty

        # Update indexer with results
        if cfg.rating_enable():
            if nzo.encrypted > 0:
                Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_ENCRYPTED)
            if empty:
                hosts = map(lambda s: s.host, sabnzbd.downloader.Downloader.do.nzo_servers(nzo))
                if not hosts:
                    hosts = [None]
                for host in hosts:
                    Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_EXPIRED, host)

    except:
        # Catch-all so a post-processing crash never kills the PP thread
        logging.error(T('Post Processing Failed for %s (%s)'), filename, T('see logfile'))
        logging.info("Traceback: ", exc_info=True)
        nzo.fail_msg = T('PostProcessing was aborted (%s)') % T('see logfile')
        notifier.send_notification(T('Download Failed'), filename, 'failed', nzo.cat)
        nzo.status = Status.FAILED
        par_error = True
        all_ok = False

        if cfg.email_endjob():
            emailer.endjob(nzo.final_name, nzo.cat, all_ok, clip_path(workdir_complete), nzo.bytes_downloaded,
                           nzo.fail_msg, nzo.unpack_info, '', '', 0)

    if all_ok:
        # If the folder only contains one file OR folder, have that as the path
        # Be aware that series/generic/date sorting may move a single file into a folder containing other files
        workdir_complete = one_file_or_folder(workdir_complete)
        workdir_complete = os.path.normpath(workdir_complete)

    # Clean up the NZO
    try:
        logging.info('Cleaning up %s (keep_basic=%s)', filename, str(not all_ok))
        sabnzbd.nzbqueue.NzbQueue.do.cleanup_nzo(nzo, keep_basic=not all_ok)
    except:
        logging.error(T('Cleanup of %s failed.'), nzo.final_name)
        logging.info("Traceback: ", exc_info=True)

    # Remove download folder
    if all_ok:
        try:
            if os.path.exists(workdir):
                logging.debug('Removing workdir %s', workdir)
                remove_all(workdir, recursive=True)
        except:
            logging.error(T('Error removing workdir (%s)'), clip_path(workdir))
            logging.info("Traceback: ", exc_info=True)

    # Use automatic retry link on par2 errors and encrypted/bad RARs
    if par_error or unpack_error in (2, 3):
        try_alt_nzb(nzo)

    # Show final status in history
    if all_ok:
        notifier.send_notification(T('Download Completed'), filename, 'complete', nzo.cat)
        nzo.status = Status.COMPLETED
    else:
        notifier.send_notification(T('Download Failed'), filename, 'failed', nzo.cat)
        nzo.status = Status.FAILED

    # Log the overall time taken for postprocessing
    postproc_time = int(time.time() - start)

    # Create the history DB instance
    history_db = database.HistoryDB()
    # Add the nzo to the database. Only the path, script and time taken is passed
    # Other information is obtained from the nzo
    history_db.add_history_db(nzo, clip_path(workdir_complete), nzo.downpath, postproc_time, script_log, script_line)
    # Purge items
    history_db.auto_history_purge()
    # The connection is only used once, so close it here
    history_db.close()
    sabnzbd.history_updated()
    return True
def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=None, keep=False,
                       priority=None, url='', nzbname=None, password=None, nzo_id=None):
    """ Analyse ZIP file and create job(s).
        Accepts ZIP files with ONLY nzb/nfo/folder files in it.
        returns (status, nzo_ids)
            status: -1==Error/Retry, 0==OK, 1==Ignore
    """
    nzo_ids = []
    if catdir is None:
        catdir = cat
    # Category may be embedded in the filename; extract it
    filename, cat = name_to_cat(filename, catdir)

    # is_archive() returns (status, open-archive-object, extension);
    # any non-zero status means the file could not be treated as an archive
    status, zf, extension = is_archive(path)
    if status != 0:
        return status, []

    status = 1
    names = zf.namelist()
    # Count NZB members; without at least one the archive is ignored (status 1)
    nzbcount = 0
    for name in names:
        name = name.lower()
        if name.endswith('.nzb'):
            status = 0
            nzbcount += 1

    if status == 0:
        if nzbcount != 1:
            # A single override name makes no sense for multiple NZBs
            nzbname = None
        for name in names:
            if name.lower().endswith('.nzb'):
                try:
                    data = zf.read(name)
                except:
                    logging.error(T('Cannot read %s'), name, exc_info=True)
                    zf.close()
                    return -1, []
                name = os.path.basename(name)
                if data:
                    nzo = None
                    try:
                        nzo = nzbstuff.NzbObject(name, pp, script, data, cat=cat, url=url,
                                                 priority=priority, nzbname=nzbname)
                        if not nzo.password:
                            nzo.password = password
                    except (TypeError, ValueError):
                        # Duplicate or empty, ignore
                        pass
                    except:
                        # Something else is wrong, show error
                        logging.error(T('Error while adding %s, removing'), name, exc_info=True)

                    if nzo:
                        if nzo_id:
                            # Re-use existing nzo_id, when a "future" job gets it payload
                            sabnzbd.nzbqueue.NzbQueue.do.remove(
                                nzo_id, add_to_history=False)
                            nzo.nzo_id = nzo_id
                            # Only the first NZB may re-use the id
                            nzo_id = None
                        nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo))
                        nzo.update_rating()
        zf.close()
        try:
            if not keep:
                misc.remove_file(path)
        except:
            logging.error(T('Error removing %s'), misc.clip_path(path))
            logging.info("Traceback: ", exc_info=True)
            # NOTE(review): flattened source is ambiguous here — this assumes
            # status is only degraded to 1 when removal fails; confirm upstream
            status = 1
    else:
        # No NZBs inside: close and signal "ignore"
        zf.close()
        status = 1

    return status, nzo_ids
def rename(self, _files, current_path):
    """ Rename for Generic files.

        Renames the single large movie file (or a CD1/CD2-style sequence)
        in 'current_path' after the constructed 'self.filename_set',
        then lets rename_similar() handle the remaining related files.
        Side effect: stores the original base name in self.fname
        (used for '%fn' substitution).
    """
    logging.debug("Renaming Generic file")

    def filter_files(_file, current_path):
        # Keep only existing files that are at least movie_rename_limit
        # bytes, are not samples and don't have an excluded extension
        if is_full_path(_file):
            filepath = os.path.normpath(_file)
        else:
            filepath = os.path.normpath(os.path.join(current_path, _file))
        if os.path.exists(filepath):
            size = os.stat(filepath).st_size
            if size >= cfg.movie_rename_limit.get_int() and not RE_SAMPLE.search(_file) \
               and get_ext(_file) not in EXCLUDED_FILE_EXTS:
                return True
        return False

    # remove any files below the limit from this list
    files = [_file for _file in _files if filter_files(_file, current_path)]
    length = len(files)
    # Single File Handling
    if length == 1:
        file = files[0]
        if is_full_path(file):
            filepath = os.path.normpath(file)
        else:
            filepath = os.path.normpath(os.path.join(current_path, file))
        if os.path.exists(filepath):
            self.fname, ext = os.path.splitext(os.path.split(file)[1])
            newname = "%s%s" % (self.filename_set, ext)
            newname = newname.replace('%fn', self.fname)
            newpath = os.path.join(current_path, newname)
            try:
                logging.debug("Rename: %s to %s", filepath, newpath)
                renamer(filepath, newpath)
            except:
                # Best-effort: a failed rename is logged but not fatal
                logging.error(T('Failed to rename: %s to %s'), clip_path(filepath), clip_path(newpath))
                logging.info("Traceback: ", exc_info=True)
            rename_similar(current_path, ext, self.filename_set, ())
    # Sequence File Handling
    # if there is more than one extracted file check for CD1/1/A in the title
    elif self.extra:
        matched_files = check_for_multiple(files)
        # rename files marked as in a set
        if matched_files:
            logging.debug("Renaming a series of generic files (%s)", matched_files)
            # NOTE(review): 'renamed' mixes bare filenames (values()) with
            # full original paths; rename_similar compares full paths, so the
            # bare names never match — presumably harmless, confirm intent
            renamed = matched_files.values()
            for index, file in matched_files.iteritems():
                filepath = os.path.join(current_path, file)
                renamed.append(filepath)
                self.fname, ext = os.path.splitext(os.path.split(file)[1])
                # '%1' carries the sequence index (CD1, CD2, ...)
                name = '%s%s' % (self.filename_set, self.extra)
                name = name.replace('%1', str(index)).replace('%fn', self.fname)
                name = name + ext
                newpath = os.path.join(current_path, name)
                try:
                    logging.debug("Rename: %s to %s", filepath, newpath)
                    renamer(filepath, newpath)
                except:
                    logging.error(T('Failed to rename: %s to %s'), clip_path(filepath), clip_path(newpath))
                    logging.info("Traceback: ", exc_info=True)
            rename_similar(current_path, ext, self.filename_set, renamed)
        else:
            logging.debug("Movie files not in sequence %s", _files)
def rename(self, _files, current_path):
    """ Rename for Generic files.

        Renames the single large movie file (or a recognised multi-part
        sequence) in 'current_path' after 'self.filename_set', then hands
        the remaining related files to rename_similar().
    """
    logging.debug("Renaming Generic file")

    def keep_candidate(_file, current_path):
        # Accept only existing files above the size limit that are
        # neither samples nor carry an excluded extension
        full = os.path.normpath(_file) if is_full_path(_file) \
            else os.path.normpath(os.path.join(current_path, _file))
        if not os.path.exists(full):
            return False
        big_enough = os.stat(full).st_size >= cfg.movie_rename_limit.get_int()
        return big_enough and not RE_SAMPLE.search(_file) \
            and get_ext(_file) not in EXCLUDED_FILE_EXTS

    # Filter out everything below the rename limit
    files = [item for item in _files if keep_candidate(item, current_path)]

    if len(files) == 1:
        # Exactly one qualifying file: rename it after the sort string
        file = files[0]
        filepath = os.path.normpath(file) if is_full_path(file) \
            else os.path.normpath(os.path.join(current_path, file))
        if os.path.exists(filepath):
            self.fname, ext = os.path.splitext(os.path.split(file)[1])
            newname = ("%s%s" % (self.filename_set, ext)).replace('%fn', self.fname)
            newpath = os.path.join(current_path, newname)
            try:
                logging.debug("Rename: %s to %s", filepath, newpath)
                renamer(filepath, newpath)
            except:
                logging.error(T('Failed to rename: %s to %s'), clip_path(filepath), clip_path(newpath))
                logging.info("Traceback: ", exc_info=True)
            rename_similar(current_path, ext, self.filename_set, ())
    elif self.extra:
        # Multiple qualifying files: look for CD1/1/A markers in the titles
        matched_files = check_for_multiple(files)
        if matched_files:
            logging.debug("Renaming a series of generic files (%s)", matched_files)
            renamed = matched_files.values()
            for index, file in matched_files.iteritems():
                filepath = os.path.join(current_path, file)
                renamed.append(filepath)
                self.fname, ext = os.path.splitext(os.path.split(file)[1])
                # '%1' carries the sequence index, '%fn' the original base name
                name = ('%s%s' % (self.filename_set, self.extra)) \
                    .replace('%1', str(index)).replace('%fn', self.fname) + ext
                newpath = os.path.join(current_path, name)
                try:
                    logging.debug("Rename: %s to %s", filepath, newpath)
                    renamer(filepath, newpath)
                except:
                    logging.error(T('Failed to rename: %s to %s'), clip_path(filepath), clip_path(newpath))
                    logging.info("Traceback: ", exc_info=True)
            rename_similar(current_path, ext, self.filename_set, renamed)
        else:
            logging.debug("Movie files not in sequence %s", _files)
def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=None, keep=False,
                       priority=None, url='', nzbname=None, password=None, nzo_id=None):
    """ Analyse ZIP file and create job(s).
        Accepts ZIP files with ONLY nzb/nfo/folder files in it.
        returns (status, nzo_ids)
            status: -1==Error/Retry, 0==OK, 1==Ignore
    """
    nzo_ids = []
    if catdir is None:
        catdir = cat
    # The filename may carry an embedded category
    filename, cat = name_to_cat(filename, catdir)

    status, zf, extension = is_archive(path)
    if status != 0:
        return status, []

    names = zf.namelist()
    # Without at least one NZB member the archive is ignored
    nzbcount = sum(1 for entry in names if entry.lower().endswith('.nzb'))
    status = 0 if nzbcount else 1

    if status == 0:
        if nzbcount != 1:
            # One override name cannot apply to several NZBs
            nzbname = None
        for entry in names:
            if not entry.lower().endswith('.nzb'):
                continue
            try:
                data = zf.read(entry)
            except:
                logging.error(T('Cannot read %s'), entry, exc_info=True)
                zf.close()
                return -1, []
            entry = os.path.basename(entry)
            if not data:
                continue
            nzo = None
            try:
                nzo = nzbstuff.NzbObject(entry, pp, script, data, cat=cat, url=url,
                                         priority=priority, nzbname=nzbname)
                if not nzo.password:
                    nzo.password = password
            except (TypeError, ValueError):
                # Duplicate or empty, ignore
                pass
            except:
                # Something else is wrong, show error
                logging.error(T('Error while adding %s, removing'), entry, exc_info=True)
            if nzo:
                if nzo_id:
                    # Re-use existing nzo_id, when a "future" job gets it payload
                    sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
                    nzo.nzo_id = nzo_id
                    nzo_id = None
                nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo))
                nzo.update_rating()
        zf.close()
        try:
            if not keep:
                misc.remove_file(path)
        except:
            logging.error(T('Error removing %s'), misc.clip_path(path))
            logging.info("Traceback: ", exc_info=True)
            status = 1
    else:
        zf.close()
        status = 1

    return status, nzo_ids
def process_job(nzo):
    """ Process one job: run par-repair, unpack, move to the final folder,
        sort/rename, run the user script, email results and record the job
        in the history database. Returns True when the job is finished
        (successfully or not); returns False when more par files must be
        fetched first.
    """
    if 0:
        # Assert only for debug purposes
        assert isinstance(nzo, sabnzbd.nzbstuff.NzbObject)
    start = time.time()

    # keep track of whether we can continue
    all_ok = True
    # keep track of par problems
    par_error = False
    # keep track of any unpacking errors
    unpack_error = False
    # Signal empty download, for when 'empty_postproc' is enabled
    empty = False
    nzb_list = []
    # These need to be initialized in case of a crash
    workdir_complete = ''
    postproc_time = 0  # @UnusedVariable -- pep8 bug?
    script_log = ''
    script_line = ''
    crash_msg = ''

    # Get the job flags
    nzo.save_attribs()
    flag_repair, flag_unpack, flag_delete = nzo.repair_opts
    # Normalize PP: delete implies unpack, unpack implies repair
    if flag_delete:
        flag_unpack = True
    if flag_unpack:
        flag_repair = True

    # Get the NZB name
    filename = nzo.final_name

    if cfg.allow_streaming() and not (flag_repair or flag_unpack or flag_delete):
        # After streaming, force +D
        nzo.set_pp(3)
        nzo.status = Status.FAILED
        nzo.save_attribs()
        all_ok = False

    if nzo.fail_msg:  # Special case: aborted due to too many missing data
        nzo.status = Status.FAILED
        nzo.save_attribs()
        all_ok = False
        par_error = True
        unpack_error = 1

    try:
        # Get the folder containing the download result
        workdir = nzo.downpath
        tmp_workdir_complete = None

        # if no files are present (except __admin__), fail the job
        if all_ok and len(globber(workdir)) < 2:
            if nzo.precheck:
                _enough, ratio = nzo.check_quality()
                req_ratio = float(cfg.req_completion_rate()) / 100.0
                # Make sure that rounded ratio doesn't equal required ratio
                # when it is actually below required
                if (ratio < req_ratio) and (req_ratio - ratio) < 0.001:
                    ratio = req_ratio - 0.001
                emsg = '%.1f%%' % (ratio * 100.0)
                emsg2 = '%.1f%%' % float(cfg.req_completion_rate())
                emsg = T('Download might fail, only %s of required %s available') % (emsg, emsg2)
            else:
                emsg = T('Download failed - Not on your server(s)')
                empty = True
            nzo.fail_msg = emsg
            nzo.set_unpack_info('Fail', emsg)
            nzo.status = Status.FAILED
            # do not run unpacking or parity verification
            flag_repair = flag_unpack = False
            # 'empty_postproc' lets empty jobs continue to the script stage
            all_ok = cfg.empty_postproc() and empty
            if not all_ok:
                par_error = True
                unpack_error = 1

        script = nzo.script
        cat = nzo.cat
        logging.info('Starting Post-Processing on %s' +
                     ' => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s',
                     filename, flag_repair, flag_unpack, flag_delete, script, cat)

        # Set complete dir to workdir in case we need to abort
        workdir_complete = workdir
        dirname = nzo.final_name
        marker_file = None

        # Par processing, if enabled
        if all_ok and flag_repair:
            if not check_win_maxpath(workdir):
                crash_msg = T('Path exceeds 260, repair by "par2" is not possible')
                raise WindowsError
            par_error, re_add = parring(nzo, workdir)
            if re_add:
                # Try to get more par files
                return False

        # Check if user allows unsafe post-processing
        if flag_repair and cfg.safe_postproc():
            all_ok = all_ok and not par_error

        if all_ok:
            fix_unix_encoding(workdir)
            one_folder = False
            # Determine class directory
            if cfg.create_group_folders():
                complete_dir = addPrefixes(cfg.complete_dir.get_path(), nzo.dirprefix)
                complete_dir = create_dirs(complete_dir)
            else:
                catdir = config.get_categories(cat).dir()
                if catdir.endswith('*'):
                    # Trailing '*' means: no job sub-folder
                    catdir = catdir.strip('*')
                    one_folder = True
                complete_dir = real_path(cfg.complete_dir.get_path(), catdir)
            complete_dir = long_path(complete_dir)

            # TV/Movie/Date Renaming code part 1 - detect and construct paths
            if cfg.enable_meta():
                file_sorter = Sorter(nzo, cat)
            else:
                file_sorter = Sorter(None, cat)
            complete_dir = file_sorter.detect(dirname, complete_dir)
            if file_sorter.sort_file:
                one_folder = False

            complete_dir = sanitize_and_trim_path(complete_dir)

            if one_folder:
                workdir_complete = create_dirs(complete_dir)
            else:
                workdir_complete = get_unique_path(os.path.join(complete_dir, dirname), create_dir=True)
                marker_file = set_marker(workdir_complete)

            if not workdir_complete or not os.path.exists(workdir_complete):
                crash_msg = T('Cannot create final folder %s') % unicoder(os.path.join(complete_dir, dirname))
                raise IOError

            if cfg.folder_rename() and not one_folder:
                # Work in a temporary '_UNPACK_' folder; renamed back on success
                tmp_workdir_complete = prefix(workdir_complete, '_UNPACK_')
                try:
                    renamer(workdir_complete, tmp_workdir_complete)
                except:
                    pass  # On failure, just use the original name
            else:
                tmp_workdir_complete = workdir_complete

            newfiles = []
            # Run Stage 2: Unpack
            if flag_unpack:
                if all_ok:
                    # set the current nzo status to "Extracting...". Used in History
                    nzo.status = Status.EXTRACTING
                    logging.info("Running unpack_magic on %s", filename)
                    short_complete = short_path(tmp_workdir_complete)
                    unpack_error, newfiles = unpack_magic(nzo, short_path(workdir), short_complete, flag_delete, one_folder, (), (), (), (), ())
                    if short_complete != tmp_workdir_complete:
                        # Map 8.3 short paths back to the real destination
                        newfiles = [f.replace(short_complete, tmp_workdir_complete) for f in newfiles]
                    logging.info("unpack_magic finished on %s", filename)
                else:
                    nzo.set_unpack_info('Unpack', T('No post-processing because of failed verification'))

            if cfg.safe_postproc():
                all_ok = all_ok and not unpack_error

            if all_ok:
                # Move any (left-over) files to destination
                nzo.status = Status.MOVING
                nzo.set_action_line(T('Moving'), '...')
                for root, _dirs, files in os.walk(workdir):
                    if not root.endswith(JOB_ADMIN):
                        for file_ in files:
                            path = os.path.join(root, file_)
                            new_path = path.replace(workdir, tmp_workdir_complete)
                            ok, new_path = move_to_path(path, new_path)
                            newfiles.append(new_path)
                            if not ok:
                                nzo.set_unpack_info('Unpack', T('Failed moving %s to %s') % (unicoder(path), unicoder(new_path)))
                                all_ok = False
                                break

            # Set permissions right
            set_permissions(tmp_workdir_complete)

            if all_ok and marker_file:
                del_marker(os.path.join(tmp_workdir_complete, marker_file))
                remove_from_list(marker_file, newfiles)

            if all_ok:
                # Remove files matching the cleanup list
                cleanup_list(tmp_workdir_complete, True)

                # Check if this is an NZB-only download, if so redirect to queue
                # except when PP was Download-only
                if flag_repair:
                    nzb_list = nzb_redirect(tmp_workdir_complete, nzo.final_name, nzo.pp, script, cat, priority=nzo.priority)
                else:
                    nzb_list = None
                if nzb_list:
                    nzo.set_unpack_info('Download', T('Sent %s to queue') % unicoder(nzb_list))
                    cleanup_empty_directories(tmp_workdir_complete)
                else:
                    cleanup_list(tmp_workdir_complete, False)

        script_output = ''
        script_ret = 0
        if not nzb_list:
            # Give destination its final name
            if cfg.folder_rename() and tmp_workdir_complete and not one_folder:
                if all_ok:
                    try:
                        newfiles = rename_and_collapse_folder(tmp_workdir_complete, workdir_complete, newfiles)
                    except:
                        logging.error(T('Error renaming "%s" to "%s"'), clip_path(tmp_workdir_complete), clip_path(workdir_complete))
                        logging.info('Traceback: ', exc_info=True)
                        # Better disable sorting because filenames are all off now
                        file_sorter.sort_file = None
                else:
                    # Keep the failed result in a '_FAILED_' folder
                    workdir_complete = tmp_workdir_complete.replace('_UNPACK_', '_FAILED_')
                    workdir_complete = get_unique_path(workdir_complete, n=0, create_dir=False)
                    # NOTE(review): no-op self-assignment below — possibly a
                    # leftover from an earlier rename call; confirm in history
                    workdir_complete = workdir_complete

            if empty:
                job_result = -1
            else:
                # Bit-style result passed to the user script: 1=par, 2=unpack
                job_result = int(par_error) + int(bool(unpack_error)) * 2

            if cfg.ignore_samples():
                remove_samples(workdir_complete)

            # TV/Movie/Date Renaming code part 2 - rename and move files to parent folder
            if all_ok and file_sorter.sort_file:
                if newfiles:
                    file_sorter.rename(newfiles, workdir_complete)
                    workdir_complete, ok = file_sorter.move(workdir_complete)
                else:
                    workdir_complete, ok = file_sorter.rename_with_ext(workdir_complete)
                if not ok:
                    nzo.set_unpack_info('Unpack', T('Failed to move files'))
                    all_ok = False

        # Run the user script
        script_path = make_script_path(script)
        if (all_ok or not cfg.safe_postproc()) and (not nzb_list) and script_path:
            # set the current nzo status to "Ext Script...". Used in History
            nzo.status = Status.RUNNING
            nzo.set_action_line(T('Running script'), unicoder(script))
            nzo.set_unpack_info('Script', T('Running user script %s') % unicoder(script), unique=True)
            script_log, script_ret = external_processing(short_path(script_path, False), short_path(workdir_complete, False),
                                                         nzo.filename, dirname, cat, nzo.group, job_result,
                                                         nzo.nzo_info.get('failure', ''))
            script_line = get_last_line(script_log)
            if script_log:
                script_output = nzo.nzo_id
            if script_line:
                nzo.set_unpack_info('Script', unicoder(script_line), unique=True)
            else:
                nzo.set_unpack_info('Script', T('Ran %s') % unicoder(script), unique=True)
        else:
            script = ""
            script_line = ""
            script_ret = 0

        # Maybe bad script result should fail job
        if script_ret and cfg.script_can_fail():
            script_error = True
            all_ok = False
            nzo.fail_msg = T('Script exit code is %s') % script_ret
        else:
            script_error = False

        # Email the results
        if (not nzb_list) and cfg.email_endjob():
            if (cfg.email_endjob() == 1) or (cfg.email_endjob() == 2 and (unpack_error or par_error or script_error)):
                emailer.endjob(dirname, cat, all_ok, workdir_complete, nzo.bytes_downloaded,
                               nzo.fail_msg, nzo.unpack_info, script, TRANS(script_log), script_ret)

        if script_output:
            # Can do this only now, otherwise it would show up in the email
            if script_ret:
                script_ret = 'Exit(%s) ' % script_ret
            else:
                script_ret = ''
            if len(script_log.rstrip().split('\n')) > 1:
                nzo.set_unpack_info('Script',
                                    u'%s%s <a href="./scriptlog?name=%s">(%s)</a>' % (script_ret, xml.sax.saxutils.escape(script_line),
                                                                                      xml.sax.saxutils.escape(script_output), T('More')),
                                    unique=True)
            else:
                # No '(more)' button needed
                nzo.set_unpack_info('Script', u'%s%s ' % (script_ret, xml.sax.saxutils.escape(script_line)), unique=True)

        # Cleanup again, including NZB files
        if all_ok:
            cleanup_list(workdir_complete, False)

        # Force error for empty result
        all_ok = all_ok and not empty

        # Update indexer with results
        if cfg.rating_enable():
            if nzo.encrypted > 0:
                Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_ENCRYPTED)
            if empty:
                hosts = map(lambda s: s.host, sabnzbd.downloader.Downloader.do.nzo_servers(nzo))
                if not hosts:
                    hosts = [None]
                for host in hosts:
                    Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_EXPIRED, host)

        # Show final status in history
        if all_ok:
            notifier.send_notification(T('Download Completed'), filename, 'complete')
            nzo.status = Status.COMPLETED
        else:
            notifier.send_notification(T('Download Failed'), filename, 'failed')
            nzo.status = Status.FAILED

    except:
        # Catch-all crash handler for the whole pipeline
        # NOTE(review): 'dirname'/'cat' are assigned inside the try block;
        # a crash before that point would raise NameError here — confirm
        logging.error(T('Post Processing Failed for %s (%s)'), filename, crash_msg)
        if not crash_msg:
            logging.info("Traceback: ", exc_info=True)
            crash_msg = T('see logfile')
        nzo.fail_msg = T('PostProcessing was aborted (%s)') % unicoder(crash_msg)
        notifier.send_notification(T('Download Failed'), filename, 'failed')
        nzo.status = Status.FAILED
        par_error = True
        all_ok = False
        if cfg.email_endjob():
            emailer.endjob(dirname, cat, all_ok, clip_path(workdir_complete), nzo.bytes_downloaded, nzo.fail_msg, nzo.unpack_info, '', '', 0)

    if all_ok:
        # If the folder only contains one file OR folder, have that as the path
        # Be aware that series/generic/date sorting may move a single file into a folder containing other files
        workdir_complete = one_file_or_folder(workdir_complete)
        workdir_complete = os.path.normpath(workdir_complete)

    # Log the overall time taken for postprocessing
    postproc_time = int(time.time() - start)

    # Create the history DB instance
    history_db = database.HistoryDB()
    # Add the nzo to the database. Only the path, script and time taken is passed
    # Other information is obtained from the nzo
    history_db.add_history_db(nzo, clip_path(workdir_complete), nzo.downpath, postproc_time, script_log, script_line)
    # The connection is only used once, so close it here
    history_db.close()

    # Clean up the NZO
    try:
        logging.info('Cleaning up %s (keep_basic=%s)', filename, str(not all_ok))
        sabnzbd.nzbqueue.NzbQueue.do.cleanup_nzo(nzo, keep_basic=not all_ok)
    except:
        logging.error(T('Cleanup of %s failed.'), nzo.final_name)
        logging.info("Traceback: ", exc_info=True)

    # Remove download folder
    if all_ok:
        try:
            if os.path.exists(workdir):
                logging.debug('Removing workdir %s', workdir)
                remove_all(workdir, recursive=True)
        except:
            logging.error(T('Error removing workdir (%s)'), clip_path(workdir))
            logging.info("Traceback: ", exc_info=True)

    # Use automatic retry link on par2 errors and encrypted/bad RARs
    if par_error or unpack_error in (2, 3):
        try_alt_nzb(nzo)

    # Update the last check time
    sabnzbd.LAST_HISTORY_UPDATE = time.time()

    return True
def create_unrar_instance(self):
    """ Start the unrar instance using the user's options """
    # Generate extraction path and save for post-proc
    if not self.unpack_dir_info:
        self.unpack_dir_info = prepare_extraction_path(self.nzo)
    extraction_path, _, _, one_folder, _ = self.unpack_dir_info

    # Password option and extract mode ('e' = flat, 'x' = with paths)
    password_command = '-p%s' % self.nzo.password if self.nzo.password else '-p-'
    action = 'e' if one_folder or cfg.flat_unpack() else 'x'

    # The first NZF
    self.rarfile_nzf = self.have_next_volume()

    # Build the platform-specific command line
    rarfile_path = os.path.join(self.nzo.downpath, self.rarfile_nzf.filename)
    if sabnzbd.WIN32:
        if has_win_device(rarfile_path):
            # Need long-path notation in case of forbidden-names
            destination = '%s\\' % extraction_path
        else:
            destination = clip_path(extraction_path)
        command = ['%s' % sabnzbd.newsunpack.RAR_COMMAND, action, '-vp', '-idp', '-o+', '-ai',
                   password_command, '%s' % clip_path(rarfile_path), destination]
    else:
        # Don't use "-ai" (not needed for non-Windows)
        command = ['%s' % sabnzbd.newsunpack.RAR_COMMAND, action, '-vp', '-idp', '-o+',
                   password_command, '%s' % rarfile_path, '%s/' % extraction_path]

    if cfg.ignore_unrar_dates():
        command.insert(3, '-tsm-')

    # Let's start from the first one!
    self.cur_volume = 1
    stup, need_shell, command, creationflags = build_command(command)
    logging.debug('Running unrar for DirectUnpack %s', command)
    self.active_instance = Popen(command, shell=need_shell,
                                 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT, startupinfo=stup,
                                 creationflags=creationflags)

    # Add to runners
    ACTIVE_UNPACKERS.append(self)

    # Doing the first
    logging.info('DirectUnpacked volume %s for %s', self.cur_volume, self.cur_setname)
def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=None, keep=False,
                       priority=None, url='', nzbname=None, password=None, nzo_id=None):
    """ Analyse ZIP file and create job(s).
        Accepts ZIP files with ONLY nzb/nfo/folder files in it.
        returns (status, nzo_ids)
            status: -1==Error/Retry, 0==OK, 1==Ignore
    """
    from sabnzbd.nzbqueue import add_nzo
    nzo_ids = []
    if catdir is None:
        catdir = cat
    # Category may be embedded in the filename; extract it
    filename, cat = name_to_cat(filename, catdir)

    # Probe the archive type explicitly: ZIP, RAR or 7z
    if zipfile.is_zipfile(path):
        try:
            zf = zipfile.ZipFile(path)
        except:
            return -1, []
    elif is_rarfile(path):
        try:
            zf = RarFile(path)
        except:
            return -1, []
    elif is_sevenfile(path):
        try:
            zf = SevenZip(path)
        except:
            return -1, []
    else:
        # Not an archive we understand: ignore
        return 1, []

    status = 1
    names = zf.namelist()
    names.sort()
    nzbcount = 0
    for name in names:
        name = name.lower()
        # Any member that is not nzb/nfo/folder disqualifies the whole archive
        if not (name.endswith('.nzb') or name.endswith('.nfo') or name.endswith('/')):
            status = 1
            break
        elif name.endswith('.nzb'):
            status = 0
            nzbcount += 1

    if status == 0:
        if nzbcount != 1:
            # A single override name makes no sense for multiple NZBs
            nzbname = None
        for name in names:
            if name.lower().endswith('.nzb'):
                try:
                    data = zf.read(name)
                except:
                    zf.close()
                    return -1, []
                name = os.path.basename(name)
                if data:
                    try:
                        nzo = nzbstuff.NzbObject(name, pp, script, data, cat=cat, url=url,
                                                 priority=priority, nzbname=nzbname)
                        if not nzo.password:
                            nzo.password = password
                    except:
                        # Any constructor failure: skip this member
                        nzo = None
                    if nzo:
                        if nzo_id:
                            # Re-use existing nzo_id, when a "future" job gets it payload
                            # NOTE(review): unlike the other variant of this
                            # function, nzo_id is not reset to None afterwards,
                            # so every NZB member re-uses the same id — confirm
                            sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
                            nzo.nzo_id = nzo_id
                        nzo_ids.append(add_nzo(nzo))
                        nzo.update_rating()
        zf.close()
        try:
            if not keep:
                os.remove(path)
        except:
            logging.error(T('Error removing %s'), misc.clip_path(path))
            logging.info("Traceback: ", exc_info=True)
            # NOTE(review): flattened source is ambiguous here — this assumes
            # status is only degraded to 1 when removal fails; confirm upstream
            status = 1
    else:
        zf.close()
        status = 1

    return status, nzo_ids
def run_dir(folder, catdir):
    # Closure over 'self' (the enclosing scanner method is outside this view):
    # scan one watched 'folder', import stable NZB/archive files and track
    # unreadable ('suspected') and uninteresting ('ignored') paths on self.
    try:
        files = os.listdir(folder)
    except:
        if not self.error_reported and not catdir:
            logging.error(T('Cannot read Watched Folder %s'), misc.clip_path(folder))
            self.error_reported = True
        files = []

    for filename in files:
        path = os.path.join(folder, platform_encode(filename))
        # Skip sub-folders, already-ignored paths and hidden files
        if os.path.isdir(path) or path in self.ignored or filename[0] == '.':
            continue

        ext = os.path.splitext(path)[1].lower()
        candidate = ext in VALID_NZB_FILES + VALID_ARCHIVES
        if candidate:
            try:
                stat_tuple = os.stat(path)
            except:
                continue
        else:
            self.ignored[path] = 1

        # NOTE(review): if a non-candidate path were present in
        # self.suspected, stat_tuple would be unbound here — presumably
        # suspected only ever receives candidates; confirm
        if path in self.suspected:
            if CompareStat(self.suspected[path], stat_tuple):
                # Suspected file still has the same attributes
                continue
            else:
                del self.suspected[path]

        if candidate and stat_tuple.st_size > 0:
            logging.info('Trying to import %s', path)

            # Wait until the attributes are stable for 1 second
            # but give up after 3 sec
            stable = False
            for n in xrange(3):
                time.sleep(1.0)
                try:
                    stat_tuple_tmp = os.stat(path)
                except:
                    continue
                if CompareStat(stat_tuple, stat_tuple_tmp):
                    stable = True
                    break
                else:
                    stat_tuple = stat_tuple_tmp

            if not stable:
                continue

            # Handle archive files, but only when containing just NZB files
            if ext in VALID_ARCHIVES:
                res, nzo_ids = ProcessArchiveFile(filename, path, catdir=catdir, url=path)
                if res == -1:
                    # Could not be read yet: retry on a later scan
                    self.suspected[path] = stat_tuple
                elif res == 0:
                    self.error_reported = False
                else:
                    self.ignored[path] = 1

            # Handle .nzb, .nzb.gz or gzip-disguised-as-nzb or .bz2
            elif ext == '.nzb' or filename.lower().endswith('.nzb.gz') or filename.lower().endswith('.nzb.bz2'):
                res, nzo_id = ProcessSingleFile(filename, path, catdir=catdir, url=path)
                if res < 0:
                    self.suspected[path] = stat_tuple
                elif res == 0:
                    self.error_reported = False
                else:
                    self.ignored[path] = 1

            else:
                self.ignored[path] = 1

    # Drop bookkeeping entries for files that disappeared from the folder
    CleanList(self.ignored, folder, files)
    CleanList(self.suspected, folder, files)