def load_data(data_id, path, remove=True, do_pickle=True, silent=False):
    """Load a previously saved data file from *path*/*data_id*.

    Returns the unpickled object (or raw bytes when do_pickle is False),
    or None when the file is missing or reading/unpickling fails.
    When *remove* is True the file is deleted after a successful read.
    """
    full_path = os.path.join(path, data_id)
    if not os.path.exists(full_path):
        logging.info("[%s] %s missing", misc.caller_name(), full_path)
        return None
    if not silent:
        logging.debug("[%s] Loading data for %s from %s", misc.caller_name(), data_id, full_path)

    try:
        with open(full_path, "rb") as data_file:
            if not do_pickle:
                data = data_file.read()
            else:
                try:
                    data = pickle.load(data_file, encoding=sabnzbd.encoding.CODEPAGE)
                except UnicodeDecodeError:
                    # Could be Python 2 data that we can load using old encoding
                    data = pickle.load(data_file, encoding="latin1")
        if remove:
            filesystem.remove_file(full_path)
    except:
        logging.error(T("Loading %s failed"), full_path)
        logging.info("Traceback: ", exc_info=True)
        return None

    return data
def read_queue(self, repair):
    """Read queue from disk, supporting repair modes
    0 = no repairs
    1 = use existing queue, add missing "incomplete" folders
    2 = Discard all queue admin, reconstruct from "incomplete" folders
    """
    nzo_ids = []
    if repair < 2:
        # Try to process the queue file
        try:
            data = sabnzbd.load_admin(QUEUE_FILE_NAME)
            if data:
                # Queue file is a (version, nzo_id-list, extra) tuple
                queue_vers, nzo_ids, _ = data
                if not queue_vers == QUEUE_VERSION:
                    # Version mismatch: discard the listed ids
                    nzo_ids = []
                    logging.error(T("Incompatible queuefile found, cannot proceed"))
                    if not repair:
                        # Without repair mode there is no recovery: alert and quit
                        panic_queue(os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME))
                        exit_sab(2)
        except:
            # Corrupt/unreadable queue file: start with an empty id list
            nzo_ids = []
            logging.error(
                T("Error loading %s, corrupt file detected"),
                os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME),
            )

    # First handle jobs in the queue file
    folders = []
    for nzo_id in nzo_ids:
        folder, _id = os.path.split(nzo_id)
        path = get_admin_path(folder, future=False)

        # Try as normal job
        nzo = sabnzbd.load_data(_id, path, remove=False)
        if not nzo:
            # Try as future job
            path = get_admin_path(folder, future=True)
            nzo = sabnzbd.load_data(_id, path)
        if nzo:
            self.add(nzo, save=False, quiet=True)
            folders.append(folder)

    # Scan for any folders in "incomplete" that are not yet in the queue
    if repair:
        logging.info("Starting queue repair")
        # If nothing was restored from the queue file, scan everything
        self.scan_jobs(not folders)
        # Handle any lost future jobs
        for item in globber_full(os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)):
            path, nzo_id = os.path.split(item)
            if nzo_id not in self.__nzo_table:
                if nzo_id.startswith("SABnzbd_nzo"):
                    nzo = sabnzbd.load_data(nzo_id, path, remove=True)
                    if nzo:
                        self.add(nzo, save=True)
                else:
                    # Not a job file: best-effort removal of the leftover
                    try:
                        remove_file(item)
                    except:
                        pass
def remove_data(_id, path):
    """Remove the admin file named *_id* from directory *path*.

    Best-effort cleanup: failures are only logged at debug level.
    """
    target = os.path.join(path, _id)
    try:
        if os.path.exists(target):
            filesystem.remove_file(target)
    except:
        logging.debug("Failed to remove %s", target)
def del_marker(path):
    """Delete the marker file at *path*, if one exists.

    A failed removal is logged (with traceback) but not raised.
    """
    if not path or not os.path.exists(path):
        return
    logging.debug("Removing marker file %s", path)
    try:
        remove_file(path)
    except:
        logging.info("Cannot remove marker file %s", path)
        logging.info("Traceback: ", exc_info=True)
def check_repair_request():
    """Check for a repair-request marker file in the admin directory.

    Returns True when the marker was present (deleting it best-effort),
    False otherwise.
    """
    marker = os.path.join(cfg.admin_dir.get_path(), REPAIR_REQUEST)
    if not os.path.exists(marker):
        return False
    try:
        filesystem.remove_file(marker)
    except:
        # Failing to delete the marker does not change the answer
        pass
    return True
def execute(self, command, args=(), save=False):
    """Wrapper for executing SQL commands with retry and recovery.

    Parameters:
        command: SQL statement to execute.
        args: optional tuple of bind parameters for the statement.
        save: when True, commit the connection after a successful execute.

    Returns True on success (also when the database is readonly, since no
    recovery is possible there), False when the command ultimately failed.
    Retries up to 5 times when the database reports it is locked; rebuilds
    the database file when it is reported damaged.
    """
    for tries in range(5, 0, -1):
        try:
            if args and isinstance(args, tuple):
                self.c.execute(command, args)
            else:
                self.c.execute(command)
            if save:
                self.con.commit()
            return True
        except:
            error = str(sys.exc_info()[1])
            # Fix: only retry while attempts remain. The original tested
            # "tries >= 0", which is always true inside range(5, 0, -1),
            # so a persistently locked database made the last attempt
            # "continue" out of the loop and return None silently.
            if tries > 1 and "is locked" in error:
                logging.debug("Database locked, wait and retry")
                time.sleep(0.5)
                continue
            elif "readonly" in error:
                logging.error(T("Cannot write to History database, check access rights!"))
                # Report back success, because there's no recovery possible
                return True
            elif "not a database" in error or "malformed" in error or "duplicate column name" in error:
                # Database file is damaged: recreate it from scratch
                logging.error(T("Damaged History database, created empty replacement"))
                logging.info("Traceback: ", exc_info=True)
                self.close()
                try:
                    remove_file(HistoryDB.db_path)
                except:
                    pass
                self.connect()
                # Return False in case of "duplicate column" error
                # because the column addition in connect() must be terminated
                return "duplicate column name" not in error
            else:
                logging.error(T("SQL Command Failed, see log"))
                logging.info("SQL: %s", command)
                logging.info("Arguments: %s", repr(args))
                logging.info("Traceback: ", exc_info=True)
                try:
                    self.con.rollback()
                except:
                    logging.debug("Rollback Failed:", exc_info=True)
                return False
    # Defensive: all retries exhausted without a decisive branch
    return False
def pid_file(pid_path=None, pid_file=None, port=0):
    """Create (when *port* is non-zero) or remove the PID file.

    The resolved location is stored in sabnzbd.DIR_PID so a later call
    without path arguments operates on the same file. Only applies on
    non-Windows platforms and only for absolute paths.
    """
    if not sabnzbd.WIN32:
        if pid_path and pid_path.startswith("/"):
            sabnzbd.DIR_PID = os.path.join(pid_path, "sabnzbd-%d.pid" % port)
        elif pid_file and pid_file.startswith("/"):
            sabnzbd.DIR_PID = pid_file

    if not sabnzbd.DIR_PID:
        return
    try:
        if not port:
            # port == 0 signals shutdown: remove the PID file
            filesystem.remove_file(sabnzbd.DIR_PID)
        else:
            with open(sabnzbd.DIR_PID, "w") as f:
                f.write("%d\n" % os.getpid())
    except:
        logging.warning(T("Cannot access PID file %s"), sabnzbd.DIR_PID)
def remove_samples(path):
    """Remove all files below *path* that match the sample pattern.

    Deletion is skipped entirely when every file matches (or there is
    only a single file), since that indicates a false-positive match.
    """
    sample_files = []
    total_files = 0
    for root, _dirs, files in os.walk(path):
        for candidate in files:
            total_files += 1
            if RE_SAMPLE.search(candidate):
                sample_files.append(os.path.join(root, candidate))

    # Make sure we skip false-positives
    if len(sample_files) >= total_files:
        logging.info("Skipping sample-removal, false-positive")
        return

    for sample in sample_files:
        try:
            logging.info("Removing unwanted sample file %s", sample)
            remove_file(sample)
        except:
            logging.error(T("Removing %s failed"), clip_path(sample))
            logging.info("Traceback: ", exc_info=True)
def cleanup_list(wdir, skip_nzb):
    """Recursively delete files in *wdir* whose extension is on the
    configured cleanup list, optionally sparing the nzb extension.
    """
    if not cfg.cleanup_list():
        return
    try:
        entries = os.listdir(wdir)
    except:
        entries = ()
    for entry in entries:
        entry_path = os.path.join(wdir, entry)
        if os.path.isdir(entry_path):
            # Recurse into sub-directories
            cleanup_list(entry_path, skip_nzb)
        elif on_cleanup_list(entry, skip_nzb):
            try:
                logging.info("Removing unwanted file %s", entry_path)
                remove_file(entry_path)
            except:
                logging.error(T("Removing %s failed"), clip_path(entry_path))
                logging.info("Traceback: ", exc_info=True)
    if entries:
        # If directories only contained unwanted files, remove them
        cleanup_empty_directories(wdir)
def abort(self):
    """ Abort running instance and delete generated files """
    # Only act once per set; cur_setname empty means nothing is running
    if not self.killed and self.cur_setname:
        logging.info("Aborting DirectUnpack for %s", self.cur_setname)
        self.killed = True

        # Save reference to the first rarfile
        rarfile_nzf = self.rarfile_nzf

        # Abort Unrar
        if self.active_instance:
            # First we try to abort gracefully
            try:
                self.active_instance.stdin.write(b"Q\n")
                time.sleep(0.2)
            except IOError:
                pass

            # Now force kill and give it a bit of time
            try:
                self.active_instance.kill()
                time.sleep(0.2)
            except AttributeError:
                # Already killed by the Quit command
                pass

        # Wake up the thread
        with self.next_file_lock:
            self.next_file_lock.notify()

        # No new sets
        self.next_sets = []
        self.success_sets = {}

        # Remove files
        if self.unpack_dir_info:
            # unpack_dir_info is a 5-tuple; only path and one_folder are used here
            extraction_path, _, _, one_folder, _ = self.unpack_dir_info
            # In case of flat-unpack we need to remove the files manually
            if one_folder:
                # RarFile can fail for mysterious reasons
                try:
                    rar_contents = RarFile(
                        os.path.join(self.nzo.downpath, rarfile_nzf.filename), single_file_check=True
                    ).filelist()
                    for rm_file in rar_contents:
                        # Flat-unpack, so remove foldername from RarFile output
                        f = os.path.join(extraction_path, os.path.basename(rm_file))
                        remove_file(f)
                except:
                    # The user will have to remove it themselves
                    logging.info("Failed to clean Direct Unpack after aborting %s", rarfile_nzf.filename, exc_info=True)
            else:
                # We can just remove the whole path
                remove_all(extraction_path, recursive=True)
            # Remove dir-info
            self.unpack_dir_info = None

        # Reset settings
        self.reset_active()
def process_single_nzb(
    filename,
    path,
    pp=None,
    script=None,
    cat=None,
    catdir=None,
    keep=False,
    priority=None,
    nzbname=None,
    reuse=None,
    nzo_info=None,
    dup_check=True,
    url=None,
    password=None,
    nzo_id=None,
):
    """Analyze file and create a job from it
    Supports NZB, NZB.BZ2, NZB.GZ and GZ.NZB-in-disguise
    returns (status, nzo_ids)
    status: -2==Error/retry, -1==Error, 0==OK
    (note: the empty-NZB branch below also returns 1)
    """
    nzo_ids = []
    if catdir is None:
        catdir = cat

    try:
        # Sniff the first two bytes to detect compressed NZBs regardless of extension
        with open(path, "rb") as nzb_file:
            check_bytes = nzb_file.read(2)

        if check_bytes == b"\x1f\x8b":
            # gzip file or gzip in disguise
            filename = filename.replace(".nzb.gz", ".nzb")
            nzb_reader_handler = gzip.GzipFile
        elif check_bytes == b"BZ":
            # bz2 file or bz2 in disguise
            filename = filename.replace(".nzb.bz2", ".nzb")
            nzb_reader_handler = bz2.BZ2File
        else:
            nzb_reader_handler = open

        # Let's get some data and hope we can decode it
        with nzb_reader_handler(path, "rb") as nzb_file:
            data = correct_unknown_encoding(nzb_file.read())
    except OSError:
        logging.warning(T("Cannot read %s"), filesystem.clip_path(path))
        logging.info("Traceback: ", exc_info=True)
        return -2, nzo_ids

    if filename:
        filename, cat = name_to_cat(filename, catdir)
        # The name is used as the name of the folder, so sanitize it using folder specific santization
        if not nzbname:
            # Prevent embedded password from being damaged by sanitize and trimming
            nzbname = get_filename(filename)

    try:
        nzo = nzbstuff.NzbObject(
            filename,
            pp=pp,
            script=script,
            nzb=data,
            cat=cat,
            url=url,
            priority=priority,
            nzbname=nzbname,
            nzo_info=nzo_info,
            reuse=reuse,
            dup_check=dup_check,
        )
        if not nzo.password:
            nzo.password = password
    except TypeError:
        # Duplicate, ignore
        if nzo_id:
            sabnzbd.NzbQueue.remove(nzo_id)
        nzo = None
    except ValueError:
        # Empty
        return 1, nzo_ids
    except:
        # Chained comparison: opening <nzb tag present AND closing </nzb tag absent
        if data.find("<nzb") >= 0 > data.find("</nzb"):
            # Looks like an incomplete file, retry
            return -2, nzo_ids
        else:
            # Something else is wrong, show error
            logging.error(T("Error while adding %s, removing"), filename, exc_info=True)
            return -1, nzo_ids

    if nzo:
        if nzo_id:
            # Re-use existing nzo_id, when a "future" job gets it payload
            sabnzbd.NzbQueue.remove(nzo_id, delete_all_data=False)
            nzo.nzo_id = nzo_id
        nzo_ids.append(sabnzbd.NzbQueue.add(nzo, quiet=reuse))
        nzo.update_rating()

    try:
        if not keep:
            filesystem.remove_file(path)
    except OSError:
        # Job was still added to the queue, so throw error but don't report failed add
        logging.error(T("Error removing %s"), filesystem.clip_path(path))
        logging.info("Traceback: ", exc_info=True)

    return 0, nzo_ids
def process_nzb_archive_file(
    filename,
    path,
    pp=None,
    script=None,
    cat=None,
    catdir=None,
    keep=False,
    priority=None,
    nzbname=None,
    reuse=None,
    nzo_info=None,
    dup_check=True,
    url=None,
    password=None,
    nzo_id=None,
):
    """Analyse ZIP file and create job(s).
    Accepts ZIP files with ONLY nzb/nfo/folder files in it.
    returns (status, nzo_ids)
    status: -1==Error, 0==OK, 1==Ignore
    """
    nzo_ids = []
    if catdir is None:
        catdir = cat
    filename, cat = name_to_cat(filename, catdir)

    # Returns -1==Error/Retry, 0==OK, 1==Ignore
    status, zf, extension = is_archive(path)
    if status != 0:
        return status, []

    # Count embedded .nzb entries; status 0 means at least one was found
    status = 1
    names = zf.namelist()
    nzbcount = 0
    for name in names:
        name = name.lower()
        if name.endswith(".nzb"):
            status = 0
            nzbcount += 1

    if status == 0:
        if nzbcount != 1:
            # Multiple NZBs: each job gets its own name, so drop the override
            nzbname = None
        for name in names:
            if name.lower().endswith(".nzb"):
                try:
                    data = correct_unknown_encoding(zf.read(name))
                except OSError:
                    logging.error(T("Cannot read %s"), name, exc_info=True)
                    zf.close()
                    return -1, []
                name = filesystem.setname_from_path(name)
                if data:
                    nzo = None
                    try:
                        nzo = nzbstuff.NzbObject(
                            name,
                            pp=pp,
                            script=script,
                            nzb=data,
                            cat=cat,
                            url=url,
                            priority=priority,
                            nzbname=nzbname,
                            nzo_info=nzo_info,
                            reuse=reuse,
                            dup_check=dup_check,
                        )
                        if not nzo.password:
                            nzo.password = password
                    except (TypeError, ValueError):
                        # Duplicate or empty, ignore
                        pass
                    except:
                        # Something else is wrong, show error
                        logging.error(T("Error while adding %s, removing"), name, exc_info=True)

                    if nzo:
                        if nzo_id:
                            # Re-use existing nzo_id, when a "future" job gets it payload
                            sabnzbd.NzbQueue.remove(nzo_id, delete_all_data=False)
                            nzo.nzo_id = nzo_id
                            # Only the first job inherits the id
                            nzo_id = None
                        nzo_ids.append(sabnzbd.NzbQueue.add(nzo))
                        nzo.update_rating()
        zf.close()
        try:
            if not keep:
                filesystem.remove_file(path)
        except OSError:
            # Jobs were already queued; log but keep the success status
            logging.error(T("Error removing %s"), filesystem.clip_path(path))
            logging.info("Traceback: ", exc_info=True)
    else:
        # No NZBs inside: ignore the archive
        zf.close()
        status = 1

    return status, nzo_ids
def save_config(force=False):
    """ Update Setup file with current option values """
    global CFG, database, modified

    # Nothing to do unless something changed or the caller forces a write
    if not (modified or force):
        return True

    if sabnzbd.cfg.configlock():
        logging.warning(T("Configuration locked, cannot save settings"))
        return False

    # Copy every option from the in-memory database into the CFG structure
    for section in database:
        if section in ("servers", "categories", "rss"):
            # Multi-entry sections: one sub-section per server/category/feed
            try:
                CFG[section]
            except KeyError:
                CFG[section] = {}
            for subsec in database[section]:
                if section == "servers":
                    # Square brackets clash with INI section syntax
                    subsec_mod = subsec.replace("[", "{").replace("]", "}")
                else:
                    subsec_mod = subsec
                try:
                    CFG[section][subsec_mod]
                except KeyError:
                    CFG[section][subsec_mod] = {}
                items = database[section][subsec].get_dict()
                CFG[section][subsec_mod] = items
        else:
            # Flat sections: each option knows its own (section, keyword) ident
            for option in database[section]:
                sec, kw = database[section][option].ident()
                sec = sec[-1]
                try:
                    CFG[sec]
                except KeyError:
                    CFG[sec] = {}
                value = database[section][option]()
                # bool is a subclass of int, check first
                if isinstance(value, bool):
                    # convert bool to int when saving so we store 0 or 1
                    CFG[sec][kw] = str(int(value))
                elif isinstance(value, int):
                    CFG[sec][kw] = str(value)
                else:
                    CFG[sec][kw] = value

    res = False
    filename = CFG.filename
    bakname = filename + ".bak"

    # Check if file is writable
    if not is_writable(filename):
        logging.error(T("Cannot write to INI file %s"), filename)
        return res

    # copy current file to backup
    try:
        shutil.copyfile(filename, bakname)
        shutil.copymode(filename, bakname)
    except:
        # Something wrong with the backup,
        logging.error(T("Cannot create backup file for %s"), bakname)
        logging.info("Traceback: ", exc_info=True)
        return res

    # Write new config file
    try:
        logging.info("Writing settings to INI file %s", filename)
        CFG.write()
        shutil.copymode(bakname, filename)
        modified = False
        res = True
    except:
        logging.error(T("Cannot write to INI file %s"), filename)
        logging.info("Traceback: ", exc_info=True)
        # A partial write may be left behind: drop it and roll back
        try:
            remove_file(filename)
        except:
            pass
        # Restore INI file from backup
        renamer(bakname, filename)

    return res