def repair_job(self, folder, new_nzb=None, password=None):
    """ Reconstruct admin for a single job folder, optionally with new NZB """

    def all_verified(path):
        """ Return True when all sets have been successfully verified """
        verified = sabnzbd.load_data(VERIFIED_FILE, path, remove=False) or {'x': False}
        return all(verified.values())

    name = os.path.basename(folder)
    admin_path = os.path.join(folder, JOB_ADMIN)

    # A replacement NZB was uploaded when the object carries a filename
    uploaded = getattr(new_nzb, 'filename', '')
    if uploaded:
        # Discard any stored NZBs and parse the freshly uploaded one
        remove_all(admin_path, '*.gz')
        logging.debug('Repair job %s with new NZB (%s)', name, uploaded)
        nzo_id = sabnzbd.add_nzbfile(new_nzb, pp=None, script=None, cat=None, priority=None,
                                     nzbname=name, reuse=True, password=password)[1]
    else:
        # Only re-parse a stored NZB when the job was not fully verified yet
        stored = [] if all_verified(admin_path) else globber_full(admin_path, '*.gz')
        if stored:
            logging.debug('Repair job %s by reparsing stored NZB', name)
            nzo_id = sabnzbd.add_nzbfile(stored[0], pp=None, script=None, cat=None, priority=None,
                                         nzbname=name, reuse=True, password=password)[1]
        else:
            # No NZB available: build an empty job object for reuse of the folder
            logging.debug('Repair job %s without stored NZB', name)
            nzo = NzbObject(name, pp=None, script=None, nzb='', cat=None, priority=None,
                            nzbname=name, reuse=True)
            nzo.password = password
            self.add(nzo)
            nzo_id = nzo.nzo_id
    return nzo_id
def application_openFiles_(self, nsapp, filenames):
    """Handle files handed to the application by macOS, queueing each
    existing file whose extension marks it as an NZB or supported archive."""
    for path in filenames:
        logging.info("[osx] receiving from macOS : %s", path)
        is_valid = os.path.exists(path) and \
            sabnzbd.filesystem.get_ext(path) in VALID_ARCHIVES + VALID_NZB_FILES
        if is_valid:
            sabnzbd.add_nzbfile(path, keep=True)
def repair_job(self, folder, new_nzb=None):
    """ Reconstruct admin for a single job folder, optionally with new NZB """
    name = os.path.basename(folder)
    path = os.path.join(folder, JOB_ADMIN)
    if new_nzb is None or not new_nzb.filename:
        # No replacement NZB supplied: fall back to the NZB stored in the admin folder
        filename = globber(path, '*.gz')
        if len(filename) > 0:
            logging.debug('Repair job %s by reparsing stored NZB', latin1(name))
            sabnzbd.add_nzbfile(filename[0], pp=None, script=None, cat=None, priority=None, nzbname=name, reuse=True)
        else:
            # Nothing stored either: create an empty job object that reuses the folder
            logging.debug('Repair job %s without stored NZB', latin1(name))
            nzo = NzbObject(name, 0, pp=None, script=None, nzb='', cat=None, priority=None, nzbname=name, reuse=True)
            self.add(nzo)
    else:
        # A new NZB was uploaded: drop stored ones and parse the new file
        remove_all(path, '*.gz')
        # Fix: this branch handles a NEW NZB, but the message said "without new NZB";
        # sibling implementations log "with new NZB (%s)" here
        logging.debug('Repair job %s with new NZB (%s)', latin1(name), latin1(new_nzb.filename))
        sabnzbd.add_nzbfile(new_nzb, pp=None, script=None, cat=None, priority=None, nzbname=name, reuse=True)
def repair_job(self, repair_folder, new_nzb=None, password=None):
    """ Reconstruct admin for a single job folder, optionally with new NZB """
    # Nothing to repair when the folder is absent
    if not (repair_folder and os.path.exists(repair_folder)):
        return None

    job_name = os.path.basename(repair_folder)
    admin_path = os.path.join(repair_folder, JOB_ADMIN)

    # Retry with a freshly uploaded NZB: drop stored ones and parse the upload
    if getattr(new_nzb, "filename", None):
        remove_all(admin_path, "*.gz", keep_folder=True)
        logging.debug("Repair job %s with new NZB (%s)", job_name, new_nzb.filename)
        return sabnzbd.add_nzbfile(new_nzb, nzbname=job_name, reuse=repair_folder, password=password)[1][0]

    # Skip re-parsing when every set was already verified during post-processing
    verified = sabnzbd.load_data(VERIFIED_FILE, admin_path, remove=False)
    stored_nzbs = []
    if not verified or not all(verified.values()):
        stored_nzbs = globber_full(admin_path, "*.gz")

    if stored_nzbs:
        logging.debug("Repair job %s by re-parsing stored NZB", job_name)
        return sabnzbd.add_nzbfile(stored_nzbs[0], nzbname=job_name, reuse=repair_folder, password=password)[1][0]

    # No NZB available at all: build an empty job that reuses the folder
    logging.debug("Repair job %s without stored NZB", job_name)
    nzo = NzbObject(job_name, nzbname=job_name, reuse=repair_folder)
    nzo.password = password
    self.add(nzo)
    return nzo.nzo_id
def addnzb(self, icon):
    """Menu handler: show a file chooser and queue the selected NZB/archive files."""
    dialog = Gtk.FileChooserDialog(title="SABnzbd - " + T("Add NZB"),
                                   action=Gtk.FileChooserAction.OPEN)
    dialog.add_buttons(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                       Gtk.STOCK_OPEN, Gtk.ResponseType.OK)
    dialog.set_select_multiple(True)

    # Restrict selection to NZBs and supported archive containers
    nzb_filter = Gtk.FileFilter()
    nzb_filter.set_name("*.nzb,*.gz,*.bz2,*.zip,*.rar,*.7z")
    for pattern in ("*.nzb", "*.gz", "*.bz2", "*.zip", "*.rar", "*.7z"):
        nzb_filter.add_pattern(pattern)
    dialog.add_filter(nzb_filter)

    if dialog.run() == Gtk.ResponseType.OK:
        for selected in dialog.get_filenames():
            sabnzbd.add_nzbfile(selected)
    dialog.destroy()
def run_dir(folder, catdir):
    """Scan one watched folder (or category sub-folder) and import NZB/archive files.

    folder: directory to scan.
    catdir: category value passed through to add_nzbfile; also suppresses
            the unreadable-folder error report when truthy.
    Uses self.ignored / self.suspected (closure over the enclosing method's
    self) as bookkeeping that persists between scans.
    """
    try:
        files = os.listdir(folder)
    except OSError:
        # Report an unreadable top-level watched folder only once
        if not self.error_reported and not catdir:
            logging.error(T("Cannot read Watched Folder %s"), filesystem.clip_path(folder))
            self.error_reported = True
        files = []

    for filename in files:
        path = os.path.join(folder, filename)
        # Skip directories, previously rejected paths and hidden files
        if os.path.isdir(path) or path in self.ignored or filename[0] == ".":
            continue

        if filesystem.get_ext(path) in VALID_NZB_FILES + VALID_ARCHIVES:
            try:
                stat_tuple = os.stat(path)
            except OSError:
                # File vanished or is unreadable; retry on the next scan
                continue
        else:
            # Wrong extension: remember it so we don't re-check every scan
            self.ignored[path] = 1
            continue

        if path in self.suspected:
            if compare_stat_tuple(self.suspected[path], stat_tuple):
                # Suspected file still has the same attributes
                continue
            else:
                # Attributes changed: give the file another chance below
                del self.suspected[path]

        if stat_tuple.st_size > 0:
            logging.info("Trying to import %s", path)
            # Wait until the attributes are stable for 1 second, but give up after 3 sec
            # This indicates that the file is fully written to disk
            for n in range(3):
                time.sleep(1.0)
                try:
                    stat_tuple_tmp = os.stat(path)
                except OSError:
                    # NOTE(review): this continue advances the stability probe
                    # loop, not the outer file loop
                    continue
                if compare_stat_tuple(stat_tuple, stat_tuple_tmp):
                    break
                stat_tuple = stat_tuple_tmp
            else:
                # Not stable
                continue

            # Add the NZB's
            res, _ = sabnzbd.add_nzbfile(path, catdir=catdir, keep=False)
            if res < 0:
                # Retry later, for example when we can't read the file
                self.suspected[path] = stat_tuple
            elif res == 0:
                # Success: clear the sticky error flag
                self.error_reported = False
            else:
                # Import rejected (e.g. empty/invalid): don't try again
                self.ignored[path] = 1

    # Remove files from the bookkeeping that are no longer on the disk
    clean_file_list(self.ignored, folder, files)
    clean_file_list(self.suspected, folder, files)
def run(self):
    """Worker loop: pull (url, future_nzo) pairs from the queue, fetch each
    URL, inspect response headers for indexer metadata, write the payload to
    a temp file and hand it to sabnzbd.add_nzbfile; failures either re-queue
    the URL with a delay or fail the job to history."""
    self.shutdown = False
    while not self.shutdown:
        # Set NzbObject object to None so reference from this thread
        # does not keep the object alive in the future (see #1628)
        future_nzo = None
        url, future_nzo = self.queue.get()
        if not url:
            # stop signal, go test self.shutdown
            continue

        if future_nzo:
            # Re-queue when too early and still active
            if future_nzo.url_wait and future_nzo.url_wait > time.time():
                self.add(url, future_nzo)
                time.sleep(1.0)
                continue
            # Paused
            if future_nzo.status == Status.PAUSED:
                self.add(url, future_nzo)
                time.sleep(1.0)
                continue

        url = url.replace(" ", "")

        try:
            if future_nzo:
                # If nzo entry deleted, give up
                try:
                    deleted = future_nzo.deleted
                except AttributeError:
                    deleted = True
                if deleted:
                    logging.debug("Dropping URL %s, job entry missing", url)
                    continue

            filename = None
            category = None
            nzo_info = {}
            wait = 0
            retry = True
            fetch_request = None

            logging.info("Grabbing URL %s", url)
            try:
                fetch_request = _build_request(url)
            except Exception as e:
                # Cannot list exceptions here, because of unpredictability over platforms
                error0 = str(sys.exc_info()[0]).lower()
                error1 = str(sys.exc_info()[1]).lower()
                logging.debug('Error "%s" trying to get the url %s', error1, url)
                # Map well-known error texts to a non-retryable user message
                if "certificate_verify_failed" in error1 or "certificateerror" in error0:
                    msg = T("Server %s uses an untrusted HTTPS certificate") % ""
                    msg += " - https://sabnzbd.org/certificate-errors"
                    retry = False
                elif "nodename nor servname provided" in error1:
                    msg = T("Server name does not resolve")
                    retry = False
                elif "401" in error1 or "unauthorized" in error1:
                    msg = T("Unauthorized access")
                    retry = False
                elif "404" in error1:
                    msg = T("File not on server")
                    retry = False
                elif hasattr(e, "headers") and "retry-after" in e.headers:
                    # Catch if the server send retry (e.headers is case-INsensitive)
                    wait = misc.int_conv(e.headers["retry-after"])

            if fetch_request:
                # Harvest DNZB metadata headers sent by indexers
                for hdr in fetch_request.headers:
                    try:
                        item = hdr.lower()
                        value = fetch_request.headers[hdr]
                    except:
                        continue
                    if item in ("category_id", "x-dnzb-category"):
                        category = value
                    elif item in ("x-dnzb-moreinfo",):
                        nzo_info["more_info"] = value
                    elif item in ("x-dnzb-name",):
                        filename = value
                        if not filename.endswith(".nzb"):
                            filename += ".nzb"
                    elif item == "x-dnzb-propername":
                        nzo_info["propername"] = value
                    elif item == "x-dnzb-episodename":
                        nzo_info["episodename"] = value
                    elif item == "x-dnzb-year":
                        nzo_info["year"] = value
                    elif item == "x-dnzb-failure":
                        nzo_info["failure"] = value
                    elif item == "x-dnzb-details":
                        nzo_info["details"] = value
                    elif item == "x-dnzb-password":
                        nzo_info["password"] = value
                    elif item == "retry-after":
                        wait = misc.int_conv(value)

                    # Rating fields
                    # NOTE(review): module-level name _RARTING_FIELDS looks like a
                    # typo for _RATING_FIELDS — defined outside this view, so it
                    # cannot be renamed here
                    if item in _RARTING_FIELDS:
                        nzo_info[item] = value

                    # Get filename from Content-Disposition header
                    if not filename and "filename=" in value:
                        filename = value[value.index("filename=") + 9:].strip(";").strip('"')

            if wait:
                # For sites that have a rate-limiting attribute
                msg = ""
                retry = True
                fetch_request = None
            elif retry:
                fetch_request, msg, retry, wait, data = _analyse(fetch_request, future_nzo)

            if not fetch_request:
                if retry:
                    logging.info("Retry URL %s", url)
                    self.add(url, future_nzo, wait)
                else:
                    self.fail_to_history(future_nzo, url, msg)
                continue

            if not filename:
                filename = os.path.basename(urllib.parse.unquote(url))
                # URL was redirected, maybe the redirect has better filename?
                # Check if the original URL has extension
                if (url != fetch_request.geturl()
                        and sabnzbd.filesystem.get_ext(filename) not in VALID_NZB_FILES + VALID_ARCHIVES):
                    filename = os.path.basename(urllib.parse.unquote(fetch_request.geturl()))
            elif "&nzbname=" in filename:
                # Sometimes the filename contains the full URL, duh!
                filename = filename[filename.find("&nzbname=") + 9:]

            # Job settings come from the placeholder job created when the URL was added
            pp = future_nzo.pp
            script = future_nzo.script
            cat = future_nzo.cat
            if (cat is None or cat == "*") and category:
                cat = misc.cat_convert(category)
            priority = future_nzo.priority
            nzbname = future_nzo.custom_name

            # process data
            if not data:
                try:
                    data = fetch_request.read()
                except (IncompleteRead, IOError):
                    self.fail_to_history(future_nzo, url, T("Server could not complete request"))
                    fetch_request.close()
                    continue
            fetch_request.close()

            # Payload looks like NZB XML but filename lacks the extension
            if b"<nzb" in data and sabnzbd.filesystem.get_ext(filename) != ".nzb":
                filename += ".nzb"

            # Sanitize filename first (also removing forbidden Windows-names)
            filename = sabnzbd.filesystem.sanitize_filename(filename)

            # If no filename, make one
            if not filename:
                filename = sabnzbd.get_new_id("url", os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER))

            # Write data to temp file
            path = os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER, filename)
            with open(path, "wb") as temp_nzb:
                temp_nzb.write(data)

            # Check if nzb file
            if sabnzbd.filesystem.get_ext(filename) in VALID_ARCHIVES + VALID_NZB_FILES:
                res, _ = sabnzbd.add_nzbfile(
                    path,
                    pp=pp,
                    script=script,
                    cat=cat,
                    priority=priority,
                    nzbname=nzbname,
                    nzo_info=nzo_info,
                    url=future_nzo.url,
                    keep=False,
                    password=future_nzo.password,
                    nzo_id=future_nzo.nzo_id,
                )
                # -2==Error/retry, -1==Error, 0==OK, 1==Empty
                if res == -2:
                    logging.info("Incomplete NZB, retry after 5 min %s", url)
                    self.add(url, future_nzo, when=300)
                elif res == -1:
                    # Error already thrown
                    self.fail_to_history(future_nzo, url)
                elif res == 1:
                    # No NZB-files inside archive
                    self.fail_to_history(future_nzo, url, T("Empty NZB file %s") % filename)
            else:
                logging.info("Unknown filetype when fetching NZB, retry after 30s %s", url)
                self.add(url, future_nzo, 30)

            # Always clean up what we wrote to disk
            try:
                sabnzbd.filesystem.remove_file(path)
            except:
                pass
        except:
            # Deliberate catch-all boundary: the worker thread must never die
            logging.error(T("URLGRABBER CRASHED"), exc_info=True)
            logging.debug("URLGRABBER Traceback: ", exc_info=True)