def finish_connect(self, code):
    """Drive the NNTP authentication handshake from the server reply *code*.

    Advances the user_sent/user_ok/pass_sent/pass_ok state flags, marks the
    connection as ready when authentication completes (or is unnecessary),
    and raises NNTPPermanentError on a permanent rejection.
    """
    # No credentials configured and login not forced: the greeting alone
    # means we are fully connected — skip the whole AUTHINFO exchange
    if not (self.server.username or self.server.password or self.force_login):
        self.connected = True
        self.user_sent = True
        self.user_ok = True
        self.pass_sent = True
        self.pass_ok = True

    # 501 after sending AUTHINFO USER: rewrite to 481 with a clearer message
    if code == 501 and self.user_sent:
        # Change to a sensible text
        code = 481
        self.data[0] = "%d %s" % (
            code,
            T("Authentication failed, check username/password."),
        )
        self.user_ok = True
        self.pass_sent = True

    # 480 = authentication required: reset all state and force a login
    if code == 480:
        self.force_login = True
        self.connected = False
        self.user_sent = False
        self.user_ok = False
        self.pass_sent = False
        self.pass_ok = False

    if code in (400, 502):
        # 400 = service discontinued, 502 = permission denied — give up
        raise NNTPPermanentError(nntp_to_msg(self.data))
    elif not self.user_sent:
        # First step of the handshake: send the username
        command = utob("authinfo user %s\r\n" % self.server.username)
        self.nntp.sock.sendall(command)
        self.data = []
        self.user_sent = True
    elif not self.user_ok:
        if code == 381:
            # 381 = password required next
            self.user_ok = True
        elif code == 281:
            # No login required
            self.user_ok = True
            self.pass_sent = True
            self.pass_ok = True
            self.connected = True

    # Username accepted but password not yet sent: send it now
    if self.user_ok and not self.pass_sent:
        command = utob("authinfo pass %s\r\n" % self.server.password)
        self.nntp.sock.sendall(command)
        self.data = []
        self.pass_sent = True
    elif self.user_ok and not self.pass_ok:
        if code != 281:
            # Assume that login failed (code 481 or other)
            raise NNTPPermanentError(nntp_to_msg(self.data))
        else:
            # 281 = authentication accepted
            self.connected = True

    # Push the idle deadline forward after any progress
    self.timeout = time.time() + self.server.timeout
def body(self, precheck):
    """Ask the server for the article: STAT/HEAD when prechecking, BODY/ARTICLE otherwise."""
    # Refresh the deadline before starting a new round-trip
    self.timeout = time.time() + self.server.timeout

    # Pick the cheapest command the server supports for what we need
    if precheck:
        template = "STAT <%s>\r\n" if self.server.have_stat else "HEAD <%s>\r\n"
    elif self.server.have_body:
        template = "BODY <%s>\r\n"
    else:
        template = "ARTICLE <%s>\r\n"

    self.nntp.sock.sendall(utob(template % self.article.article))
    self.data = []
def body(self):
    """ Request the body of the article """
    # Refresh the deadline before starting a new round-trip
    self.timeout = time.time() + self.server.timeout

    # During precheck a STAT (or HEAD fallback) is enough; otherwise fetch
    # the body, falling back to ARTICLE when BODY is unsupported
    if self.article.nzf.nzo.precheck:
        template = "STAT <%s>\r\n" if self.server.have_stat else "HEAD <%s>\r\n"
    elif self.server.have_body:
        template = "BODY <%s>\r\n"
    else:
        template = "ARTICLE <%s>\r\n"

    self.nntp.sock.sendall(utob(template % self.article.article))
    self.data = []
def save_compressed(folder, filename, data):
    """Save *data* as a gzip-compressed NZB file named *filename* inside *folder*.

    Appends ".gz" (or ".nzb.gz") to the name. Failures are logged, not raised.
    """
    if filename.endswith(".nzb"):
        filename += ".gz"
    else:
        filename += ".nzb.gz"
    full_path = os.path.join(folder, filename)
    logging.info("Backing up %s", full_path)
    try:
        # Have to get around the path being put inside the tgz:
        # open the target ourselves and give GzipFile only the bare name
        with open(full_path, "wb") as tgz_file:
            # Context manager guarantees the gzip trailer is written and the
            # handle released even when write() raises (the original leaked
            # the GzipFile on error)
            with gzip.GzipFile(filename, fileobj=tgz_file) as f:
                f.write(encoding.utob(data))
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate;
        # best-effort backup: log and continue
        logging.error(T("Saving %s failed"), full_path)
        logging.info("Traceback: ", exc_info=True)
def panic_message(panic_code, a=None, b=None):
    """ Create the panic message from templates """
    # a/b carry code-specific details (e.g. host and port for PANIC_PORT);
    # their meaning depends on panic_code — see the branches below
    if sabnzbd.WIN32:
        os_str = T("Press Startkey+R and type the line (example):")
        # Quote the path so it survives spaces on Windows
        prog_path = '"%s"' % sabnzbd.MY_FULLNAME
    else:
        os_str = T("Open a Terminal window and type the line (example):")
        prog_path = sabnzbd.MY_FULLNAME

    # Build the code-specific body from the matching template
    if panic_code == PANIC_PORT:
        # Suggest the next port up as an alternative
        newport = int(b) + 1
        newport = "%s" % newport
        msg = MSG_BAD_PORT() % (b, a, os_str, prog_path, a, newport)
    elif panic_code == PANIC_TEMPL:
        msg = MSG_BAD_TEMPL() % a
    elif panic_code == PANIC_QUEUE:
        msg = MSG_BAD_QUEUE() % (a, os_str, prog_path)
    elif panic_code == PANIC_SQLITE:
        msg = MSG_SQLITE()
    elif panic_code == PANIC_HOST:
        msg = MSG_BAD_HOST() % (os_str, prog_path, "localhost", b)
    else:
        msg = MSG_OTHER() % (a, b)

    # Wrap the body in the common "bad news" HTML template
    msg = MSG_BAD_NEWS() % (
        sabnzbd.MY_NAME,
        sabnzbd.__version__,
        sabnzbd.MY_NAME,
        sabnzbd.__version__,
        msg,
        T("Program did not start!"),
    )

    if sabnzbd.WIN_SERVICE:
        sabnzbd.WIN_SERVICE.ErrLogger("Panic exit", msg)

    # Daemons / no-autobrowser setups get no HTML file; return None
    if (not cfg.autobrowser()) or sabnzbd.DAEMON:
        return

    # Write the message to a temp HTML file and return its path,
    # which the caller opens in a browser
    msgfile, url = tempfile.mkstemp(suffix=".html")
    os.write(msgfile, utob(msg))
    os.close(msgfile)
    return url
def _build_request(url):
    """Open *url* and return the response, handling inline basic-auth credentials."""
    # Detect basic auth embedded in the URL (user:pass@host) and strip it out,
    # keeping the credentials for an explicit Authorization header.
    # Adapted from python-feedparser
    credentials = None
    parsed = urllib.parse.urlparse(url)
    if parsed.username is not None or parsed.password is not None:
        if parsed.username and parsed.password:
            credentials = "%s:%s" % (parsed.username, parsed.password)
        # Rebuild netloc without the user:pass@ prefix
        netloc = parsed.hostname
        if parsed.port:
            netloc += ":" + str(parsed.port)
        url = urllib.parse.urlunparse(parsed._replace(netloc=netloc))

    # Start request
    request = urllib.request.Request(url)

    # Add headers
    request.add_header("User-Agent", "SABnzbd/%s" % sabnzbd.__version__)
    request.add_header("Accept-encoding", "gzip")
    if credentials:
        request.add_header("Authorization", "Basic " + ubtou(base64.b64encode(utob(credentials))).strip())
    return urllib.request.urlopen(request)
def build_history_info(nzo, workdir_complete="", postproc_time=0, script_output="", script_line="", series_info=False):
    """ Collects all the information needed for the database """
    completed = int(time.time())
    pp = _PP_LOOKUP.get(opts_to_pp(*nzo.repair_opts), "X")

    if script_output:
        # Compress the output of the script
        script_output = sqlite3.Binary(zlib.compress(utob(script_output)))

    download_time = nzo.nzo_info.get("download_time", 0)
    url_info = nzo.nzo_info.get("details", "") or nzo.nzo_info.get("more_info", "")

    # Pack the per-stage unpack results into one string:
    # stage name separated by ::: from its lines, lines by ;, stages by \r\n
    stage_log = "\r\n".join("%s:::%s" % (key, ";".join(results)) for key, results in nzo.unpack_info.items())

    # Reuse the old 'report' column to indicate a URL-fetch
    report = "future" if nzo.futuretype else ""

    # Analyze series info only when job is finished
    series = ""
    if series_info:
        seriesname, season, episode, _ = sabnzbd.newsunpack.analyse_show(nzo.final_name)
        if seriesname and season and episode:
            series = "%s/%s/%s" % (seriesname.lower(), season, episode)

    # Column order must match the history-table INSERT exactly
    return (
        completed,
        nzo.final_name,
        nzo.filename,
        nzo.cat,
        pp,
        nzo.script,
        report,
        nzo.url,
        nzo.status,
        nzo.nzo_id,
        clip_path(workdir_complete),
        clip_path(nzo.downpath),
        script_output,
        script_line,
        download_time,
        postproc_time,
        stage_log,
        nzo.bytes_downloaded,
        nzo.fail_msg,
        url_info,
        nzo.bytes_downloaded,
        series,
        nzo.md5sum,
        nzo.password,
    )
def nzbfile_parser(raw_data, nzo):
    """Parse the NZB document *raw_data* and fill *nzo* with its meta-data,
    groups and files.

    Duplicate or insane-sized segments are skipped and counted as bad
    articles; files that end up invalid are removed again. Updates the
    nzo averages, byte totals and md5sum as a side effect.
    """
    # Load data as file-object; strip the DTD so tags are not namespaced
    raw_data = raw_data.replace("http://www.newzbin.com/DTD/2003/nzb", "", 1)
    nzb_tree = xml.etree.ElementTree.fromstring(raw_data)

    # Hash for dupe-checking
    md5sum = hashlib.md5()

    # Average date
    avg_age_sum = 0

    # In case of failing timestamps and failing files
    time_now = time.time()
    skipped_files = 0
    valid_files = 0

    # Parse the header
    # BUGFIX: compare with None explicitly — an Element with no child elements
    # is falsy, so "if nzb_tree.find('head'):" silently skipped an existing
    # <head> without children (Element truth-testing is also deprecated).
    head_tree = nzb_tree.find("head")
    if head_tree is not None:
        for meta in head_tree.iter("meta"):
            meta_type = meta.attrib.get("type")
            if meta_type and meta.text:
                # Meta tags can occur multiple times
                if meta_type not in nzo.meta:
                    nzo.meta[meta_type] = []
                nzo.meta[meta_type].append(meta.text)
    logging.debug("NZB file meta-data = %s", nzo.meta)

    # Parse the files
    for file in nzb_tree.iter("file"):
        # Get subject and date
        file_name = ""
        if file.attrib.get("subject"):
            file_name = file.attrib.get("subject")

        # Don't fail if no date present
        try:
            file_date = datetime.datetime.fromtimestamp(int(file.attrib.get("date")))
            file_timestamp = int(file.attrib.get("date"))
        except:
            file_date = datetime.datetime.fromtimestamp(time_now)
            file_timestamp = time_now

        # Get group
        for group in file.iter("group"):
            if group.text not in nzo.groups:
                nzo.groups.append(group.text)

        # Get segments
        raw_article_db = {}
        file_bytes = 0
        # BUGFIX: explicit None-check for the same Element-truthiness reason as above
        segments_tree = file.find("segments")
        if segments_tree is not None:
            for segment in segments_tree.iter("segment"):
                try:
                    article_id = segment.text
                    segment_size = int(segment.attrib.get("bytes"))
                    partnum = int(segment.attrib.get("number"))

                    # Update hash
                    md5sum.update(utob(article_id))

                    # Duplicate parts?
                    if partnum in raw_article_db:
                        if article_id != raw_article_db[partnum][0]:
                            logging.info(
                                "Duplicate part %s, but different ID-s (%s // %s)",
                                partnum,
                                raw_article_db[partnum][0],
                                article_id,
                            )
                            nzo.increase_bad_articles_counter("duplicate_articles")
                        else:
                            logging.info("Skipping duplicate article (%s)", article_id)
                    elif segment_size <= 0 or segment_size >= 2**23:
                        # Perform sanity check (not negative, 0 or larger than 8MB) on article size
                        # We use this value later to allocate memory in cache and sabyenc
                        logging.info("Skipping article %s due to strange size (%s)", article_id, segment_size)
                        nzo.increase_bad_articles_counter("bad_articles")
                    else:
                        raw_article_db[partnum] = (article_id, segment_size)
                        file_bytes += segment_size
                except:
                    # In case of missing attributes
                    pass

        # Sort the articles by part number, compatible with Python 3.5
        raw_article_db_sorted = [raw_article_db[partnum] for partnum in sorted(raw_article_db)]

        # Create NZF
        nzf = sabnzbd.nzbstuff.NzbFile(file_date, file_name, raw_article_db_sorted, file_bytes, nzo)

        # Check if we already have this exact NZF (see custom eq-checks)
        if nzf in nzo.files:
            logging.info("File %s occured twice in NZB, skipping", nzf.filename)
            continue

        # Add valid NZF's
        if file_name and nzf.valid and nzf.nzf_id:
            logging.info("File %s added to queue", nzf.filename)
            nzo.files.append(nzf)
            nzo.files_table[nzf.nzf_id] = nzf
            nzo.bytes += nzf.bytes
            valid_files += 1
            avg_age_sum += file_timestamp
        else:
            logging.info("Error importing %s, skipping", file_name)
            if nzf.nzf_id:
                sabnzbd.remove_data(nzf.nzf_id, nzo.admin_path)
            skipped_files += 1

    # Final bookkeeping: guard against division by zero when nothing imported
    nr_files = max(1, valid_files)
    nzo.avg_stamp = avg_age_sum / nr_files
    nzo.avg_date = datetime.datetime.fromtimestamp(avg_age_sum / nr_files)
    nzo.md5sum = md5sum.hexdigest()

    if skipped_files:
        logging.warning(T("Failed to import %s files from %s"), skipped_files, nzo.filename)
def send_group(self, group):
    """Ask the server to switch the current newsgroup to *group*."""
    # Refresh the deadline before the round-trip
    self.timeout = time.time() + self.server.timeout
    cmd = utob("GROUP %s\r\n" % group)
    self.nntp.sock.sendall(cmd)
    self.data = []
def send_group(self, group: str):
    """ Send the NNTP GROUP command """
    # Refresh the deadline before the round-trip
    self.timeout = time.time() + self.server.timeout
    cmd = utob("GROUP %s\r\n" % group)
    self.nntp.sock.sendall(cmd)
    self.data = []