def assemble(nzf, file_done):
    """Assemble a NZF from its table of articles.

    1) Partial write: write what we have
    2) Nothing written before: write all
    """
    # Lazily create the hash object on first call for this NZF
    if not nzf.md5:
        nzf.md5 = hashlib.md5()

    # Append-mode so a partial earlier write is simply continued
    with open(nzf.filepath, "ab") as output:
        for article in nzf.decodetable:
            # Stop immediately if the job was deleted while we write
            if nzf.nzo.status is Status.DELETED:
                break

            # Articles already flushed to disk need no work
            if article.on_disk:
                continue

            if not article.decoded:
                # Not decoded yet: if the file is done this is just a
                # missing piece, so keep writing; otherwise stop here
                if file_done:
                    continue
                break

            article_data = ArticleCache.do.load_article(article)
            # Cache can come back empty when the nzo was deleted
            if not article_data:
                logging.info("No data found when trying to write %s", article)
                continue

            output.write(article_data)
            nzf.md5.update(article_data)
            article.on_disk = True

    # Final steps
    if file_done:
        set_permissions(nzf.filepath)
        nzf.md5sum = nzf.md5.digest()
def _runner(self, perms_test, perms_after):
    """Generic test runner for permissions testing.

    The umask is set per test via the relevant sab config option; the
    filesystem parameter in setUp(). Note that the umask set in the
    environment before starting the program also affects the results if
    sabnzbd.cfg.umask isn't set.

    Arguments:
        str perms_test: permissions for test objects, chmod style "0755".
        str perms_after: expected permissions after completion of the test.
    """
    mode_before = int(perms_test, 8)
    if sabnzbd.cfg.umask():
        mode_expected = int(perms_after, 8)
    else:
        # No umask option set: expectation derives from the original umask
        mode_expected = int("0777", 8) & (sabnzbd.ORG_UMASK ^ int("0777", 8))

    # Create the fake test directory, escalating to root on permission errors
    test_dir = "/test"
    try:
        self.fs.create_dir(test_dir, mode_before)
    except PermissionError:
        ffs.set_uid(0)
        self.fs.create_dir(test_dir, mode_before)
    assert os.path.exists(test_dir) is True
    assert stat.filemode(os.stat(test_dir).st_mode) == "d" + stat.filemode(mode_before)[1:]

    # Create and verify fake files, some nested in subdirectories
    for relname in ("foobar", "file.ext", "sub/dir/.nzb", "another/sub/dir/WithSome.File"):
        fname = os.path.join(test_dir, relname)
        try:
            self.fs.create_file(fname, mode_before)
        except PermissionError:
            try:
                ffs.set_uid(0)
                self.fs.create_file(fname, mode_before)
            except Exception:
                # Skip creating files, if not even using root gets the job done.
                break
        assert os.path.exists(fname) is True
        assert stat.filemode(os.stat(fname).st_mode)[1:] == stat.filemode(mode_before)[1:]

    # Exercise the code under test; recursive by default
    filesystem.set_permissions(test_dir)

    # Verify the results: dirs should match mode_expected, files must not
    # carry any executable or special bits
    strip_bits = stat.S_ISUID | stat.S_ISGID | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    for root, dirs, files in os.walk(test_dir):
        for dirname in (os.path.join(root, d) for d in dirs):
            assert stat.filemode(os.stat(dirname).st_mode) == "d" + stat.filemode(mode_expected)[1:]
        for fname in (os.path.join(root, f) for f in files):
            assert stat.filemode(os.stat(fname).st_mode)[1:] == stat.filemode(mode_expected & ~strip_bits)[1:]

    # Cleanup as root, then drop back to the original uid
    ffs.set_uid(0)
    self.fs.remove_object(test_dir)
    assert os.path.exists(test_dir) is False
    ffs.set_uid(global_uid)
def test_win32(self):
    # On Windows set_permissions() is a no-op: it must do nothing and return None
    result = filesystem.set_permissions(r"F:\who\cares", recursive=False)
    assert result is None
def process_job(nzo):
    """Post-process one finished job (NZO).

    Runs the full pipeline: par2 repair, unpack, move files to the final
    destination, cleanup, optional user script and email notification, and
    finally records the result in the history database.

    Returns False when extra par2 files were queued and the job must go back
    for more downloading first; True in all other cases (the job itself may
    still have failed — the outcome is stored on the nzo and in history).
    """
    start = time.time()

    # keep track of whether we can continue
    all_ok = True
    # keep track of par problems
    par_error = False
    # keep track of any unpacking errors (int: 2/3 trigger the retry link below)
    unpack_error = False
    # Signal empty download, for when 'empty_postproc' is enabled
    empty = False
    nzb_list = []
    # These need to be initialized in case of a crash
    workdir_complete = ""
    script_log = ""
    script_line = ""

    # Get the job flags
    nzo.save_attribs()
    flag_repair, flag_unpack, flag_delete = nzo.repair_opts
    # Normalize PP: delete implies unpack, unpack implies repair
    if flag_delete:
        flag_unpack = True
    if flag_unpack:
        flag_repair = True

    # Get the NZB name
    filename = nzo.final_name

    # Download-processes can mark job as failed
    if nzo.fail_msg:
        nzo.status = Status.FAILED
        nzo.save_attribs()
        all_ok = False
        par_error = True
        unpack_error = 1

    try:
        # Get the folder containing the download result
        workdir = nzo.downpath
        tmp_workdir_complete = None

        # if no files are present (except __admin__), fail the job
        if all_ok and len(globber(workdir)) < 2:
            if nzo.precheck:
                _, ratio = nzo.check_availability_ratio()
                emsg = T("Download might fail, only %s of required %s available") % (ratio, cfg.req_completion_rate())
            else:
                emsg = T("Download failed - Not on your server(s)")
                empty = True
            emsg += " - https://sabnzbd.org/not-complete"
            nzo.fail_msg = emsg
            nzo.set_unpack_info("Fail", emsg)
            nzo.status = Status.FAILED
            # do not run unpacking or parity verification
            flag_repair = flag_unpack = False
            # 'empty_postproc' lets an empty precheck-download continue cleanly
            all_ok = cfg.empty_postproc() and empty
            if not all_ok:
                par_error = True
                unpack_error = 1

        script = nzo.script
        logging.info(
            "Starting Post-Processing on %s => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s",
            filename,
            flag_repair,
            flag_unpack,
            flag_delete,
            script,
            nzo.cat,
        )

        # Set complete dir to workdir in case we need to abort
        workdir_complete = workdir

        # Send post-processing notification
        notifier.send_notification(T("Post-processing"), nzo.final_name, "pp", nzo.cat)

        # Par processing, if enabled
        if all_ok and flag_repair:
            par_error, re_add = parring(nzo, workdir)
            if re_add:
                # Try to get more par files
                return False

        # If we don't need extra par2, we can disconnect
        if sabnzbd.nzbqueue.NzbQueue.do.actives(grabs=False) == 0 and cfg.autodisconnect():
            # This was the last job, close server connections
            sabnzbd.downloader.Downloader.do.disconnect()

        # Sanitize the resulting files
        if sabnzbd.WIN32:
            sanitize_files_in_folder(workdir)

        # Check if user allows unsafe post-processing
        if flag_repair and cfg.safe_postproc():
            all_ok = all_ok and not par_error

        if all_ok:
            # Fix encodings
            fix_unix_encoding(workdir)

            # Use dirs generated by direct-unpacker
            if nzo.direct_unpacker and nzo.direct_unpacker.unpack_dir_info:
                (
                    tmp_workdir_complete,
                    workdir_complete,
                    file_sorter,
                    one_folder,
                    marker_file,
                ) = nzo.direct_unpacker.unpack_dir_info
            else:
                # Generate extraction path
                tmp_workdir_complete, workdir_complete, file_sorter, one_folder, marker_file = prepare_extraction_path(nzo)

            newfiles = []
            # Run Stage 2: Unpack
            if flag_unpack:
                # Set the current nzo status to "Extracting...". Used in History
                nzo.status = Status.EXTRACTING
                logging.info("Running unpack_magic on %s", filename)
                unpack_error, newfiles = unpack_magic(nzo, workdir, tmp_workdir_complete, flag_delete, one_folder, (), (), (), (), ())
                logging.info("Unpacked files %s", newfiles)

                if sabnzbd.WIN32:
                    # Sanitize the resulting files
                    newfiles = sanitize_files_in_folder(tmp_workdir_complete)
                logging.info("Finished unpack_magic on %s", filename)

            if cfg.safe_postproc():
                all_ok = all_ok and not unpack_error

            if all_ok:
                # Move any (left-over) files to destination
                nzo.status = Status.MOVING
                nzo.set_action_line(T("Moving"), "...")
                for root, _dirs, files in os.walk(workdir):
                    if not root.endswith(JOB_ADMIN):
                        for file_ in files:
                            path = os.path.join(root, file_)
                            new_path = path.replace(workdir, tmp_workdir_complete)
                            ok, new_path = move_to_path(path, new_path)
                            if new_path:
                                newfiles.append(new_path)
                            if not ok:
                                nzo.set_unpack_info("Unpack", T("Failed moving %s to %s") % (path, new_path))
                                all_ok = False
                                break

            # Set permissions right
            set_permissions(tmp_workdir_complete)

            if all_ok and marker_file:
                del_marker(os.path.join(tmp_workdir_complete, marker_file))
                remove_from_list(marker_file, newfiles)

            if all_ok:
                # Remove files matching the cleanup list
                cleanup_list(tmp_workdir_complete, skip_nzb=True)

                # Check if this is an NZB-only download, if so redirect to queue
                # except when PP was Download-only
                if flag_repair:
                    nzb_list = nzb_redirect(tmp_workdir_complete, nzo.final_name, nzo.pp, script, nzo.cat, nzo.priority)
                else:
                    nzb_list = None
                if nzb_list:
                    nzo.set_unpack_info("Download", T("Sent %s to queue") % nzb_list)
                    cleanup_empty_directories(tmp_workdir_complete)
                else:
                    # Full cleanup including nzb's
                    cleanup_list(tmp_workdir_complete, skip_nzb=False)

        script_output = ""
        script_ret = 0
        if not nzb_list:
            # Give destination its final name
            if cfg.folder_rename() and tmp_workdir_complete and not one_folder:
                if not all_ok:
                    # Rename failed folders so they are easy to recognize
                    workdir_complete = tmp_workdir_complete.replace("_UNPACK_", "_FAILED_")
                    workdir_complete = get_unique_path(workdir_complete, create_dir=False)
                try:
                    newfiles = rename_and_collapse_folder(tmp_workdir_complete, workdir_complete, newfiles)
                except:
                    logging.error(
                        T('Error renaming "%s" to "%s"'),
                        clip_path(tmp_workdir_complete),
                        clip_path(workdir_complete),
                    )
                    logging.info("Traceback: ", exc_info=True)
                    # Better disable sorting because filenames are all off now
                    file_sorter.sort_file = None

            # Encode the job result for the user script: -1 = empty,
            # bit 0 = par error, bit 1 = unpack error
            if empty:
                job_result = -1
            else:
                job_result = int(par_error) + int(bool(unpack_error)) * 2

            if cfg.ignore_samples():
                remove_samples(workdir_complete)

            # TV/Movie/Date Renaming code part 2 - rename and move files to parent folder
            if all_ok and file_sorter.sort_file:
                if newfiles:
                    file_sorter.rename(newfiles, workdir_complete)
                    workdir_complete, ok = file_sorter.move(workdir_complete)
                else:
                    workdir_complete, ok = file_sorter.rename_with_ext(workdir_complete)
                if not ok:
                    nzo.set_unpack_info("Unpack", T("Failed to move files"))
                    all_ok = False

            if cfg.deobfuscate_final_filenames() and all_ok and not nzb_list:
                # deobfuscate the filenames
                logging.info("Running deobfuscate")
                deobfuscate.deobfuscate_list(newfiles, nzo.final_name)

            # Run the user script
            script_path = make_script_path(script)
            if (all_ok or not cfg.safe_postproc()) and (not nzb_list) and script_path:
                # Set the current nzo status to "Ext Script...". Used in History
                nzo.status = Status.RUNNING
                nzo.set_action_line(T("Running script"), script)
                nzo.set_unpack_info("Script", T("Running user script %s") % script, unique=True)
                script_log, script_ret = external_processing(script_path, nzo, clip_path(workdir_complete), nzo.final_name, job_result)
                script_line = get_last_line(script_log)
                if script_log:
                    script_output = nzo.nzo_id
                if script_line:
                    nzo.set_unpack_info("Script", script_line, unique=True)
                else:
                    nzo.set_unpack_info("Script", T("Ran %s") % script, unique=True)
            else:
                script = ""
                script_line = ""
                script_ret = 0

        # Maybe bad script result should fail job
        if script_ret and cfg.script_can_fail():
            script_error = True
            all_ok = False
            nzo.fail_msg = T("Script exit code is %s") % script_ret
        else:
            script_error = False

        # Email the results
        if (not nzb_list) and cfg.email_endjob():
            if (cfg.email_endjob() == 1) or (cfg.email_endjob() == 2 and (unpack_error or par_error or script_error)):
                emailer.endjob(
                    nzo.final_name,
                    nzo.cat,
                    all_ok,
                    workdir_complete,
                    nzo.bytes_downloaded,
                    nzo.fail_msg,
                    nzo.unpack_info,
                    script,
                    script_log,
                    script_ret,
                )

        if script_output:
            # Can do this only now, otherwise it would show up in the email
            if script_ret:
                script_ret = "Exit(%s) " % script_ret
            else:
                script_ret = ""
            # Only show the "(More)" link when the script log has multiple lines
            if len(script_log.rstrip().split("\n")) > 1:
                nzo.set_unpack_info(
                    "Script",
                    '%s%s <a href="./scriptlog?name=%s">(%s)</a>' % (script_ret, script_line, encoding.xml_name(script_output), T("More")),
                    unique=True,
                )
            else:
                # No '(more)' button needed
                nzo.set_unpack_info("Script", "%s%s " % (script_ret, script_line), unique=True)

        # Cleanup again, including NZB files
        if all_ok:
            cleanup_list(workdir_complete, False)

        # Force error for empty result
        all_ok = all_ok and not empty

        # Update indexer with results
        if cfg.rating_enable():
            if nzo.encrypted > 0:
                Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_ENCRYPTED)
            if empty:
                hosts = [s.host for s in sabnzbd.downloader.Downloader.do.nzo_servers(nzo)]
                if not hosts:
                    hosts = [None]
                for host in hosts:
                    Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_EXPIRED, host)

    except:
        # Catch-all boundary: any unexpected failure marks the job failed
        # but post-processing still finishes (history, cleanup, notification)
        logging.error(T("Post Processing Failed for %s (%s)"), filename, T("see logfile"))
        logging.info("Traceback: ", exc_info=True)
        nzo.fail_msg = T("PostProcessing was aborted (%s)") % T("see logfile")
        notifier.send_notification(T("Download Failed"), filename, "failed", nzo.cat)
        nzo.status = Status.FAILED
        par_error = True
        all_ok = False

        if cfg.email_endjob():
            emailer.endjob(
                nzo.final_name,
                nzo.cat,
                all_ok,
                clip_path(workdir_complete),
                nzo.bytes_downloaded,
                nzo.fail_msg,
                nzo.unpack_info,
                "",
                "",
                0,
            )

    if all_ok:
        # If the folder only contains one file OR folder, have that as the path
        # Be aware that series/generic/date sorting may move a single file into a folder containing other files
        workdir_complete = one_file_or_folder(workdir_complete)
        workdir_complete = os.path.normpath(workdir_complete)

    # Clean up the NZO data
    try:
        nzo.purge_data(delete_all_data=all_ok)
    except:
        logging.error(T("Cleanup of %s failed."), nzo.final_name)
        logging.info("Traceback: ", exc_info=True)

    # Use automatic retry link on par2 errors and encrypted/bad RARs
    if par_error or unpack_error in (2, 3):
        try_alt_nzb(nzo)

    # Show final status in history
    if all_ok:
        notifier.send_notification(T("Download Completed"), filename, "complete", nzo.cat)
        nzo.status = Status.COMPLETED
    else:
        notifier.send_notification(T("Download Failed"), filename, "failed", nzo.cat)
        nzo.status = Status.FAILED

    # Log the overall time taken for postprocessing
    postproc_time = int(time.time() - start)

    # Create the history DB instance
    history_db = database.HistoryDB()
    # Add the nzo to the database. Only the path, script and time taken is passed
    # Other information is obtained from the nzo
    history_db.add_history_db(nzo, workdir_complete, postproc_time, script_log, script_line)
    # Purge items
    history_db.auto_history_purge()
    # The connection is only used once, so close it here
    history_db.close()
    sabnzbd.history_updated()
    return True
def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0):
    """One-time program initialization: folders, config callbacks, one-time
    config migrations, language setup and worker-thread startup.

    Returns False when already initialized, True after a successful run.
    """
    global __INITIALIZED__, __SHUTTING_DOWN__, LOGFILE, WEBLOGFILE, LOGHANDLER, GUIHANDLER, AMBI_LOCALHOST, WAITEXIT, DAEMON, MY_NAME, MY_FULLNAME, NEW_VERSION, DIR_HOME, DIR_APPDATA, DIR_LCLDATA, DIR_PROG, DIR_INTERFACES, DARWIN, RESTART_REQ

    # Guard against double initialization
    if __INITIALIZED__:
        return False

    __SHUTTING_DOWN__ = False

    # Set global database connection for Web-UI threads
    cherrypy.engine.subscribe("start_thread", get_db_connection)

    # Paused?
    pause_downloader = pause_downloader or cfg.start_paused()

    # Clean-up, if requested
    if clean_up:
        # New admin folder
        filesystem.remove_all(cfg.admin_dir.get_path(), "*.sab")

    # Optionally wait for "incomplete" to become online
    if cfg.wait_for_dfolder():
        wait_for_download_folder()
    else:
        cfg.download_dir.set(cfg.download_dir(), create=True)
    cfg.download_dir.set_create(True)

    # Set access rights for "incomplete" base folder
    filesystem.set_permissions(cfg.download_dir.get_path(), recursive=False)

    # If dirscan_dir cannot be created, set a proper value anyway.
    # Maybe it's a network path that's temporarily missing.
    path = cfg.dirscan_dir.get_path()
    if not os.path.exists(path):
        filesystem.create_real_path(cfg.dirscan_dir.ident(), "", path, False)

    # Set call backs for Config items
    cfg.cache_limit.callback(new_limit)
    cfg.cherryhost.callback(guard_restart)
    cfg.cherryport.callback(guard_restart)
    cfg.web_dir.callback(guard_restart)
    cfg.web_color.callback(guard_restart)
    cfg.username.callback(guard_restart)
    cfg.password.callback(guard_restart)
    cfg.log_dir.callback(guard_restart)
    cfg.https_port.callback(guard_restart)
    cfg.https_cert.callback(guard_restart)
    cfg.https_key.callback(guard_restart)
    cfg.enable_https.callback(guard_restart)
    cfg.top_only.callback(guard_top_only)
    cfg.pause_on_post_processing.callback(guard_pause_on_pp)
    cfg.quota_size.callback(guard_quota_size)
    cfg.quota_day.callback(guard_quota_dp)
    cfg.quota_period.callback(guard_quota_dp)
    cfg.language.callback(guard_language)
    cfg.enable_https_verification.callback(guard_https_ver)
    # Apply the https-verification setting right away
    guard_https_ver()

    # Set cache limit; replace old defaults (200M/450M) on Windows/macOS
    if not cfg.cache_limit() or (cfg.cache_limit() in ("200M", "450M") and (sabnzbd.WIN32 or sabnzbd.DARWIN)):
        cfg.cache_limit.set(misc.get_cache_limit())
    ArticleCache.do.new_limit(cfg.cache_limit.get_int())

    check_incomplete_vs_complete()

    # Set language files
    lang.set_locale_info("SABnzbd", DIR_LANGUAGE)
    lang.set_language(cfg.language())
    sabnzbd.api.clear_trans_cache()

    sabnzbd.change_queue_complete_action(cfg.queue_complete(), new=False)

    # One time conversion "speedlimit" in schedules.
    if not cfg.sched_converted():
        schedules = cfg.schedules()
        newsched = []
        for sched in schedules:
            if "speedlimit" in sched:
                # Old speedlimits were KB/s: append the "K" unit suffix
                newsched.append(re.sub(r"(speedlimit \d+)$", r"\1K", sched))
            else:
                newsched.append(sched)
        cfg.schedules.set(newsched)
        cfg.sched_converted.set(1)

    # Second time schedule conversion
    if cfg.sched_converted() != 2:
        cfg.schedules.set(["%s %s" % (1, schedule) for schedule in cfg.schedules()])
        cfg.sched_converted.set(2)
        config.save_config()

    # Convert auto-sort from old boolean-style values
    if cfg.auto_sort() == "0":
        cfg.auto_sort.set("")
    elif cfg.auto_sort() == "1":
        cfg.auto_sort.set("avg_age asc")

    # Add hostname to the whitelist
    if not cfg.host_whitelist():
        cfg.host_whitelist.set(socket.gethostname())

    # Do repair if requested
    if check_repair_request():
        repair = 2
        pause_downloader = True

    # Initialize threads
    rss.init()

    paused = BPSMeter.do.read()
    NzbQueue()
    Downloader(pause_downloader or paused)
    Decoder()
    Assembler()
    PostProcessor()
    NzbQueue.do.read_queue(repair)
    DirScanner()
    Rating()
    URLGrabber()
    scheduler.init()

    if evalSched:
        scheduler.analyse(pause_downloader)

    logging.info("All processes started")
    RESTART_REQ = False
    __INITIALIZED__ = True
    return True
def initialize(pause_downloader=False, clean_up=False, repair=0):
    """One-time program initialization: folders, config callbacks, language
    setup and worker startup (attribute-based singletons on the sabnzbd module).

    Returns False when already initialized, True after a successful run.
    """
    # Guard against double initialization
    if sabnzbd.__INITIALIZED__:
        return False

    sabnzbd.__SHUTTING_DOWN__ = False

    # Set global database connection for Web-UI threads
    cherrypy.engine.subscribe("start_thread", get_db_connection)

    # Paused?
    pause_downloader = pause_downloader or cfg.start_paused()

    # Clean-up, if requested
    if clean_up:
        # New admin folder
        filesystem.remove_all(cfg.admin_dir.get_path(), "*.sab")

    # Optionally wait for "incomplete" to become online
    if cfg.wait_for_dfolder():
        wait_for_download_folder()
    else:
        cfg.download_dir.set(cfg.download_dir(), create=True)
    cfg.download_dir.set_create(True)

    # Set access rights for "incomplete" base folder
    filesystem.set_permissions(cfg.download_dir.get_path(), recursive=False)

    # If dirscan_dir cannot be created, set a proper value anyway.
    # Maybe it's a network path that's temporarily missing.
    path = cfg.dirscan_dir.get_path()
    if not os.path.exists(path):
        filesystem.create_real_path(cfg.dirscan_dir.ident(), "", path, False)

    # Set call backs for Config items
    cfg.cache_limit.callback(new_limit)
    cfg.cherryhost.callback(guard_restart)
    cfg.cherryport.callback(guard_restart)
    cfg.web_dir.callback(guard_restart)
    cfg.web_color.callback(guard_restart)
    cfg.username.callback(guard_restart)
    cfg.password.callback(guard_restart)
    cfg.log_dir.callback(guard_restart)
    cfg.https_port.callback(guard_restart)
    cfg.https_cert.callback(guard_restart)
    cfg.https_key.callback(guard_restart)
    cfg.enable_https.callback(guard_restart)
    cfg.top_only.callback(guard_top_only)
    cfg.pause_on_post_processing.callback(guard_pause_on_pp)
    cfg.quota_size.callback(guard_quota_size)
    cfg.quota_day.callback(guard_quota_dp)
    cfg.quota_period.callback(guard_quota_dp)
    cfg.language.callback(guard_language)
    cfg.enable_https_verification.callback(guard_https_ver)
    # Apply the https-verification setting right away
    guard_https_ver()

    check_incomplete_vs_complete()

    # Set language files
    lang.set_locale_info("SABnzbd", DIR_LANGUAGE)
    lang.set_language(cfg.language())
    sabnzbd.api.clear_trans_cache()

    # Set end-of-queue action
    sabnzbd.change_queue_complete_action(cfg.queue_complete(), new=False)

    # Convert auto-sort from old boolean-style values
    if cfg.auto_sort() == "0":
        cfg.auto_sort.set("")
    elif cfg.auto_sort() == "1":
        cfg.auto_sort.set("avg_age asc")

    # Add hostname to the whitelist
    if not cfg.host_whitelist():
        cfg.host_whitelist.set(socket.gethostname())

    # Do repair if requested
    if check_repair_request():
        repair = 2
        pause_downloader = True

    # Initialize threads
    sabnzbd.ArticleCache = sabnzbd.articlecache.ArticleCache()
    sabnzbd.BPSMeter = sabnzbd.bpsmeter.BPSMeter()
    sabnzbd.NzbQueue = sabnzbd.nzbqueue.NzbQueue()
    sabnzbd.Downloader = sabnzbd.downloader.Downloader(sabnzbd.BPSMeter.read() or pause_downloader)
    sabnzbd.Decoder = sabnzbd.decoder.Decoder()
    sabnzbd.Assembler = sabnzbd.assembler.Assembler()
    sabnzbd.PostProcessor = sabnzbd.postproc.PostProcessor()
    sabnzbd.DirScanner = sabnzbd.dirscanner.DirScanner()
    sabnzbd.Rating = sabnzbd.rating.Rating()
    sabnzbd.URLGrabber = sabnzbd.urlgrabber.URLGrabber()
    sabnzbd.RSSReader = sabnzbd.rss.RSSReader()
    sabnzbd.Scheduler = sabnzbd.scheduler.Scheduler()

    # Run startup tasks
    sabnzbd.NzbQueue.read_queue(repair)
    sabnzbd.Scheduler.analyse(pause_downloader)

    # Set cache limit for new users
    if not cfg.cache_limit():
        cfg.cache_limit.set(misc.get_cache_limit())
    sabnzbd.ArticleCache.new_limit(cfg.cache_limit.get_int())

    logging.info("All processes started")
    sabnzbd.RESTART_REQ = False
    sabnzbd.__INITIALIZED__ = True
    # Fix: the already-initialized path returns False, but the success path
    # fell off the end returning None (falsy), so truthiness checks by callers
    # could not distinguish success from "already initialized". Return True
    # on success, consistent with the sibling initialize() variant.
    return True