Example #1
    def repair_job(self, repair_folder, new_nzb=None, password=None):
        """ Reconstruct admin for a single job folder, optionally with new NZB """
        # Check if folder exists
        if not repair_folder or not os.path.exists(repair_folder):
            return None

        name = os.path.basename(repair_folder)
        admin_path = os.path.join(repair_folder, JOB_ADMIN)

        # If Retry was used and a new NZB was uploaded
        if getattr(new_nzb, "filename", None):
            remove_all(admin_path, "*.gz", keep_folder=True)
            logging.debug("Repair job %s with new NZB (%s)", name,
                          new_nzb.filename)
            _, nzo_ids = sabnzbd.add_nzbfile(new_nzb,
                                             nzbname=name,
                                             reuse=repair_folder,
                                             password=password)
            nzo_id = nzo_ids[0]
        else:
            # Was this file already post-processed?
            verified = sabnzbd.load_data(VERIFIED_FILE,
                                         admin_path,
                                         remove=False)
            filenames = []
            if not verified or not all(verified[x] for x in verified):
                filenames = globber_full(admin_path, "*.gz")

            if filenames:
                logging.debug("Repair job %s by re-parsing stored NZB", name)
                _, nzo_ids = sabnzbd.add_nzbfile(filenames[0],
                                                 nzbname=name,
                                                 reuse=repair_folder,
                                                 password=password)
                nzo_id = nzo_ids[0]
            else:
                logging.debug("Repair job %s without stored NZB", name)
                nzo = NzbObject(name, nzbname=name, reuse=repair_folder)
                nzo.password = password
                self.add(nzo)
                nzo_id = nzo.nzo_id

        return nzo_id
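
For context, a minimal sketch of how this method might be called when a job is retried without a replacement NZB. The folder path is a placeholder, and the NzbQueue.do singleton access follows the older convention visible in example #3; both are assumptions for illustration, not the exact call SABnzbd makes.

# Hypothetical call; repair_folder and password are illustrative values only.
from sabnzbd.nzbqueue import NzbQueue

repair_folder = "/downloads/incomplete/Some.Job"
nzo_id = NzbQueue.do.repair_job(repair_folder, new_nzb=None, password=None)
if nzo_id is None:
    print("Job folder does not exist, nothing to repair")
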
Example #2
    def abort(self):
        """ Abort running instance and delete generated files """
        if not self.killed and self.cur_setname:
            logging.info("Aborting DirectUnpack for %s", self.cur_setname)
            self.killed = True

            # Save reference to the first rarfile
            rarfile_nzf = self.rarfile_nzf

            # Abort Unrar
            if self.active_instance:
                # First we try to abort gracefully
                try:
                    self.active_instance.stdin.write(b"Q\n")
                    time.sleep(0.2)
                except IOError:
                    pass

                # Now force kill and give it a bit of time
                try:
                    self.active_instance.kill()
                    time.sleep(0.2)
                except AttributeError:
                    # Already killed by the Quit command
                    pass

            # Wake up the thread
            with self.next_file_lock:
                self.next_file_lock.notify()

            # No new sets
            self.next_sets = []
            self.success_sets = {}

            # Remove files
            if self.unpack_dir_info:
                extraction_path, _, _, one_folder, _ = self.unpack_dir_info
                # In case of flat-unpack we need to remove the files manually
                if one_folder:
                    # RarFile can fail for mysterious reasons
                    try:
                        rar_contents = RarFile(
                            os.path.join(self.nzo.downpath,
                                         rarfile_nzf.filename),
                            single_file_check=True).filelist()
                        for rm_file in rar_contents:
                            # Flat-unpack, so remove foldername from RarFile output
                            f = os.path.join(extraction_path,
                                             os.path.basename(rm_file))
                            remove_file(f)
                    except Exception:
                        # The user will have to remove it themselves
                        logging.info(
                            "Failed to clean Direct Unpack after aborting %s",
                            rarfile_nzf.filename,
                            exc_info=True)
                else:
                    # We can just remove the whole path
                    remove_all(extraction_path, recursive=True)
                # Remove dir-info
                self.unpack_dir_info = None

            # Reset settings
            self.reset_active()
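
The shutdown used above is a general "ask nicely, then kill" pattern: first request a graceful quit over stdin, give the process a moment, then force-kill it. A standalone sketch of the same idea, independent of SABnzbd and using a stand-in child process:

# Standalone sketch of the graceful-then-forced shutdown pattern from abort().
import subprocess
import sys
import time

# Stand-in child that exits once it reads a "Q" line on stdin.
child = subprocess.Popen(
    [sys.executable, "-c",
     "import sys\nfor line in sys.stdin:\n    if line.strip() == 'Q':\n        break"],
    stdin=subprocess.PIPE,
)

# Step 1: request a graceful quit, as abort() does with UnRAR.
try:
    child.stdin.write(b"Q\n")
    child.stdin.flush()
    time.sleep(0.2)
except OSError:
    pass  # pipe already closed; the child is probably gone

# Step 2: force-kill in case the request was ignored.
try:
    child.kill()
except OSError:
    pass  # already exited
child.wait()
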
Example #3
def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0):
    global __INITIALIZED__, __SHUTTING_DOWN__, LOGFILE, WEBLOGFILE, LOGHANDLER, GUIHANDLER, AMBI_LOCALHOST, WAITEXIT, DAEMON, MY_NAME, MY_FULLNAME, NEW_VERSION, DIR_HOME, DIR_APPDATA, DIR_LCLDATA, DIR_PROG, DIR_INTERFACES, DARWIN, RESTART_REQ

    if __INITIALIZED__:
        return False

    __SHUTTING_DOWN__ = False

    # Set global database connection for Web-UI threads
    cherrypy.engine.subscribe("start_thread", get_db_connection)

    # Paused?
    pause_downloader = pause_downloader or cfg.start_paused()

    # Clean-up, if requested
    if clean_up:
        # New admin folder
        filesystem.remove_all(cfg.admin_dir.get_path(), "*.sab")

    # Optionally wait for "incomplete" to become online
    if cfg.wait_for_dfolder():
        wait_for_download_folder()
    else:
        cfg.download_dir.set(cfg.download_dir(), create=True)
    cfg.download_dir.set_create(True)

    # Set access rights for "incomplete" base folder
    filesystem.set_permissions(cfg.download_dir.get_path(), recursive=False)

    # If dirscan_dir cannot be created, set a proper value anyway.
    # Maybe it's a network path that's temporarily missing.
    path = cfg.dirscan_dir.get_path()
    if not os.path.exists(path):
        filesystem.create_real_path(cfg.dirscan_dir.ident(), "", path, False)

    # Set call backs for Config items
    cfg.cache_limit.callback(new_limit)
    cfg.cherryhost.callback(guard_restart)
    cfg.cherryport.callback(guard_restart)
    cfg.web_dir.callback(guard_restart)
    cfg.web_color.callback(guard_restart)
    cfg.username.callback(guard_restart)
    cfg.password.callback(guard_restart)
    cfg.log_dir.callback(guard_restart)
    cfg.https_port.callback(guard_restart)
    cfg.https_cert.callback(guard_restart)
    cfg.https_key.callback(guard_restart)
    cfg.enable_https.callback(guard_restart)
    cfg.top_only.callback(guard_top_only)
    cfg.pause_on_post_processing.callback(guard_pause_on_pp)
    cfg.quota_size.callback(guard_quota_size)
    cfg.quota_day.callback(guard_quota_dp)
    cfg.quota_period.callback(guard_quota_dp)
    cfg.language.callback(guard_language)
    cfg.enable_https_verification.callback(guard_https_ver)
    guard_https_ver()

    # Set cache limit
    if not cfg.cache_limit() or (cfg.cache_limit() in ("200M", "450M") and (sabnzbd.WIN32 or sabnzbd.DARWIN)):
        cfg.cache_limit.set(misc.get_cache_limit())
    ArticleCache.do.new_limit(cfg.cache_limit.get_int())

    check_incomplete_vs_complete()

    # Set language files
    lang.set_locale_info("SABnzbd", DIR_LANGUAGE)
    lang.set_language(cfg.language())
    sabnzbd.api.clear_trans_cache()

    sabnzbd.change_queue_complete_action(cfg.queue_complete(), new=False)

    # One time conversion "speedlimit" in schedules.
    if not cfg.sched_converted():
        schedules = cfg.schedules()
        newsched = []
        for sched in schedules:
            if "speedlimit" in sched:
                newsched.append(re.sub(r"(speedlimit \d+)$", r"\1K", sched))
            else:
                newsched.append(sched)
        cfg.schedules.set(newsched)
        cfg.sched_converted.set(1)

    # Second time schedule conversion
    if cfg.sched_converted() != 2:
        cfg.schedules.set(["%s %s" % (1, schedule) for schedule in cfg.schedules()])
        cfg.sched_converted.set(2)
        config.save_config()

    # Convert auto-sort
    if cfg.auto_sort() == "0":
        cfg.auto_sort.set("")
    elif cfg.auto_sort() == "1":
        cfg.auto_sort.set("avg_age asc")

    # Add hostname to the whitelist
    if not cfg.host_whitelist():
        cfg.host_whitelist.set(socket.gethostname())

    # Do repair if requested
    if check_repair_request():
        repair = 2
        pause_downloader = True

    # Initialize threads
    rss.init()

    paused = BPSMeter.do.read()

    NzbQueue()

    Downloader(pause_downloader or paused)

    Decoder()

    Assembler()

    PostProcessor()

    NzbQueue.do.read_queue(repair)

    DirScanner()

    Rating()

    URLGrabber()

    scheduler.init()

    if evalSched:
        scheduler.analyse(pause_downloader)

    logging.info("All processes started")
    RESTART_REQ = False
    __INITIALIZED__ = True
    return True
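
A minimal sketch of how this older initialize() is driven at startup; the argument values here are placeholders, not the values SABnzbd itself passes from its entry point.

# Hypothetical startup call; argument values are placeholders.
import sabnzbd

if sabnzbd.initialize(pause_downloader=True, clean_up=False, evalSched=True, repair=1):
    print("Core initialized, queue repair level 1 requested")
else:
    print("Already initialized")
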
Example #4
def initialize(pause_downloader=False, clean_up=False, repair=0):
    if sabnzbd.__INITIALIZED__:
        return False

    sabnzbd.__SHUTTING_DOWN__ = False

    # Set global database connection for Web-UI threads
    cherrypy.engine.subscribe("start_thread", get_db_connection)

    # Paused?
    pause_downloader = pause_downloader or cfg.start_paused()

    # Clean-up, if requested
    if clean_up:
        # New admin folder
        filesystem.remove_all(cfg.admin_dir.get_path(), "*.sab")

    # Optionally wait for "incomplete" to become online
    if cfg.wait_for_dfolder():
        wait_for_download_folder()
    else:
        cfg.download_dir.set(cfg.download_dir(), create=True)
    cfg.download_dir.set_create(True)

    # Set access rights for "incomplete" base folder
    filesystem.set_permissions(cfg.download_dir.get_path(), recursive=False)

    # If dirscan_dir cannot be created, set a proper value anyway.
    # Maybe it's a network path that's temporarily missing.
    path = cfg.dirscan_dir.get_path()
    if not os.path.exists(path):
        filesystem.create_real_path(cfg.dirscan_dir.ident(), "", path, False)

    # Set call backs for Config items
    cfg.cache_limit.callback(new_limit)
    cfg.cherryhost.callback(guard_restart)
    cfg.cherryport.callback(guard_restart)
    cfg.web_dir.callback(guard_restart)
    cfg.web_color.callback(guard_restart)
    cfg.username.callback(guard_restart)
    cfg.password.callback(guard_restart)
    cfg.log_dir.callback(guard_restart)
    cfg.https_port.callback(guard_restart)
    cfg.https_cert.callback(guard_restart)
    cfg.https_key.callback(guard_restart)
    cfg.enable_https.callback(guard_restart)
    cfg.top_only.callback(guard_top_only)
    cfg.pause_on_post_processing.callback(guard_pause_on_pp)
    cfg.quota_size.callback(guard_quota_size)
    cfg.quota_day.callback(guard_quota_dp)
    cfg.quota_period.callback(guard_quota_dp)
    cfg.language.callback(guard_language)
    cfg.enable_https_verification.callback(guard_https_ver)
    guard_https_ver()

    check_incomplete_vs_complete()

    # Set language files
    lang.set_locale_info("SABnzbd", DIR_LANGUAGE)
    lang.set_language(cfg.language())
    sabnzbd.api.clear_trans_cache()

    # Set end-of-queue action
    sabnzbd.change_queue_complete_action(cfg.queue_complete(), new=False)

    # Convert auto-sort
    if cfg.auto_sort() == "0":
        cfg.auto_sort.set("")
    elif cfg.auto_sort() == "1":
        cfg.auto_sort.set("avg_age asc")

    # Add hostname to the whitelist
    if not cfg.host_whitelist():
        cfg.host_whitelist.set(socket.gethostname())

    # Do repair if requested
    if check_repair_request():
        repair = 2
        pause_downloader = True

    # Initialize threads
    sabnzbd.ArticleCache = sabnzbd.articlecache.ArticleCache()
    sabnzbd.BPSMeter = sabnzbd.bpsmeter.BPSMeter()
    sabnzbd.NzbQueue = sabnzbd.nzbqueue.NzbQueue()
    sabnzbd.Downloader = sabnzbd.downloader.Downloader(sabnzbd.BPSMeter.read() or pause_downloader)
    sabnzbd.Decoder = sabnzbd.decoder.Decoder()
    sabnzbd.Assembler = sabnzbd.assembler.Assembler()
    sabnzbd.PostProcessor = sabnzbd.postproc.PostProcessor()
    sabnzbd.DirScanner = sabnzbd.dirscanner.DirScanner()
    sabnzbd.Rating = sabnzbd.rating.Rating()
    sabnzbd.URLGrabber = sabnzbd.urlgrabber.URLGrabber()
    sabnzbd.RSSReader = sabnzbd.rss.RSSReader()
    sabnzbd.Scheduler = sabnzbd.scheduler.Scheduler()

    # Run startup tasks
    sabnzbd.NzbQueue.read_queue(repair)
    sabnzbd.Scheduler.analyse(pause_downloader)

    # Set cache limit for new users
    if not cfg.cache_limit():
        cfg.cache_limit.set(misc.get_cache_limit())
    sabnzbd.ArticleCache.new_limit(cfg.cache_limit.get_int())

    logging.info("All processes started")
    sabnzbd.RESTART_REQ = False
    sabnzbd.__INITIALIZED__ = True
    return True
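
Example #4 is the later rewrite of the same startup routine: the Class.do singletons from example #3 become plain attributes on the sabnzbd package, and the scheduler analysis plus the cache-limit setup move inside initialize() itself. A sketch of how the components are reached afterwards; the attribute names follow the assignments above, everything else is illustrative.

# Hypothetical post-initialization access; mirrors the assignments above.
import sabnzbd

if sabnzbd.initialize(pause_downloader=False, repair=0):
    # Components are now package attributes, e.g. sabnzbd.NzbQueue
    # instead of the older NzbQueue.do singleton.
    queue = sabnzbd.NzbQueue
    downloader = sabnzbd.Downloader
    print("Queue and downloader ready:", queue, downloader)
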