Esempio n. 1
0
 def read(self):
     """Read the byte-counter admin from disk.

     Restores totals, quota state and (optionally) the timeline stats
     saved by a previous session, falling back to the old admin file
     name and then to defaults when stored data is missing or corrupt.

     Returns True when a pause is needed (quota exhausted).
     """
     res = False
     quota = self.left = cfg.quota_size.get_float()  # Quota for this period
     self.have_quota = bool(cfg.quota_size())
     data = sabnzbd.load_admin(BYTES_FILE_NAME)
     if not data:
         # Fall back to the pre-rename admin file and convert its keys
         data = sabnzbd.load_admin(BYTES_FILE_NAME_OLD)
         data = fix_keys(data)
     try:
         self.last_update, self.grand_total, \
             self.day_total, self.week_total, self.month_total, \
             self.end_of_day, self.end_of_week, self.end_of_month = data[:8]
         if len(data) >= 11:
             self.quota, self.left, self.q_time = data[8:11]
             logging.debug('Read quota q=%s l=%s reset=%s',
                           self.quota, self.left, self.q_time)
             # A configured quota that differs from the stored one means
             # the user changed the setting while we were not running
             if abs(quota - self.quota) > 0.5:
                 self.change_quota()
             # Get timeline stats
             if len(data) == 12:
                 self.timeline_total = data[11]
         else:
             self.quota = self.left = cfg.quota_size.get_float()
         res = self.reset_quota()
     except Exception:
         # Narrowed from a bare "except:" which also swallowed
         # SystemExit/KeyboardInterrupt; log before resetting to defaults
         logging.info('Traceback: ', exc_info=True)
         self.defaults()
     # Force update of counters and validate data
     try:
         for server in self.grand_total.keys():
             self.update(server)
     except TypeError:
         # grand_total was not a dict: stored data was invalid
         self.defaults()
         self.update()
     return res
Esempio n. 2
0
 def read(self):
     """Read byte-counter admin from disk, return True when pause is needed.

     Falls back to the old admin file name, then to defaults when the
     stored data cannot be unpacked.
     """
     res = False
     quota = self.left = cfg.quota_size.get_float()  # Quota for this period
     self.have_quota = bool(cfg.quota_size())
     data = sabnzbd.load_admin(BYTES_FILE_NAME)
     if not data:
         # Older releases used a different file name and key layout
         data = sabnzbd.load_admin(BYTES_FILE_NAME_OLD)
         data = fix_keys(data)
     try:
         self.last_update, self.grand_total, \
             self.day_total, self.week_total, self.month_total, \
             self.end_of_day, self.end_of_week, self.end_of_month = data[:8]
         if len(data) >= 11:
             self.quota, self.left, self.q_time = data[8:11]
             logging.debug('Read quota q=%s l=%s reset=%s', self.quota,
                           self.left, self.q_time)
             # Configured quota differs from the stored one: user changed it
             if abs(quota - self.quota) > 0.5:
                 self.change_quota()
             # Get timeline stats
             if len(data) == 12:
                 self.timeline_total = data[11]
         else:
             self.quota = self.left = cfg.quota_size.get_float()
         res = self.reset_quota()
     except:
         # Any unreadable/short data (including data being None) resets state
         self.defaults()
     # Force update of counters and validate data
     try:
         for server in self.grand_total.keys():
             self.update(server)
     except TypeError:
         # grand_total was not a dict: stored data was invalid
         self.defaults()
         self.update()
     return res
Esempio n. 3
0
    def __init__(self):
        """Set up the directory scanner and restore persisted scan state."""
        super().__init__()

        # NOTE(review): newdir() appears to initialize self.dirscan_dir,
        # which the comparison below relies on -- confirm against newdir()
        self.newdir()
        try:
            dirscan_dir, self.ignored, self.suspected = sabnzbd.load_admin(
                SCAN_FILE_NAME)
            # Saved state belongs to a different watched folder: discard it
            if dirscan_dir != self.dirscan_dir:
                self.ignored = {}
                self.suspected = {}
        except:
            # Missing or corrupt admin file: start with empty bookkeeping
            self.ignored = {}  # Will hold all unusable files and the
            # successfully processed ones that cannot be deleted
            self.suspected = {
            }  # Will hold name/attributes of suspected candidates

        self.loop_condition = threading.Condition(threading.Lock())
        self.shutdown = False
        self.error_reported = False  # Prevents multiple reporting of missing watched folder
        self.dirscan_dir = cfg.dirscan_dir.get_path()
        self.dirscan_speed = cfg.dirscan_speed(
        ) or None  # If set to 0, use None so the wait() is forever
        self.busy = False
        # Re-read settings when the user changes them
        cfg.dirscan_dir.callback(self.newdir)
        cfg.dirscan_speed.callback(self.newspeed)
Esempio n. 4
0
 def read(self):
     """Read byte-counter admin from disk.

     On failure, seed the totals from the history database under a
     fake server key 'x'. Returns the result of reset_quota() on
     success, False on failure.
     """
     quota = self.left = cfg.quota_size.get_float() # Quota for this period
     self.have_quota = bool(cfg.quota_size())
     data = sabnzbd.load_admin(BYTES_FILE_NAME)
     try:
         self.last_update, self.grand_total, \
         self.day_total, self.week_total, self.month_total, \
         self.end_of_day, self.end_of_week, self.end_of_month = data[:8]
         if len(data) == 11:
             self.quota, self.left, self.q_time = data[8:]
             logging.debug('Read quota q=%s l=%s reset=%s',
                           self.quota, self.left, self.q_time)
             # Configured quota differs from the stored one: user changed it
             if abs(quota - self.quota) > 0.5:
                 self.change_quota()
         else:
             self.quota = self.left = cfg.quota_size.get_float()
         res = self.reset_quota()
     except:
         # Get the latest data from the database and assign to a fake server
         logging.debug('Setting default BPS meter values')
         grand, month, week  = sabnzbd.proxy_get_history_size()
         if grand: self.grand_total['x'] = grand
         if month: self.month_total['x'] = month
         if week:  self.week_total['x'] = week
         self.quota = self.left = cfg.quota_size.get_float()
         res = False
     # Force update of counters
     self.update()
     return res
Esempio n. 5
0
    def read(self):
        """Read admin from disk, return True when pause is needed.

        Restores counters, quota state and (when present) the article
        statistics added in 3.2.x. Any unpack failure -- including
        ``data`` being ``None`` -- resets everything to defaults.
        """
        res = False
        quota = self.left = cfg.quota_size.get_float()  # Quota for this period
        self.have_quota = bool(cfg.quota_size())
        data = sabnzbd.load_admin(BYTES_FILE_NAME)
        try:
            (
                self.last_update,
                self.grand_total,
                self.day_total,
                self.week_total,
                self.month_total,
                self.end_of_day,
                self.end_of_week,
                self.end_of_month,
                self.quota,
                self.left,
                self.q_time,
                self.timeline_total,
            ) = data[:12]

            # Article statistics were only added in 3.2.x
            if len(data) > 12:
                self.article_stats_tried, self.article_stats_failed = data[12:14]

            # Trigger quota actions when the configured quota changed
            if abs(quota - self.quota) > 0.5:
                self.change_quota()
            res = self.reset_quota()
        except Exception:
            # Narrowed from a bare "except:" which also swallowed
            # SystemExit/KeyboardInterrupt; log before starting fresh
            logging.info("Traceback: ", exc_info=True)
            self.defaults()
        return res
Esempio n. 6
0
    def __init__(self):
        """Set up the directory scanner thread and restore persisted state."""
        threading.Thread.__init__(self)

        # NOTE(review): newdir() appears to set self.dirscan_dir, used below
        self.newdir()
        try:
            # NOTE(review): "dir" shadows the builtin of the same name
            dir, self.ignored, self.suspected = sabnzbd.load_admin(
                SCAN_FILE_NAME)
            # Saved state belongs to a different watched folder: discard it
            if dir != self.dirscan_dir:
                self.ignored = {}
                self.suspected = {}
        except:
            # Missing or corrupt admin file: start with empty bookkeeping
            self.ignored = {}  # Will hold all unusable files and the
            # successfully processed ones that cannot be deleted
            self.suspected = {
            }  # Will hold name/attributes of suspected candidates

        self.shutdown = False
        self.error_reported = False  # Prevents mulitple reporting of missing watched folder
        self.dirscan_dir = cfg.dirscan_dir.get_path()
        self.dirscan_speed = cfg.dirscan_speed()
        self.busy = False
        self.trigger = False
        # Re-read settings when the user changes them
        cfg.dirscan_dir.callback(self.newdir)
        cfg.dirscan_speed.callback(self.newspeed)
        # Singleton-style handle used elsewhere in the project
        DirScanner.do = self
Esempio n. 7
0
    def __init__(self):
        """Restore RSS feed job data from disk, dropping invalid entries.

        Only feeds still present in the configuration are kept; each
        stored item must be a dict with a unicode 'title' (Python 2
        codebase -- note the `unicode` check).
        """
        self.jobs = {}
        self.next_run = time.time()
        self.shutdown = False

        try:
            defined = config.get_rss().keys()
            feeds = sabnzbd.load_admin(RSS_FILE_NAME)
            if type(feeds) == type({}):
                for feed in feeds:
                    if feed not in defined:
                        logging.debug('Dropping obsolete data for feed "%s"',
                                      feed)
                        continue
                    self.jobs[feed] = {}
                    for link in feeds[feed]:
                        # Consistency check on data
                        try:
                            item = feeds[feed][link]
                            # Malformed entries are rejected via IndexError
                            if not isinstance(item, dict) or not isinstance(
                                    item.get('title'), unicode):
                                raise IndexError
                            self.jobs[feed][link] = item
                        except (KeyError, IndexError):
                            logging.info(
                                'Incorrect entry in %s detected, discarding %s',
                                RSS_FILE_NAME, item)
                    remove_obsolete(self.jobs[feed], self.jobs[feed].keys())
        except IOError:
            logging.debug('Cannot read file %s', RSS_FILE_NAME)
Esempio n. 8
0
 def read(self):
     """Read admin from disk, return True when pause is needed.

     Expects exactly twelve stored values; any mismatch (including
     ``data`` being ``None``) resets the meter to defaults. Afterwards
     the per-server counters are refreshed and validated.
     """
     res = False
     quota = self.left = cfg.quota_size.get_float()  # Quota for this period
     self.have_quota = bool(cfg.quota_size())
     data = sabnzbd.load_admin(BYTES_FILE_NAME)
     try:
         (
             self.last_update,
             self.grand_total,
             self.day_total,
             self.week_total,
             self.month_total,
             self.end_of_day,
             self.end_of_week,
             self.end_of_month,
             self.quota,
             self.left,
             self.q_time,
             self.timeline_total,
         ) = data
         # Configured quota differs from the stored one: user changed it
         if abs(quota - self.quota) > 0.5:
             self.change_quota()
         res = self.reset_quota()
     except Exception:
         # Narrowed from a bare "except:" which also swallowed
         # SystemExit/KeyboardInterrupt; log before starting fresh
         logging.info("Traceback: ", exc_info=True)
         self.defaults()
     # Force update of counters and validate data
     try:
         for server in self.grand_total.keys():
             self.update(server)
     except TypeError:
         # grand_total was not a dict: stored data was invalid
         self.defaults()
         self.update()
     return res
Esempio n. 9
0
    def read_queue(self, repair):
        """Read queue from disk, supporting repair modes
        0 = no repairs
        1 = use existing queue, add missing "incomplete" folders
        2 = Discard all queue admin, reconstruct from "incomplete" folders
        """
        nzo_ids = []
        if repair < 2:
            # Try to process the queue file
            try:
                data = sabnzbd.load_admin(QUEUE_FILE_NAME)
                if data:
                    queue_vers, nzo_ids, _ = data
                    # A version mismatch invalidates the whole queue file
                    if not queue_vers == QUEUE_VERSION:
                        nzo_ids = []
                        logging.error(T("Incompatible queuefile found, cannot proceed"))
                        if not repair:
                            panic_queue(os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME))
                            exit_sab(2)
            except:
                # Corrupt file: continue with an empty queue
                nzo_ids = []
                logging.error(
                    T("Error loading %s, corrupt file detected"),
                    os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME),
                )

        # First handle jobs in the queue file
        folders = []
        for nzo_id in nzo_ids:
            folder, _id = os.path.split(nzo_id)
            path = get_admin_path(folder, future=False)

            # Try as normal job
            nzo = sabnzbd.load_data(_id, path, remove=False)
            if not nzo:
                # Try as future job
                path = get_admin_path(folder, future=True)
                nzo = sabnzbd.load_data(_id, path)
            if nzo:
                self.add(nzo, save=False, quiet=True)
                folders.append(folder)

        # Scan for any folders in "incomplete" that are not yet in the queue
        if repair:
            logging.info("Starting queue repair")
            self.scan_jobs(not folders)
            # Handle any lost future jobs
            for item in globber_full(os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)):
                path, nzo_id = os.path.split(item)
                if nzo_id not in self.__nzo_table:
                    if nzo_id.startswith("SABnzbd_nzo"):
                        nzo = sabnzbd.load_data(nzo_id, path, remove=True)
                        if nzo:
                            self.add(nzo, save=True)
                    else:
                        # Unknown leftover file: best-effort removal
                        try:
                            remove_file(item)
                        except:
                            pass
Esempio n. 10
0
    def read_queue(self, repair):
        """ Read queue from disk, supporting repair modes
            0 = no repairs
            1 = use existing queue, add missing "incomplete" folders
            2 = Discard all queue admin, reconstruct from "incomplete" folders
        """
        nzo_ids = []
        if repair < 2:
            # Read the queue from the saved files
            data = sabnzbd.load_admin(QUEUE_FILE_NAME)
            if data:
                try:
                    queue_vers, nzo_ids, dummy = data
                    # A version mismatch invalidates the whole queue file
                    if not queue_vers == QUEUE_VERSION:
                        nzo_ids = []
                        logging.error(
                            Ta('Incompatible queuefile found, cannot proceed'))
                        if not repair:
                            panic_queue(
                                os.path.join(cfg.cache_dir.get_path(),
                                             QUEUE_FILE_NAME))
                            exit_sab(2)
                except ValueError:
                    # Wrong tuple shape: the file is corrupt
                    nzo_ids = []
                    logging.error(
                        Ta('Error loading %s, corrupt file detected'),
                        os.path.join(cfg.cache_dir.get_path(),
                                     QUEUE_FILE_NAME))
                    if not repair:
                        return

        # First handle jobs in the queue file
        folders = []
        for nzo_id in nzo_ids:
            folder, _id = os.path.split(nzo_id)
            # Try as normal job
            path = get_admin_path(bool(folder), folder, False)
            nzo = sabnzbd.load_data(_id, path, remove=False)
            if not nzo:
                # Try as future job
                path = get_admin_path(bool(folder), folder, True)
                nzo = sabnzbd.load_data(_id, path)
            if nzo:
                self.add(nzo, save=False, quiet=True)
                folders.append(folder)

        # Scan for any folders in "incomplete" that are not yet in the queue
        if repair:
            self.scan_jobs(not folders)
            # Handle any lost future jobs
            for path in globber(
                    os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)):
                path, nzo_id = os.path.split(path)
                if nzo_id not in self.__nzo_table:
                    nzo = sabnzbd.load_data(nzo_id, path, remove=True)
                    if nzo:
                        self.add(nzo, save=True)
Esempio n. 11
0
    def read_queue(self, repair):
        """ Read queue from disk, supporting repair modes
            0 = no repairs
            1 = use existing queue, add missing "incomplete" folders
            2 = Discard all queue admin, reconstruct from "incomplete" folders
        """
        nzo_ids = []
        if repair < 2:
            # Read the queue from the saved files
            data = sabnzbd.load_admin(QUEUE_FILE_NAME)
            if data:
                try:
                    queue_vers, nzo_ids, dummy = data
                    # A version mismatch invalidates the whole queue file
                    if not queue_vers == QUEUE_VERSION:
                        nzo_ids = []
                        logging.error(Ta("Incompatible queuefile found, cannot proceed"))
                        if not repair:
                            panic_queue(os.path.join(cfg.cache_dir.get_path(), QUEUE_FILE_NAME))
                            exit_sab(2)
                except ValueError:
                    # Wrong tuple shape: the file is corrupt
                    nzo_ids = []
                    logging.error(
                        Ta("Error loading %s, corrupt file detected"),
                        os.path.join(cfg.cache_dir.get_path(), QUEUE_FILE_NAME),
                    )
                    if not repair:
                        return

        # First handle jobs in the queue file
        folders = []
        for nzo_id in nzo_ids:
            folder, _id = os.path.split(nzo_id)
            # Try as normal job
            path = get_admin_path(bool(folder), folder, False)
            nzo = sabnzbd.load_data(_id, path, remove=False)
            if not nzo:
                # Try as future job
                path = get_admin_path(bool(folder), folder, True)
                nzo = sabnzbd.load_data(_id, path)
            if nzo:
                self.add(nzo, save=False, quiet=True)
                folders.append(folder)

        # Scan for any folders in "incomplete" that are not yet in the queue
        if repair:
            self.scan_jobs(not folders)
            # Handle any lost future jobs
            for path in globber(os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)):
                path, nzo_id = os.path.split(path)
                if nzo_id not in self.__nzo_table:
                    nzo = sabnzbd.load_data(nzo_id, path, remove=True)
                    if nzo:
                        self.add(nzo, save=True)
Esempio n. 12
0
 def __init__(self):
     """Initialize the ratings handler, restoring state saved on disk.

     Defaults are assigned first so a corrupt admin file leaves the
     object in a usable state.
     """
     self.shutdown = False
     self.queue = OrderedSetQueue()
     self.version = Rating.VERSION
     self.ratings = {}
     self.nzo_indexer_map = {}
     try:
         rating_data = sabnzbd.load_admin(RATING_FILE_NAME)
         if rating_data:
             self.version, self.ratings, self.nzo_indexer_map = rating_data
     except:
         # Keep the defaults assigned above
         logging.info("Corrupt %s file, discarding", RATING_FILE_NAME)
         logging.info("Traceback: ", exc_info=True)
     super().__init__()
Esempio n. 13
0
 def read(self):
     """Read byte-counter admin from disk.

     On failure, seed the totals from the history database under a
     fake server key 'x', then refresh the counters.
     """
     data = sabnzbd.load_admin(BYTES_FILE_NAME)
     try:
         self.last_update, self.grand_total, \
         self.day_total, self.week_total, self.month_total, \
         self.end_of_day, self.end_of_week, self.end_of_month = data
     except:
         # Get the latest data from the database and assign to a fake server
         grand, month, week = sabnzbd.proxy_get_history_size()
         if grand: self.grand_total['x'] = grand
         if month: self.month_total['x'] = month
         if week: self.week_total['x'] = week
     # Force update of counters
     self.update()
Esempio n. 14
0
 def read(self):
     """Read byte-counter admin from disk.

     On failure, seed the totals from the history database under a
     fake server key 'x', then refresh the counters.
     """
     data = sabnzbd.load_admin(BYTES_FILE_NAME)
     try:
         self.last_update, self.grand_total, \
         self.day_total, self.week_total, self.month_total, \
         self.end_of_day, self.end_of_week, self.end_of_month = data
     except:
         # Get the latest data from the database and assign to a fake server
         grand, month, week  = sabnzbd.proxy_get_history_size()
         if grand: self.grand_total['x'] = grand
         if month: self.month_total['x'] = month
         if week:  self.week_total['x'] = week
     # Force update of counters
     self.update()
Esempio n. 15
0
 def __init__(self):
     """Initialize the ratings thread, restoring persisted state.

     A version mismatch or corrupt file falls back to empty defaults.
     Stops itself when SSL support is unavailable.
     """
     Rating.do = self
     self.shutdown = False
     self.queue = OrderedSetQueue()
     try:
         (self.version, self.ratings, self.nzo_indexer_map) = sabnzbd.load_admin("Rating.sab")
         # Force the except-branch defaults on version mismatch
         if (self.version != Rating.VERSION):
             raise Exception()
     except:
         self.version = Rating.VERSION
         self.ratings = {}
         self.nzo_indexer_map = {}
     Thread.__init__(self)
     if not _HAVE_SSL:
         logging.warning('Ratings server requires secure connection')
         self.stop()
Esempio n. 16
0
 def load(self):
     """Load the postproc queue from disk.

     (Original docstring said "Save" -- this method loads.) Jobs whose
     download folder no longer exists are dropped.
     """
     self.history_queue = []
     logging.info("Loading postproc queue")
     data = sabnzbd.load_admin(POSTPROC_QUEUE_FILE_NAME)
     if data is None:
         return
     try:
         version, history_queue = data
         if POSTPROC_QUEUE_VERSION != version:
             logging.warning(T('Old queue detected, use Status->Repair to convert the queue'))
         elif isinstance(history_queue, list):
             # Keep only jobs whose download folder still exists on disk
             self.history_queue = [nzo for nzo in history_queue if os.path.exists(nzo.downpath)]
     except:
         logging.info('Corrupt %s file, discarding', POSTPROC_QUEUE_FILE_NAME)
         logging.info("Traceback: ", exc_info=True)
Esempio n. 17
0
 def load(self):
     """Load the postproc queue from disk.

     (Original docstring said "Save" -- this method loads.) Jobs whose
     download folder no longer exists are dropped.
     """
     self.history_queue = []
     logging.info("Loading postproc queue")
     data = sabnzbd.load_admin(POSTPROC_QUEUE_FILE_NAME)
     if data is None:
         return
     try:
         version, history_queue = data
         if POSTPROC_QUEUE_VERSION != version:
             logging.warning(T('Old queue detected, use Status->Repair to convert the queue'))
         elif isinstance(history_queue, list):
             # Keep only jobs whose download folder still exists on disk
             self.history_queue = [nzo for nzo in history_queue if os.path.exists(nzo.downpath)]
     except:
         logging.info('Corrupt %s file, discarding', POSTPROC_QUEUE_FILE_NAME)
         logging.info("Traceback: ", exc_info=True)
Esempio n. 18
0
 def load(self):
     """Load the postproc queue from disk.

     (Original docstring said "Save" -- this method loads.) Unlike the
     newer variants, a version mismatch here still attempts to use the
     queue; jobs whose download folder is gone are dropped.
     """
     self.history_queue = []
     logging.info("Loading postproc queue")
     data = sabnzbd.load_admin(POSTPROC_QUEUE_FILE_NAME)
     if data is None:
         return
     try:
         version, history_queue = data
         if POSTPROC_QUEUE_VERSION != version:
             logging.warning(Ta('Failed to load postprocessing queue: Wrong version (need:%s, found:%s)'), POSTPROC_QUEUE_VERSION, version)
         if isinstance(history_queue, list):
             # Keep only jobs whose download folder still exists on disk
             self.history_queue = [nzo for nzo in history_queue if os.path.exists(nzo.downpath)]
     except:
         logging.info('Corrupt %s file, discarding', POSTPROC_QUEUE_FILE_NAME)
         logging.info("Traceback: ", exc_info = True)
Esempio n. 19
0
    def read_queue(self, repair):
        """ Read queue from disk, supporting repair modes
            0 = no repairs
            1 = use existing queue, add missing "incomplete" folders
            2 = Discard all queue admin, reconstruct from "incomplete" folders
        """
        nzo_ids = []
        if repair < 2:
            # Read the queue from the saved files
            data = sabnzbd.load_admin(QUEUE_FILE_NAME)

            # Process the data and check compatibility
            nzo_ids = self.check_compatibility(data)

        # First handle jobs in the queue file
        folders = []
        for nzo_id in nzo_ids:
            folder, _id = os.path.split(nzo_id)
            path = get_admin_path(folder, future=False)

            # Try as normal job
            nzo = sabnzbd.load_data(_id, path, remove=False)
            if not nzo:
                # Try as future job
                path = get_admin_path(folder, future=True)
                nzo = sabnzbd.load_data(_id, path)
            if nzo:
                self.add(nzo, save=False, quiet=True)
                folders.append(folder)

        # Scan for any folders in "incomplete" that are not yet in the queue
        if repair:
            self.scan_jobs(not folders)
            # Handle any lost future jobs
            for item in globber_full(
                    os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)):
                path, nzo_id = os.path.split(item)
                if nzo_id not in self.__nzo_table:
                    if nzo_id.startswith('SABnzbd_nzo'):
                        nzo = sabnzbd.load_data(nzo_id, path, remove=True)
                        if nzo:
                            self.add(nzo, save=True)
                    else:
                        # Unknown leftover file: best-effort removal
                        try:
                            os.remove(item)
                        except:
                            pass
Esempio n. 20
0
    def __init__(self):
        """Restore RSS feed job data from disk, pruning obsolete entries."""
        self.jobs = {}
        self.next_run = time.time()
        self.shutdown = False

        try:
            self.jobs = sabnzbd.load_admin(RSS_FILE_NAME)
            if self.jobs:
                for feed in self.jobs:
                    remove_obsolete(self.jobs[feed], self.jobs[feed].keys())
        except:
            logging.warning(T('Cannot read %s'), RSS_FILE_NAME)
            logging.info("Traceback: ", exc_info=True)

        # Storage needs to be dict
        if not self.jobs:
            self.jobs = {}
Esempio n. 21
0
    def read_queue(self, repair):
        """ Read queue from disk, supporting repair modes
            0 = no repairs
            1 = use existing queue, add missing "incomplete" folders
            2 = Discard all queue admin, reconstruct from "incomplete" folders
        """
        nzo_ids = []
        if repair < 2:
            # Read the queue from the saved files
            data = sabnzbd.load_admin(QUEUE_FILE_NAME)

            # Process the data and check compatibility
            nzo_ids = self.check_compatibility(repair, data)

        # First handle jobs in the queue file
        folders = []
        for nzo_id in nzo_ids:
            folder, _id = os.path.split(nzo_id)
            path = get_admin_path(folder, future=False)

            # Try as normal job
            nzo = sabnzbd.load_data(_id, path, remove=False)
            if not nzo:
                # Try as future job
                path = get_admin_path(folder, future=True)
                nzo = sabnzbd.load_data(_id, path)
            if nzo:
                self.add(nzo, save=False, quiet=True)
                folders.append(folder)

        # Scan for any folders in "incomplete" that are not yet in the queue
        if repair:
            self.scan_jobs(not folders)
            # Handle any lost future jobs
            for item in globber_full(os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)):
                path, nzo_id = os.path.split(item)
                if nzo_id not in self.__nzo_table:
                    if nzo_id.startswith('SABnzbd_nzo'):
                        nzo = sabnzbd.load_data(nzo_id, path, remove=True)
                        if nzo:
                            self.add(nzo, save=True)
                    else:
                        # Unknown leftover file: best-effort removal
                        try:
                            remove_file(item)
                        except:
                            pass
Esempio n. 22
0
    def __init__(self):
        """Restore RSS feed job data from disk, pruning obsolete entries."""
        self.jobs = {}
        self.next_run = time.time()
        self.shutdown = False

        try:
            self.jobs = sabnzbd.load_admin(RSS_FILE_NAME)
            if self.jobs:
                for feed in self.jobs:
                    remove_obsolete(self.jobs[feed], self.jobs[feed].keys())
        except:
            logging.warning(T('Cannot read %s'), RSS_FILE_NAME)
            logging.info("Traceback: ", exc_info=True)

        # Storage needs to be dict
        if not self.jobs:
            self.jobs = {}
Esempio n. 23
0
 def __init__(self):
     """Initialize the ratings thread, restoring and migrating state.

     Version-1 ratings are migrated to the V2 format (Python 2
     codebase -- note `iteritems`). Any other version mismatch or
     corrupt file falls back to empty defaults.
     """
     Rating.do = self
     self.shutdown = False
     self.queue = OrderedSetQueue()
     try:
         self.version, self.ratings, self.nzo_indexer_map = sabnzbd.load_admin(
             "Rating.sab", silent=not cfg.rating_enable())
         if self.version == 1:
             # Migrate stored V1 ratings to the V2 format
             ratings = {}
             for k, v in self.ratings.iteritems():
                 ratings[k] = NzbRatingV2().to_v2(v)
             self.ratings = ratings
             self.version = 2
         # Force the except-branch defaults on any remaining mismatch
         if self.version != Rating.VERSION:
             raise Exception()
     except:
         self.version = Rating.VERSION
         self.ratings = {}
         self.nzo_indexer_map = {}
     Thread.__init__(self)
Esempio n. 24
0
 def __init__(self):
     """Initialize the ratings thread, restoring and migrating state.

     Version-1 ratings are migrated to the V2 format (Python 2
     codebase -- note `iteritems`). Any other version mismatch or
     corrupt file falls back to empty defaults.
     """
     Rating.do = self
     self.shutdown = False
     self.queue = OrderedSetQueue()
     try:
         self.version, self.ratings, self.nzo_indexer_map = sabnzbd.load_admin("Rating.sab",
                                                                               silent=not cfg.rating_enable())
         if self.version == 1:
             # Migrate stored V1 ratings to the V2 format
             ratings = {}
             for k, v in self.ratings.iteritems():
                 ratings[k] = NzbRatingV2().to_v2(v)
             self.ratings = ratings
             self.version = 2
         # Force the except-branch defaults on any remaining mismatch
         if self.version != Rating.VERSION:
             raise Exception()
     except:
         self.version = Rating.VERSION
         self.ratings = {}
         self.nzo_indexer_map = {}
     Thread.__init__(self)
Esempio n. 25
0
    def __init__(self):
        """Restore RSS feed job data from disk and patch feedparser."""
        self.jobs = {}
        self.next_run = time.time()
        self.shutdown = False

        try:
            self.jobs = sabnzbd.load_admin(RSS_FILE_NAME)
            if self.jobs:
                for feed in self.jobs:
                    remove_obsolete(self.jobs[feed],
                                    list(self.jobs[feed].keys()))
        except:
            logging.warning(T("Cannot read %s"), RSS_FILE_NAME)
            logging.info("Traceback: ", exc_info=True)

        # Storage needs to be dict
        if not self.jobs:
            self.jobs = {}

        # jobs is a NAME-indexed dictionary
        #    Each element is link-indexed dictionary
        #        Each element is another dictionary:
        #           status : 'D', 'G', 'B', 'X' (downloaded, good-match, bad-match, obsolete)
        #               '*' added means: from the initial batch
        #               '-' added to 'D' means downloaded, but not displayed anymore
        #           title : Title
        #           url : URL
        #           cat : category
        #           orgcat : category as read from feed
        #           pp : pp
        #           script : script
        #           prio : priority
        #           time : timestamp (used for time-based clean-up)
        #           size : size in bytes
        #           age : age in datetime format as specified by feed
        #           season : season number (if applicable)
        #           episode : episode number (if applicable)

        # Patch feedparser
        patch_feedparser()
Esempio n. 26
0
 def __init__(self):
     """Initialize the ratings thread, restoring and migrating state.

     Version-1 ratings are migrated to the V2 format (Python 2
     codebase -- note `iteritems`); stops itself when SSL support is
     unavailable.
     """
     Rating.do = self
     self.shutdown = False
     self.queue = OrderedSetQueue()
     try:
         (self.version, self.ratings, self.nzo_indexer_map) = sabnzbd.load_admin("Rating.sab")
         if self.version == 1:
             # Migrate stored V1 ratings to the V2 format
             ratings = {}
             for k, v in self.ratings.iteritems():
                 ratings[k] = NzbRatingV2().to_v2(v)
             self.ratings = ratings
             self.version = 2
         # Force the except-branch defaults on any remaining mismatch
         if (self.version != Rating.VERSION):
             raise Exception()
     except:
         self.version = Rating.VERSION
         self.ratings = {}
         self.nzo_indexer_map = {}
     Thread.__init__(self)
     if not _HAVE_SSL:
         logging.warning('Ratings server requires secure connection')
         self.stop()
Esempio n. 27
0
 def load(self):
     """Load the postproc queue from disk.

     (Original docstring said "Save" -- this method loads.) A version
     mismatch still attempts to use the queue; jobs whose download
     folder is gone are dropped.
     """
     self.history_queue = []
     logging.info("Loading postproc queue")
     data = sabnzbd.load_admin(POSTPROC_QUEUE_FILE_NAME)
     if data is None:
         return
     try:
         version, history_queue = data
         if POSTPROC_QUEUE_VERSION != version:
             logging.warning(
                 Ta('Failed to load postprocessing queue: Wrong version (need:%s, found:%s)'
                    ), POSTPROC_QUEUE_VERSION, version)
         if isinstance(history_queue, list):
             # Keep only jobs whose download folder still exists on disk
             self.history_queue = [
                 nzo for nzo in history_queue
                 if os.path.exists(nzo.downpath)
             ]
     except:
         logging.info('Corrupt %s file, discarding',
                      POSTPROC_QUEUE_FILE_NAME)
         logging.info("Traceback: ", exc_info=True)
Esempio n. 28
0
    def __init__(self):
        """Set up the directory scanner thread and restore persisted state."""
        threading.Thread.__init__(self)

        # NOTE(review): newdir() appears to set self.dirscan_dir, used below
        self.newdir()
        try:
            # NOTE(review): "dir" shadows the builtin of the same name
            dir, self.ignored, self.suspected = sabnzbd.load_admin(SCAN_FILE_NAME)
            # Saved state belongs to a different watched folder: discard it
            if dir != self.dirscan_dir:
                self.ignored = {}
                self.suspected = {}
        except:
            # Missing or corrupt admin file: start with empty bookkeeping
            self.ignored = {}   # Will hold all unusable files and the
            # successfully processed ones that cannot be deleted
            self.suspected = {}  # Will hold name/attributes of suspected candidates

        self.shutdown = False
        self.error_reported = False  # Prevents mulitple reporting of missing watched folder
        self.dirscan_dir = cfg.dirscan_dir.get_path()
        self.dirscan_speed = cfg.dirscan_speed()
        self.busy = False
        self.trigger = False
        # Re-read settings when the user changes them
        cfg.dirscan_dir.callback(self.newdir)
        cfg.dirscan_speed.callback(self.newspeed)
        # Singleton-style handle used elsewhere in the project
        DirScanner.do = self
Esempio n. 29
0
 def __init__(self):
     """ Restore saved ratings state from disk; on any error (including an
         unknown version number) fall back to a clean, empty state.
     """
     Rating.do = self
     self.shutdown = False
     self.queue = OrderedSetQueue()
     try:
         loaded = sabnzbd.load_admin(
             "Rating.sab", silent=not cfg.rating_enable())
         self.version, self.ratings, self.nzo_indexer_map = loaded
         if self.version == 1:
             # Migrate legacy version-1 ratings to the version-2 format
             migrated = {}
             for key, old_rating in self.ratings.iteritems():
                 migrated[key] = NzbRatingV2().to_v2(old_rating)
             self.ratings = migrated
             self.version = 2
         if self.version != Rating.VERSION:
             # Unknown version: treat as unreadable and reset in the handler
             raise Exception()
     except:
         self.version = Rating.VERSION
         self.ratings = {}
         self.nzo_indexer_map = {}
     Thread.__init__(self)
     if not _HAVE_SSL:
         logging.warning(T('Ratings server requires secure connection'))
         self.stop()
Esempio n. 30
0
    def __init__(self):
        """ Load the saved RSS feed history from disk, keeping only entries
            that belong to a currently defined feed and that pass a basic
            consistency check (dict with a unicode title).

            Cleanup: removed the nested helpers check_str()/check_int() and
            the local variable `data`, none of which were used in this body.
        """
        self.jobs = {}
        self.next_run = time.time()
        self.shutdown = False

        try:
            defined = config.get_rss().keys()
            feeds = sabnzbd.load_admin(RSS_FILE_NAME)
            if type(feeds) == type({}):
                for feed in feeds:
                    if feed not in defined:
                        # Feed no longer configured: skip its stored entries
                        logging.debug('Dropping obsolete data for feed "%s"', feed)
                        continue
                    self.jobs[feed] = {}
                    for link in feeds[feed]:
                        # Consistency check on data
                        try:
                            item = feeds[feed][link]
                            if not isinstance(item, dict) or not isinstance(item.get('title'), unicode):
                                raise IndexError
                            self.jobs[feed][link] = item
                        except (KeyError, IndexError):
                            logging.info('Incorrect entry in %s detected, discarding %s', RSS_FILE_NAME, item)
                    remove_obsolete(self.jobs[feed], self.jobs[feed].keys())
        except IOError:
            logging.debug('Cannot read file %s', RSS_FILE_NAME)
Esempio n. 31
0
    def __init__(self):
        def check_str(p):
            return p is None or p == '' or isinstance(p, basestring)

        def check_int(p):
            try:
                int(p)
                return True
            except:
                return False

        self.jobs = {}
        self.next_run = time.time()

        try:
            defined = config.get_rss().keys()
            feeds = sabnzbd.load_admin(RSS_FILE_NAME)
            if type(feeds) == type({}):
                for feed in feeds:
                    if feed not in defined:
                        logging.debug('Dropping obsolete data for feed "%s"',
                                      feed)
                        continue
                    self.jobs[feed] = {}
                    for link in feeds[feed]:
                        data = feeds[feed][link]
                        if type(data) == type([]):
                            # Convert previous list-based store to dictionary
                            new = {}
                            try:
                                new['status'] = data[0]
                                new['title'] = data[1]
                                new['url'] = data[2]
                                new['cat'] = data[3]
                                new['pp'] = data[4]
                                new['script'] = data[5]
                                new['time'] = data[6]
                                new['prio'] = str(NORMAL_PRIORITY)
                                new['rule'] = 0
                                self.jobs[feed][link] = new
                            except IndexError:
                                del new
                        else:
                            # Consistency check on data
                            try:
                                item = feeds[feed][link]
                                if not isinstance(
                                        item, dict) or not isinstance(
                                            item.get('title'), unicode):
                                    raise IndexError
                                if item.get('status',
                                            ' ')[0] not in ('D', 'G', 'B',
                                                            'X'):
                                    item['status'] = 'X'
                                if not isinstance(item.get('url'), unicode):
                                    item['url'] = ''
                                if not check_str(item.get('cat')):
                                    item['cat'] = ''
                                if not check_str(item.get('orgcat')):
                                    item['orgcat'] = ''
                                if not check_str(item.get('pp')):
                                    item['pp'] = '3'
                                if not check_str(item.get('script')):
                                    item['script'] = 'None'
                                if not check_str(item.get('prio')):
                                    item['prio'] = '-100'
                                if not check_int(item.get('rule', 0)):
                                    item['rule'] = 0
                                if not check_int(item.get('size', 0L)):
                                    item['size'] = 0L
                                if not isinstance(item.get('time'), float):
                                    item['time'] = time.time()
                                if not check_int(item.get('order', 0)):
                                    item.get['order'] = 0
                                self.jobs[feed][link] = item
                            except (KeyError, IndexError):
                                logging.info(
                                    'Incorrect entry in %s detected, discarding %s',
                                    RSS_FILE_NAME, item)
Esempio n. 32
0
    def __init__(self):
        def check_str(p):
            return p is None or p == '' or isinstance(p, basestring)

        def check_int(p):
            try:
                int(p)
                return True
            except:
                return False

        self.jobs = {}
        self.next_run = time.time()

        try:
            defined = config.get_rss().keys()
            feeds = sabnzbd.load_admin(RSS_FILE_NAME)
            if type(feeds) == type({}):
                for feed in feeds:
                    if feed not in defined:
                        logging.debug('Dropping obsolete data for feed "%s"', feed)
                        continue
                    self.jobs[feed] = {}
                    for link in feeds[feed]:
                        data = feeds[feed][link]
                        if type(data) == type([]):
                            # Convert previous list-based store to dictionary
                            new = {}
                            try:
                                new['status'] = data[0]
                                new['title'] = data[1]
                                new['url'] = data[2]
                                new['cat'] = data[3]
                                new['pp'] = data[4]
                                new['script'] = data[5]
                                new['time'] = data[6]
                                new['prio'] = str(NORMAL_PRIORITY)
                                new['rule'] = 0
                                self.jobs[feed][link] = new
                            except IndexError:
                                del new
                        else:
                            # Consistency check on data
                            try:
                                item = feeds[feed][link]
                                if not isinstance(item, dict) or not isinstance(item.get('title'), unicode):
                                    raise IndexError
                                if item.get('status', ' ')[0] not in ('D', 'G', 'B', 'X'):
                                    item['status'] = 'X'
                                if not isinstance(item.get('url'), unicode):
                                    item['url'] = ''
                                if not check_str(item.get('cat')):
                                    item['cat'] = ''
                                if not check_str(item.get('orgcat')):
                                    item['orgcat'] = ''
                                if not check_str(item.get('pp')):
                                    item['pp'] = '3'
                                if not check_str(item.get('script')):
                                    item['script'] = 'None'
                                if not check_str(item.get('prio')):
                                    item['prio'] = '-100'
                                if not check_int(item.get('rule', 0)):
                                    item['rule'] = 0
                                if not check_int(item.get('size', 0L)):
                                    item['size'] = 0L
                                if not isinstance(item.get('time'), float):
                                    item['time'] = time.time()
                                if not check_int(item.get('order', 0)):
                                    item.get['order'] = 0
                                self.jobs[feed][link] = item
                            except (KeyError, IndexError):
                                logging.info('Incorrect entry in %s detected, discarding %s', RSS_FILE_NAME, item)
Esempio n. 33
0
    def __init__(self):
        """ Load the saved RSS feed history from disk, converting legacy
            list-based entries to dictionaries, sanitizing every stored
            field and rewriting newzbin URLs to the configured host.

            Bug fix: the `order` fallback used `item.get['order'] = 0`,
            which subscript-assigns a bound method and raises TypeError
            (an exception NOT caught by the (KeyError, IndexError) handler).
        """
        def check_str(p):
            # Accept an absent, empty or any str value
            return p is None or p == '' or isinstance(p, str)

        def check_int(p):
            # True when p can be converted to an integer
            try:
                int(p)
                return True
            except:
                return False

        self.jobs = {}
        try:
            defined = config.get_rss().keys()
            feeds = sabnzbd.load_admin(RSS_FILE_NAME)
            if type(feeds) == type({}):
                for feed in feeds:
                    if feed not in defined:
                        logging.debug('Dropping obsolete data for feed "%s"',
                                      feed)
                        continue
                    self.jobs[feed] = {}
                    for link in feeds[feed]:
                        data = feeds[feed][link]
                        if type(data) == type([]):
                            # Convert previous list-based store to dictionary
                            new = {}
                            try:
                                new['status'] = data[0]
                                new['title'] = data[1]
                                new['url'] = data[2]
                                new['cat'] = data[3]
                                new['pp'] = data[4]
                                new['script'] = data[5]
                                new['time'] = data[6]
                                new['prio'] = str(NORMAL_PRIORITY)
                                new['rule'] = 0
                                self.jobs[feed][link] = new
                            except IndexError:
                                # Incomplete legacy entry: drop it
                                del new
                        else:
                            # Consistency check on data
                            try:
                                item = feeds[feed][link]
                                if not isinstance(item, dict) or not isinstance(item.get('title'), unicode):
                                    raise IndexError
                                if item.get('status', ' ')[0] not in ('D', 'G', 'B', 'X'):
                                    item['status'] = 'X'
                                if not isinstance(item.get('url'), unicode):
                                    item['url'] = ''
                                # Point stored newzbin links at the configured mirror
                                item['url'] = item['url'].replace(
                                    'www.newzbin.com', cfg.newzbin_url())
                                if not check_str(item.get('cat')):
                                    item['cat'] = ''
                                if not check_str(item.get('orgcat')):
                                    item['orgcat'] = ''
                                if not check_str(item.get('pp')):
                                    item['pp'] = '3'
                                if not check_str(item.get('script')):
                                    item['script'] = 'None'
                                if not check_str(item.get('prio')):
                                    item['prio'] = '-100'
                                if not check_int(item.get('rule', 0)):
                                    item['rule'] = 0
                                if not isinstance(item.get('time'), float):
                                    item['time'] = time.time()
                                if not check_int(item.get('order', 0)):
                                    # BUG FIX: was `item.get['order'] = 0` (TypeError)
                                    item['order'] = 0
                                self.jobs[feed][link] = item
                            except (KeyError, IndexError):
                                logging.info(
                                    'Incorrect entry in %s detected, discarding %s',
                                    RSS_FILE_NAME, item)

                    remove_obsolete(self.jobs[feed], self.jobs[feed].keys())

        except IOError:
            logging.debug('Cannot read file %s', RSS_FILE_NAME)

        # jobs is a NAME-indexed dictionary
        #    Each element is link-indexed dictionary
        #        Each element is another dictionary:
        #           status : 'D', 'G', 'B', 'X' (downloaded, good-match, bad-match, obsolete)
        #               '*' added means: from the initial batch
        #               '-' added to 'D' means downloaded, but not displayed anymore
        #           title : Title
        #           url : URL or MsgId
        #           cat : category
        #           orgcat : category as read from feed
        #           pp : pp
        #           script : script
        #           prio : priority
        #           time : timestamp (used for time-based clean-up)
        #           order : order in the RSS feed

        self.shutdown = False
Esempio n. 34
0
    def read_queue(self, repair):
        """ Read queue from disk, supporting repair modes
            0 = no repairs
            1 = use existing queue, add missing "incomplete" folders
            2 = Discard all queue admin, reconstruct from "incomplete" folders

            Idiom fix: the version comparison used `not x == y`; now `x != y`.
        """
        nzo_ids = []
        if repair < 2:
            # Read the queue from the saved files
            data = sabnzbd.load_admin(QUEUE_FILE_NAME)
            if not data:
                try:
                    # Try previous queue file
                    queue_vers, nzo_ids, dummy = sabnzbd.load_admin(QUEUE_FILE_TMPL % '9')
                except:
                    nzo_ids = []
                if nzo_ids:
                    # An old-format queue cannot be loaded directly; require a repair run
                    logging.warning(T('Old queue detected, use Status->Repair to convert the queue'))
                    nzo_ids = []
            else:
                try:
                    queue_vers, nzo_ids, dummy = data
                    if queue_vers != QUEUE_VERSION:
                        nzo_ids = []
                        logging.error(T('Incompatible queuefile found, cannot proceed'))
                        if not repair:
                            # Without repair permission there is no safe recovery
                            panic_queue(os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME))
                            exit_sab(2)
                except ValueError:
                    nzo_ids = []
                    logging.error(T('Error loading %s, corrupt file detected'),
                                  os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME))
                    if not repair:
                        return

        # First handle jobs in the queue file
        folders = []
        for nzo_id in nzo_ids:
            folder, _id = os.path.split(nzo_id)
            # Try as normal job
            path = get_admin_path(folder, False)
            nzo = sabnzbd.load_data(_id, path, remove=False)
            if not nzo:
                # Try as future job
                path = get_admin_path(folder, True)
                nzo = sabnzbd.load_data(_id, path)
            if nzo:
                self.add(nzo, save=False, quiet=True)
                folders.append(folder)

        # Scan for any folders in "incomplete" that are not yet in the queue
        if repair:
            self.scan_jobs(not folders)
            # Handle any lost future jobs
            for item in globber_full(os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)):
                path, nzo_id = os.path.split(item)
                if nzo_id not in self.__nzo_table:
                    if nzo_id.startswith('SABnzbd_nzo'):
                        nzo = sabnzbd.load_data(nzo_id, path, remove=True)
                        if nzo:
                            self.add(nzo, save=True)
                    else:
                        # Leftover non-job file in the future-queue folder: best-effort delete
                        try:
                            os.remove(item)
                        except:
                            pass
Esempio n. 35
0
 def __init__(self):
     """ Restore the saved bookmark list; start empty when none exists. """
     self.bookmarks = sabnzbd.load_admin(BOOKMARK_FILE_NAME) or []
     self.__busy = False
     Bookmarks.do = self
Esempio n. 36
0
 def __init__(self):
     """ Load bookmarks from the admin folder, defaulting to an empty list. """
     loaded = sabnzbd.load_admin(BOOKMARK_FILE_NAME)
     self.bookmarks = loaded if loaded else []
     self.__busy = False
     Bookmarks.do = self
Esempio n. 37
0
    def __init__(self):
        """ Load the saved RSS feed history from disk, converting legacy
            list-based entries to dictionaries, sanitizing every stored
            field and rewriting newzbin URLs to the configured host.

            Bug fix: the `order` fallback used `item.get['order'] = 0`,
            which subscript-assigns a bound method and raises TypeError
            (an exception NOT caught by the (KeyError, IndexError) handler).
        """
        def check_str(p):
            # Accept an absent, empty or any str value
            return p is None or p == '' or isinstance(p, str)

        def check_int(p):
            # True when p can be converted to an integer
            try:
                int(p)
                return True
            except:
                return False

        self.jobs = {}
        try:
            defined = config.get_rss().keys()
            feeds = sabnzbd.load_admin(RSS_FILE_NAME)
            if type(feeds) == type({}):
                for feed in feeds:
                    if feed not in defined:
                        logging.debug('Dropping obsolete data for feed "%s"', feed)
                        continue
                    self.jobs[feed] = {}
                    for link in feeds[feed]:
                        data = feeds[feed][link]
                        if type(data) == type([]):
                            # Convert previous list-based store to dictionary
                            new = {}
                            try:
                                new['status'] = data[0]
                                new['title'] = data[1]
                                new['url'] = data[2]
                                new['cat'] = data[3]
                                new['pp'] = data[4]
                                new['script'] = data[5]
                                new['time'] = data[6]
                                new['prio'] = str(NORMAL_PRIORITY)
                                new['rule'] = 0
                                self.jobs[feed][link] = new
                            except IndexError:
                                # Incomplete legacy entry: drop it
                                del new
                        else:
                            # Consistency check on data
                            try:
                                item = feeds[feed][link]
                                if not isinstance(item, dict) or not isinstance(item.get('title'), unicode):
                                    raise IndexError
                                if item.get('status', ' ')[0] not in ('D', 'G', 'B', 'X'):
                                    item['status'] = 'X'
                                if not isinstance(item.get('url'), unicode): item['url'] = ''
                                # Point stored newzbin links at the configured mirror
                                item['url'] = item['url'].replace('www.newzbin.com', cfg.newzbin_url())
                                if not check_str(item.get('cat')): item['cat'] = ''
                                if not check_str(item.get('orgcat')): item['orgcat'] = ''
                                if not check_str(item.get('pp')): item['pp'] = '3'
                                if not check_str(item.get('script')): item['script'] = 'None'
                                if not check_str(item.get('prio')): item['prio'] = '-100'
                                if not check_int(item.get('rule', 0)): item['rule'] = 0
                                if not isinstance(item.get('time'), float): item['time'] = time.time()
                                # BUG FIX: was `item.get['order'] = 0` (TypeError)
                                if not check_int(item.get('order', 0)): item['order'] = 0
                                self.jobs[feed][link] = item
                            except (KeyError, IndexError):
                                logging.info('Incorrect entry in %s detected, discarding %s', RSS_FILE_NAME, item)

                    remove_obsolete(self.jobs[feed], self.jobs[feed].keys())

        except IOError:
            logging.debug('Cannot read file %s', RSS_FILE_NAME)

        # jobs is a NAME-indexed dictionary
        #    Each element is link-indexed dictionary
        #        Each element is another dictionary:
        #           status : 'D', 'G', 'B', 'X' (downloaded, good-match, bad-match, obsolete)
        #               '*' added means: from the initial batch
        #               '-' added to 'D' means downloaded, but not displayed anymore
        #           title : Title
        #           url : URL or MsgId
        #           cat : category
        #           orgcat : category as read from feed
        #           pp : pp
        #           script : script
        #           prio : priority
        #           time : timestamp (used for time-based clean-up)
        #           order : order in the RSS feed

        self.shutdown = False