Beispiel #1
0
def cleanup_list(wdir, skip_nzb):
    """ Remove all files whose extension matches the cleanup list,
        optionally ignoring the nzb extension

        Recurses into sub-directories; afterwards a best-effort attempt
        is made to remove the (possibly now empty) directory itself.
    """
    if cfg.cleanup_list():
        try:
            files = os.listdir(wdir)
        except Exception:
            # Directory unreadable or missing: nothing to clean
            # (narrowed from a bare except so Ctrl-C/SystemExit propagate)
            files = ()
        for filename in files:
            path = os.path.join(wdir, filename)
            if os.path.isdir(path):
                # Clean sub-directories recursively
                cleanup_list(path, skip_nzb)
            else:
                if on_cleanup_list(filename, skip_nzb):
                    try:
                        logging.info("Removing unwanted file %s", path)
                        remove_file(path)
                    except Exception:
                        # Keep going with the remaining files
                        logging.error(T('Removing %s failed'), clip_path(path))
                        logging.info("Traceback: ", exc_info=True)
        if files:
            # Best-effort: only succeeds when the directory ended up empty
            try:
                remove_dir(wdir)
            except Exception:
                pass
Beispiel #2
0
def cleanup_list(wdir, skip_nzb):
    """ Remove all files whose extension matches the cleanup list,
        optionally ignoring the nzb extension
    """
    if not cfg.cleanup_list():
        return
    try:
        entries = os.listdir(wdir)
    except:
        entries = ()
    for entry in entries:
        entry_path = os.path.join(wdir, entry)
        if os.path.isdir(entry_path):
            # Recurse into sub-directories
            cleanup_list(entry_path, skip_nzb)
        elif on_cleanup_list(entry, skip_nzb):
            try:
                logging.info("Removing unwanted file %s", entry_path)
                remove_file(entry_path)
            except:
                logging.error(T('Removing %s failed'), clip_path(entry_path))
                logging.info("Traceback: ", exc_info=True)
    if entries:
        # Try to remove the directory itself, ignore failure
        try:
            remove_dir(wdir)
        except:
            pass
Beispiel #3
0
def load_admin(_id, remove=False, silent=False):
    """ Read data in admin folder in specified format

        Returns the unpickled object, or None when the file is missing
        or unreadable. `remove` deletes the file after a successful
        read; `silent` suppresses error logging on failure.
    """
    path = os.path.join(cfg.admin_dir.get_path(), _id)
    logging.debug("[%s] Loading data for %s from %s", misc.caller_name(), _id, path)

    if not os.path.exists(path):
        logging.info("[%s] %s missing", misc.caller_name(), path)
        return None

    try:
        with open(path, 'rb') as data_file:
            # NOTE: unpickling is acceptable here because this is the
            # application's own admin data, never untrusted input
            if cfg.use_pickle():
                data = pickle.load(data_file)
            else:
                data = cPickle.load(data_file)
        if remove:
            misc.remove_file(path)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate
        if not silent:
            excepterror = str(sys.exc_info()[0])
            logging.error(T('Loading %s failed with error %s'), path, excepterror)
            logging.info("Traceback: ", exc_info=True)
        return None

    return data
Beispiel #4
0
def load_data(_id, path, remove=True, do_pickle=True, silent=False):
    """ Read data from disk file

        Returns the unpickled object (or raw bytes when `do_pickle` is
        False), or None when the file is missing or unreadable.
        `remove` deletes the file after a successful read; `silent`
        only suppresses the debug log line, errors are always logged.
    """
    path = os.path.join(path, _id)

    if not os.path.exists(path):
        logging.info("[%s] %s missing", misc.caller_name(), path)
        return None

    if not silent:
        logging.debug("[%s] Loading data for %s from %s", misc.caller_name(), _id, path)

    try:
        with open(path, 'rb') as data_file:
            if do_pickle:
                # NOTE: our own admin data, not untrusted input
                if cfg.use_pickle():
                    data = pickle.load(data_file)
                else:
                    data = cPickle.load(data_file)
            else:
                data = data_file.read()

        if remove:
            misc.remove_file(path)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate
        logging.error(T('Loading %s failed'), path)
        logging.info("Traceback: ", exc_info=True)
        return None

    return data
Beispiel #5
0
def load_data(_id, path, remove=True, do_pickle=True, silent=False):
    """ Read data from disk file """
    path = os.path.join(path, _id)

    if not os.path.exists(path):
        logging.info("[%s] %s missing", misc.caller_name(), path)
        return None

    if not silent:
        logging.debug("[%s] Loading data for %s from %s", misc.caller_name(), _id, path)

    try:
        with open(path, 'rb') as fileobj:
            # Either unpickle the contents or hand back the raw bytes
            if not do_pickle:
                data = fileobj.read()
            elif cfg.use_pickle():
                data = pickle.load(fileobj)
            else:
                data = cPickle.load(fileobj)

        if remove:
            misc.remove_file(path)
    except:
        logging.error(T('Loading %s failed'), path)
        logging.info("Traceback: ", exc_info=True)
        return None

    return data
Beispiel #6
0
def load_admin(_id, remove=False, silent=False):
    """ Read data in admin folder in specified format """
    path = os.path.join(cfg.admin_dir.get_path(), _id)
    logging.debug("[%s] Loading data for %s from %s", misc.caller_name(), _id, path)

    if not os.path.exists(path):
        logging.info("[%s] %s missing", misc.caller_name(), path)
        return None

    try:
        with open(path, 'rb') as admin_file:
            # Select the configured pickle implementation
            loader = pickle.load if cfg.use_pickle() else cPickle.load
            data = loader(admin_file)
        if remove:
            misc.remove_file(path)
    except:
        if not silent:
            excepterror = str(sys.exc_info()[0])
            logging.error(T('Loading %s failed with error %s'), path, excepterror)
            logging.info("Traceback: ", exc_info=True)
        return None

    return data
Beispiel #7
0
def remove_data(_id, path):
    """ Remove admin file; failures are only logged at debug level """
    path = os.path.join(path, _id)
    try:
        if os.path.exists(path):
            misc.remove_file(path)
    except Exception:
        # Narrowed from a bare except; removal is best-effort
        logging.debug("Failed to remove %s", path)
Beispiel #8
0
def remove_data(_id, path):
    """ Remove admin file """
    target = os.path.join(path, _id)
    try:
        if os.path.exists(target):
            misc.remove_file(target)
    except:
        # Best-effort removal, only report at debug level
        logging.debug("Failed to remove %s", target)
Beispiel #9
0
def del_marker(path):
    """ Remove marker file; failure is logged but not raised """
    if path and os.path.exists(path):
        logging.debug('Removing marker file %s', path)
        try:
            remove_file(path)
        except Exception:
            # Narrowed from a bare except so Ctrl-C/SystemExit propagate
            logging.info('Cannot remove marker file %s', path)
            logging.info("Traceback: ", exc_info=True)
Beispiel #10
0
def del_marker(path):
    """ Remove marker file """
    if not path or not os.path.exists(path):
        return
    logging.debug('Removing marker file %s', path)
    try:
        remove_file(path)
    except:
        logging.info('Cannot remove marker file %s', path)
        logging.info("Traceback: ", exc_info=True)
Beispiel #11
0
def check_repair_request():
    """ Return True if repair request found, remove afterwards """
    path = os.path.join(cfg.admin_dir.get_path(), REPAIR_REQUEST)
    if os.path.exists(path):
        try:
            misc.remove_file(path)
        except Exception:
            # Best-effort removal; narrowed from a bare except
            pass
        return True
    return False
Beispiel #12
0
def check_repair_request():
    """ Return True if repair request found, remove afterwards """
    marker = os.path.join(cfg.admin_dir.get_path(), REPAIR_REQUEST)
    if not os.path.exists(marker):
        return False
    # Request found: consume the marker file, ignore removal errors
    try:
        misc.remove_file(marker)
    except:
        pass
    return True
Beispiel #13
0
def remove_samples(path):
    """ Remove all files below `path` that match the sample pattern """
    for root, _dirs, files in os.walk(path):
        for file_ in files:
            if RE_SAMPLE.search(file_):
                # Separate name: the original rebound the `path` parameter here
                sample_path = os.path.join(root, file_)
                try:
                    logging.info("Removing unwanted sample file %s", sample_path)
                    remove_file(sample_path)
                except Exception:
                    # Narrowed from a bare except; keep deleting the rest
                    logging.error(T('Removing %s failed'), clip_path(sample_path))
                    logging.info("Traceback: ", exc_info=True)
Beispiel #14
0
    def abort(self):
        """ Abort running instance and delete generated files

            Kills the active Unrar process, wakes the waiting thread,
            clears pending/successful sets and removes any partially
            extracted files (best-effort).
        """
        if not self.killed:
            logging.info('Aborting DirectUnpack for %s', self.cur_setname)
            self.killed = True

            # Save reference to the first rarfile
            rarfile_nzf = self.rarfile_nzf

            # Abort Unrar
            if self.active_instance:
                self.active_instance.kill()
                # We need to wait for it to kill the process
                self.active_instance.wait()

            # Wake up the thread
            with self.next_file_lock:
                self.next_file_lock.notify()

            # No new sets
            self.next_sets = []
            self.success_sets = {}

            # Remove files
            if self.unpack_dir_info:
                extraction_path, _, _, one_folder, _ = self.unpack_dir_info
                # In case of flat-unpack we need to remove the files manually
                if one_folder:
                    # RarFile can fail for mysterious reasons
                    try:
                        rar_contents = RarFile(os.path.join(
                            self.nzo.downpath, rarfile_nzf.filename),
                                               all_names=True).filelist()
                        for rm_file in rar_contents:
                            # Flat-unpack, so remove foldername from RarFile output
                            f = os.path.join(extraction_path,
                                             os.path.basename(rm_file))
                            remove_file(f)
                    except:
                        # The user will have to remove it themselves
                        logging.info(
                            'Failed to clean Direct Unpack after aborting %s',
                            rarfile_nzf.filename,
                            exc_info=True)
                        pass
                else:
                    # We can just remove the whole path
                    remove_all(extraction_path, recursive=True)
                # Remove dir-info
                self.unpack_dir_info = None

            # Reset settings
            self.reset_active()
Beispiel #15
0
def remove_samples(path):
    """ Remove all files that match the sample pattern """
    for root, _dirs, files in os.walk(path):
        # Collect the matching names first, then delete them one by one
        for sample in [name for name in files if RE_SAMPLE.search(name)]:
            sample_path = os.path.join(root, sample)
            try:
                logging.info("Removing unwanted sample file %s", sample_path)
                remove_file(sample_path)
            except:
                logging.error(T('Removing %s failed'), clip_path(sample_path))
                logging.info("Traceback: ", exc_info=True)
Beispiel #16
0
    def check_compatibility(self, data, repair=False):
        """ Do compatibility checks on the loaded data

            Returns the list of nzo_ids recovered from the queue file,
            or an empty list when the data is missing or incompatible.
            `repair` suppresses the panic/exit on a queue-version
            mismatch; it defaults to False so existing callers that
            pass only `data` keep working.
        """
        nzo_ids = []
        if not data:
            # Warn about old queue
            if sabnzbd.OLD_QUEUE and cfg.warned_old_queue() < QUEUE_VERSION:
                logging.warning(T('Old queue detected, use Status->Repair to convert the queue'))
                cfg.warned_old_queue.set(QUEUE_VERSION)
                sabnzbd.config.save_config()
        else:
            # Try to process
            try:
                queue_vers, nzo_ids, dummy = data
                if not queue_vers == QUEUE_VERSION:
                    nzo_ids = []
                    logging.error(T('Incompatible queuefile found, cannot proceed'))
                    # BUG FIX: `repair` was referenced without being defined,
                    # raising NameError on a version mismatch; it is now a
                    # backward-compatible keyword parameter.
                    if not repair:
                        panic_queue(os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME))
                        exit_sab(2)
            except ValueError:
                nzo_ids = []
                logging.error(T('Error loading %s, corrupt file detected'),
                              os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME))

        # We need to do a repair in case of old-style pickles
        if not cfg.converted_nzo_pickles():
            for nzo_id in nzo_ids:
                folder, _id = os.path.split(nzo_id)
                path = get_admin_path(folder, future=False)
                # This will update them but preserve queue-order
                if os.path.exists(os.path.join(path, _id)):
                    self.repair_job(os.path.dirname(path))

            # Remove any future-jobs, we can't save those
            for item in globber_full(os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)):
                remove_file(item)

            # Done converting
            cfg.converted_nzo_pickles.set(True)
            sabnzbd.config.save_config()
            nzo_ids = []
        return nzo_ids
Beispiel #17
0
 def execute(self, command, args=(), save=False):
     ''' Wrapper for executing SQL commands

         Retries up to 5 times while the database is locked; replaces
         the database file when it is corrupt. Returns True on success
         (also when the error is unrecoverable), False otherwise.
     '''
     for tries in xrange(5, 0, -1):
         try:
             if args and isinstance(args, tuple):
                 self.c.execute(command, args)
             else:
                 self.c.execute(command)
             if save:
                 self.con.commit()
             return True
         except:
             error = str(sys.exc_value)
             # NOTE(review): tries >= 0 is always true for xrange(5, 0, -1);
             # only the loop bound actually limits the lock retries
             if tries >= 0 and 'is locked' in error:
                 logging.debug('Database locked, wait and retry')
                 time.sleep(0.5)
                 continue
             elif 'readonly' in error:
                 logging.error(
                     T('Cannot write to History database, check access rights!'
                       ))
                 # Report back success, because there's no recovery possible
                 return True
             elif 'not a database' in error or 'malformed' in error or 'duplicate column name' in error:
                 logging.error(
                     T('Damaged History database, created empty replacement'
                       ))
                 logging.info("Traceback: ", exc_info=True)
                 self.close()
                 try:
                     remove_file(HistoryDB.db_path)
                 except:
                     pass
                 self.connect()
                 # Return False in case of "duplicate column" error
                 # because the column addition in connect() must be terminated
                 return 'duplicate column name' not in error
             else:
                 logging.error(T('SQL Command Failed, see log'))
                 logging.info("SQL: %s", command)
                 logging.info("Arguments: %s", repr(args))
                 logging.info("Traceback: ", exc_info=True)
                 try:
                     self.con.rollback()
                 except:
                     logging.debug("Rollback Failed:", exc_info=True)
         return False
Beispiel #18
0
    def abort(self):
        """ Abort running instance and delete generated files

            Kills the active Unrar process, wakes the waiting thread,
            clears pending/successful sets and removes any partially
            extracted files (best-effort). No-op when already killed
            or when no set is being processed.
        """
        if not self.killed and self.cur_setname:
            logging.info('Aborting DirectUnpack for %s', self.cur_setname)
            self.killed = True

            # Save reference to the first rarfile
            rarfile_nzf = self.rarfile_nzf

            # Abort Unrar
            if self.active_instance:
                self.active_instance.kill()
                # We need to wait for it to kill the process
                self.active_instance.wait()

            # Wake up the thread
            with self.next_file_lock:
                self.next_file_lock.notify()

            # No new sets
            self.next_sets = []
            self.success_sets = {}

            # Remove files
            if self.unpack_dir_info:
                extraction_path, _, _, one_folder, _ = self.unpack_dir_info
                # In case of flat-unpack we need to remove the files manually
                if one_folder:
                    # RarFile can fail for mysterious reasons
                    try:
                        rar_contents = RarFile(os.path.join(self.nzo.downpath, rarfile_nzf.filename), all_names=True).filelist()
                        for rm_file in rar_contents:
                            # Flat-unpack, so remove foldername from RarFile output
                            f = os.path.join(extraction_path, os.path.basename(rm_file))
                            remove_file(f)
                    except:
                        # The user will have to remove it themselves
                        logging.info('Failed to clean Direct Unpack after aborting %s', rarfile_nzf.filename, exc_info=True)
                        pass
                else:
                    # We can just remove the whole path
                    remove_all(extraction_path, recursive=True)
                # Remove dir-info
                self.unpack_dir_info = None

            # Reset settings
            self.reset_active()
Beispiel #19
0
    def read_queue(self, repair):
        """ Read queue from disk, supporting repair modes
            0 = no repairs
            1 = use existing queue, add missing "incomplete" folders
            2 = Discard all queue admin, reconstruct from "incomplete" folders
        """
        nzo_ids = []
        if repair < 2:
            # Load the saved queue admin and verify it is usable
            queue_data = sabnzbd.load_admin(QUEUE_FILE_NAME)
            nzo_ids = self.check_compatibility(queue_data)

        # Restore every job listed in the queue file
        restored_folders = []
        for nzo_id in nzo_ids:
            folder, _id = os.path.split(nzo_id)

            # Prefer the regular job location, fall back to "future"
            nzo = sabnzbd.load_data(_id, get_admin_path(folder, future=False), remove=False)
            if not nzo:
                nzo = sabnzbd.load_data(_id, get_admin_path(folder, future=True))
            if nzo:
                self.add(nzo, save=False, quiet=True)
                restored_folders.append(folder)

        # Scan for any folders in "incomplete" that are not yet in the queue
        if repair:
            self.scan_jobs(not restored_folders)
            # Handle any lost future jobs
            for item in globber_full(os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)):
                item_dir, nzo_id = os.path.split(item)
                if nzo_id not in self.__nzo_table:
                    if nzo_id.startswith('SABnzbd_nzo'):
                        nzo = sabnzbd.load_data(nzo_id, item_dir, remove=True)
                        if nzo:
                            self.add(nzo, save=True)
                    else:
                        # Not a job file: clean it up, ignore failure
                        try:
                            remove_file(item)
                        except:
                            pass
Beispiel #20
0
    def read_queue(self, repair):
        """ Read queue from disk, supporting repair modes
            0 = no repairs
            1 = use existing queue, add missing "incomplete" folders
            2 = Discard all queue admin, reconstruct from "incomplete" folders
        """
        nzo_ids = []
        if repair < 2:
            # Read the queue from the saved files
            data = sabnzbd.load_admin(QUEUE_FILE_NAME)

            # Process the data and check compatibility
            nzo_ids = self.check_compatibility(repair, data)

        # First handle jobs in the queue file
        folders = []
        for nzo_id in nzo_ids:
            folder, _id = os.path.split(nzo_id)
            path = get_admin_path(folder, future=False)

            # Try as normal job
            nzo = sabnzbd.load_data(_id, path, remove=False)
            if not nzo:
                # Try as future job
                path = get_admin_path(folder, future=True)
                nzo = sabnzbd.load_data(_id, path)
            if nzo:
                self.add(nzo, save=False, quiet=True)
                folders.append(folder)

        # Scan for any folders in "incomplete" that are not yet in the queue
        if repair:
            self.scan_jobs(not folders)
            # Handle any lost future jobs
            for item in globber_full(os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)):
                path, nzo_id = os.path.split(item)
                if nzo_id not in self.__nzo_table:
                    if nzo_id.startswith('SABnzbd_nzo'):
                        nzo = sabnzbd.load_data(nzo_id, path, remove=True)
                        if nzo:
                            self.add(nzo, save=True)
                    else:
                        # Not a job file: best-effort cleanup
                        try:
                            remove_file(item)
                        except:
                            pass
Beispiel #21
0
def pid_file(pid_path=None, pid_file=None, port=0):
    """ Create or remove pid file

        With a truthy `port` the PID file is (re)written, otherwise it
        is removed. Paths are only accepted on non-Windows and must be
        absolute.
    """
    global DIR_PID
    if not sabnzbd.WIN32:
        if pid_path and pid_path.startswith('/'):
            DIR_PID = os.path.join(pid_path, 'sabnzbd-%s.pid' % port)
        elif pid_file and pid_file.startswith('/'):
            DIR_PID = pid_file

    if DIR_PID:
        try:
            if port:
                # `with` guarantees the handle is closed even when write fails
                with open(DIR_PID, 'w') as f:
                    f.write('%d\n' % os.getpid())
            else:
                misc.remove_file(DIR_PID)
        except Exception:
            # Narrowed from a bare except so Ctrl-C/SystemExit propagate
            logging.warning('Cannot access PID file %s', DIR_PID)
Beispiel #22
0
def pid_file(pid_path=None, pid_file=None, port=0):
    """ Create or remove pid file """
    global DIR_PID
    if not sabnzbd.WIN32:
        if pid_path and pid_path.startswith('/'):
            DIR_PID = os.path.join(pid_path, 'sabnzbd-%s.pid' % port)
        elif pid_file and pid_file.startswith('/'):
            DIR_PID = pid_file

    if not DIR_PID:
        return
    try:
        if port:
            # Write our PID to the file
            pid_handle = open(DIR_PID, 'w')
            pid_handle.write('%d\n' % os.getpid())
            pid_handle.close()
        else:
            misc.remove_file(DIR_PID)
    except:
        logging.warning('Cannot access PID file %s', DIR_PID)
Beispiel #23
0
    def check_compatibility(self, repair, data):
        """ Do compatibility checks on the loaded data

            Returns the list of nzo_ids from the queue file, or an
            empty list when the data is missing or incompatible.
            When `repair` is falsy a queue-version mismatch is fatal
            (panic_queue + exit_sab).
        """
        nzo_ids = []
        if not data:
            # Warn about old queue
            if sabnzbd.OLD_QUEUE and cfg.warned_old_queue() < QUEUE_VERSION:
                logging.warning(T('Old queue detected, use Status->Repair to convert the queue'))
                cfg.warned_old_queue.set(QUEUE_VERSION)
                sabnzbd.config.save_config()
        else:
            # Try to process
            try:
                queue_vers, nzo_ids, dummy = data
                if not queue_vers == QUEUE_VERSION:
                    nzo_ids = []
                    logging.error(T('Incompatible queuefile found, cannot proceed'))
                    if not repair:
                        panic_queue(os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME))
                        exit_sab(2)
            except ValueError:
                # `data` did not unpack into (version, ids, dummy)
                nzo_ids = []
                logging.error(T('Error loading %s, corrupt file detected'),
                              os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME))

        # We need to do a repair in case of old-style pickles
        if not cfg.converted_nzo_pickles():
            for nzo_id in nzo_ids:
                folder, _id = os.path.split(nzo_id)
                path = get_admin_path(folder, future=False)
                # This will update them but preserve queue-order
                if os.path.exists(os.path.join(path, _id)):
                    self.repair_job(os.path.dirname(path))
                # NOTE(review): this continue is redundant (last statement of loop)
                continue

            # Remove any future-jobs, we can't save those
            for item in globber_full(os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)):
                remove_file(item)

            # Done converting
            cfg.converted_nzo_pickles.set(True)
            sabnzbd.config.save_config()
            nzo_ids = []
        return nzo_ids
Beispiel #24
0
 def execute(self, command, args=(), save=False):
     """ Wrapper for executing SQL commands

         Retries up to 5 times while the database is locked; replaces
         the database file when it is corrupt. Returns True on success
         (also when the error is unrecoverable), False otherwise.
     """
     for tries in xrange(5, 0, -1):
         try:
             if args and isinstance(args, tuple):
                 self.c.execute(command, args)
             else:
                 self.c.execute(command)
             if save:
                 self.con.commit()
             return True
         except:
             error = str(sys.exc_value)
             # NOTE(review): tries >= 0 is always true for xrange(5, 0, -1);
             # only the loop bound actually limits the lock retries
             if tries >= 0 and 'is locked' in error:
                 logging.debug('Database locked, wait and retry')
                 time.sleep(0.5)
                 continue
             elif 'readonly' in error:
                 logging.error(T('Cannot write to History database, check access rights!'))
                 # Report back success, because there's no recovery possible
                 return True
             elif 'not a database' in error or 'malformed' in error or 'duplicate column name' in error:
                 logging.error(T('Damaged History database, created empty replacement'))
                 logging.info("Traceback: ", exc_info=True)
                 self.close()
                 try:
                     remove_file(HistoryDB.db_path)
                 except:
                     pass
                 self.connect()
                 # Return False in case of "duplicate column" error
                 # because the column addition in connect() must be terminated
                 return 'duplicate column name' not in error
             else:
                 logging.error(T('SQL Command Failed, see log'))
                 logging.info("SQL: %s", command)
                 logging.info("Arguments: %s", repr(args))
                 logging.info("Traceback: ", exc_info=True)
                 try:
                     self.con.rollback()
                 except:
                     logging.debug("Rollback Failed:", exc_info=True)
         return False
Beispiel #25
0
def remove_samples(path):
    """ Remove all files that match the sample pattern
        Skip deleting if it matches all files or there is only 1 file
    """
    files_to_delete = []
    nr_files = 0
    for root, _dirs, files in os.walk(path):
        for file_to_match in files:
            nr_files += 1
            if RE_SAMPLE.search(file_to_match):
                files_to_delete.append(os.path.join(root, file_to_match))

    # Make sure we skip false-positives
    if 1 < len(files_to_delete) < nr_files:
        # Separate name: avoid rebinding the `path` parameter
        for sample_path in files_to_delete:
            try:
                logging.info("Removing unwanted sample file %s", sample_path)
                remove_file(sample_path)
            except Exception:
                # Narrowed from a bare except; keep deleting the rest
                logging.error(T('Removing %s failed'), clip_path(sample_path))
                logging.info("Traceback: ", exc_info=True)
    elif files_to_delete:
        # Only log a skip when something actually matched;
        # previously this fired even when no sample files were found
        logging.info("Skipping sample-removal, false-positive")
Beispiel #26
0
def ProcessSingleFile(filename,
                      path,
                      pp=None,
                      script=None,
                      cat=None,
                      catdir=None,
                      keep=False,
                      priority=None,
                      nzbname=None,
                      reuse=False,
                      nzo_info=None,
                      dup_check=True,
                      url='',
                      password=None,
                      nzo_id=None):
    """ Analyze file and create a job from it
        Supports NZB, NZB.BZ2, NZB.GZ and GZ.NZB-in-disguise
        returns (status, nzo_ids)
            status: -2==Error/retry, -1==Error, 0==OK, 1==OK-but-ignorecannot-delete
    """
    nzo_ids = []
    if catdir is None:
        catdir = cat

    try:
        # Sniff the first two bytes to detect gzip/bz2 regardless of extension
        f = open(path, 'rb')
        b1 = f.read(1)
        b2 = f.read(1)
        f.close()

        if b1 == '\x1f' and b2 == '\x8b':
            # gzip file or gzip in disguise
            name = filename.replace('.nzb.gz', '.nzb')
            f = gzip.GzipFile(path, 'rb')
        elif b1 == 'B' and b2 == 'Z':
            # bz2 file or bz2 in disguise
            name = filename.replace('.nzb.bz2', '.nzb')
            f = bz2.BZ2File(path, 'rb')
        else:
            name = filename
            f = open(path, 'rb')
        data = f.read()
        f.close()
    except:
        logging.warning(T('Cannot read %s'), misc.clip_path(path))
        logging.info("Traceback: ", exc_info=True)
        return -2, nzo_ids

    if name:
        name, cat = name_to_cat(name, catdir)
        # The name is used as the name of the folder, so sanitize it using folder specific santization
        if not nzbname:
            # Prevent embedded password from being damaged by sanitize and trimming
            nzbname = os.path.split(name)[1]

    try:
        nzo = nzbstuff.NzbObject(name,
                                 pp,
                                 script,
                                 data,
                                 cat=cat,
                                 priority=priority,
                                 nzbname=nzbname,
                                 nzo_info=nzo_info,
                                 url=url,
                                 reuse=reuse,
                                 dup_check=dup_check)
        if not nzo.password:
            nzo.password = password
    except TypeError:
        # Duplicate, ignore
        if nzo_id:
            sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
        nzo = None
    except ValueError:
        # Empty, but correct file
        return -1, nzo_ids
    except:
        # Chained comparison: "<nzb" present but closing "</nzb" missing
        if data.find("<nzb") >= 0 > data.find("</nzb"):
            # Looks like an incomplete file, retry
            return -2, nzo_ids
        else:
            # Something else is wrong, show error
            logging.error(T('Error while adding %s, removing'),
                          name,
                          exc_info=True)
            return -1, nzo_ids

    if nzo:
        if nzo_id:
            # Re-use existing nzo_id, when a "future" job gets it payload
            sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
            nzo.nzo_id = nzo_id
        nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo, quiet=reuse))
        nzo.update_rating()
    try:
        # Unless asked to keep it, the source file is consumed
        if not keep:
            misc.remove_file(path)
    except:
        logging.error(T('Error removing %s'), misc.clip_path(path))
        logging.info("Traceback: ", exc_info=True)
        return 1, nzo_ids

    return 0, nzo_ids
Beispiel #27
0
def ProcessArchiveFile(filename,
                       path,
                       pp=None,
                       script=None,
                       cat=None,
                       catdir=None,
                       keep=False,
                       priority=None,
                       url='',
                       nzbname=None,
                       password=None,
                       nzo_id=None):
    """ Analyse ZIP file and create job(s).
        Accepts ZIP files with ONLY nzb/nfo/folder files in it.
        returns (status, nzo_ids)
            status: -1==Error/Retry, 0==OK, 1==Ignore
    """
    nzo_ids = []
    if catdir is None:
        catdir = cat

    filename, cat = name_to_cat(filename, catdir)

    status, zf, extension = is_archive(path)

    if status != 0:
        return status, []

    # Count the .nzb entries; status 0 means at least one was found
    status = 1
    names = zf.namelist()
    nzbcount = 0
    for name in names:
        name = name.lower()
        if name.endswith('.nzb'):
            status = 0
            nzbcount += 1

    if status == 0:
        # With more than one NZB the supplied job name cannot apply to all
        if nzbcount != 1:
            nzbname = None
        for name in names:
            if name.lower().endswith('.nzb'):
                try:
                    data = zf.read(name)
                except:
                    logging.error(T('Cannot read %s'), name, exc_info=True)
                    zf.close()
                    return -1, []
                name = os.path.basename(name)
                if data:
                    nzo = None
                    try:
                        nzo = nzbstuff.NzbObject(name,
                                                 pp,
                                                 script,
                                                 data,
                                                 cat=cat,
                                                 url=url,
                                                 priority=priority,
                                                 nzbname=nzbname)
                        if not nzo.password:
                            nzo.password = password
                    except (TypeError, ValueError):
                        # Duplicate or empty, ignore
                        pass
                    except:
                        # Something else is wrong, show error
                        logging.error(T('Error while adding %s, removing'),
                                      name,
                                      exc_info=True)

                    if nzo:
                        if nzo_id:
                            # Re-use existing nzo_id, when a "future" job gets it payload
                            sabnzbd.nzbqueue.NzbQueue.do.remove(
                                nzo_id, add_to_history=False)
                            nzo.nzo_id = nzo_id
                            nzo_id = None
                        nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo))
                        nzo.update_rating()
        zf.close()
        try:
            # Unless asked to keep it, the source archive is consumed
            if not keep:
                misc.remove_file(path)
        except:
            logging.error(T('Error removing %s'), misc.clip_path(path))
            logging.info("Traceback: ", exc_info=True)
            status = 1
    else:
        zf.close()
        status = 1

    return status, nzo_ids
Beispiel #28
0
def ProcessSingleFile(filename, path, pp=None, script=None, cat=None, catdir=None, keep=False,
                      priority=None, nzbname=None, reuse=False, nzo_info=None, dup_check=True, url='',
                      password=None, nzo_id=None):
    """ Analyze file and create a job from it
        Supports NZB, NZB.BZ2, NZB.GZ and GZ.NZB-in-disguise
        returns (status, nzo_ids)
            status: -2==Error/retry, -1==Error, 0==OK, 1==OK-but-cannot-delete
    """
    nzo_ids = []
    if catdir is None:
        catdir = cat

    try:
        # Sniff the first two bytes so compressed files are recognized
        # even when the extension lies (e.g. GZ.NZB-in-disguise)
        with open(path, 'rb') as f:
            b1 = f.read(1)
            b2 = f.read(1)

        if b1 == '\x1f' and b2 == '\x8b':
            # gzip file or gzip in disguise
            name = filename.replace('.nzb.gz', '.nzb')
            f = gzip.GzipFile(path, 'rb')
        elif b1 == 'B' and b2 == 'Z':
            # bz2 file or bz2 in disguise
            name = filename.replace('.nzb.bz2', '.nzb')
            f = bz2.BZ2File(path, 'rb')
        else:
            name = filename
            f = open(path, 'rb')
        try:
            data = f.read()
        finally:
            # Close the handle even when decompression/reading fails;
            # the previous version leaked it on a read error
            f.close()
    except:
        logging.warning(T('Cannot read %s'), misc.clip_path(path))
        logging.info("Traceback: ", exc_info=True)
        return -2, nzo_ids

    if name:
        name, cat = name_to_cat(name, catdir)
        # The name is used as the name of the folder, so apply folder-specific sanitization
        if not nzbname:
            # Prevent embedded password from being damaged by sanitize and trimming
            nzbname = os.path.split(name)[1]

    try:
        nzo = nzbstuff.NzbObject(name, pp, script, data, cat=cat, priority=priority, nzbname=nzbname,
                                 nzo_info=nzo_info, url=url, reuse=reuse, dup_check=dup_check)
        if not nzo.password:
            nzo.password = password
    except TypeError:
        # Duplicate, ignore
        if nzo_id:
            sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
        nzo = None
    except ValueError:
        # Empty, but correct file
        return -1, nzo_ids
    except:
        if data.find("<nzb") >= 0 > data.find("</nzb"):
            # Opening tag without closing tag: looks like an incomplete file, retry
            return -2, nzo_ids
        else:
            # Something else is wrong, show error
            logging.error(T('Error while adding %s, removing'), name, exc_info=True)
            return -1, nzo_ids

    if nzo:
        if nzo_id:
            # Re-use existing nzo_id, when a "future" job gets its payload
            sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
            nzo.nzo_id = nzo_id
        nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo, quiet=reuse))
        nzo.update_rating()
    try:
        if not keep:
            misc.remove_file(path)
    except:
        logging.error(T('Error removing %s'), misc.clip_path(path))
        logging.info("Traceback: ", exc_info=True)
        return 1, nzo_ids

    return 0, nzo_ids
Beispiel #29
0
def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=None, keep=False,
                       priority=None, url='', nzbname=None, password=None, nzo_id=None):
    """ Analyse ZIP file and create job(s).
        Accepts ZIP files with ONLY nzb/nfo/folder files in it.
        returns (status, nzo_ids)
            status: -1==Error/Retry, 0==OK, 1==Ignore (no NZBs, or the archive could not be deleted)
    """
    nzo_ids = []
    if catdir is None:
        catdir = cat

    filename, cat = name_to_cat(filename, catdir)

    status, zf, extension = is_archive(path)

    if status != 0:
        return status, []

    names = zf.namelist()
    # Count embedded NZBs; an archive without any is ignored (status 1)
    nzbcount = sum(1 for name in names if name.lower().endswith('.nzb'))

    if nzbcount:
        status = 0
        if nzbcount != 1:
            # Multiple NZBs: the supplied job name cannot apply to all of them
            nzbname = None
        try:
            for name in names:
                if name.lower().endswith('.nzb'):
                    try:
                        data = zf.read(name)
                    except:
                        logging.error(T('Cannot read %s'), name, exc_info=True)
                        return -1, []
                    name = os.path.basename(name)
                    if data:
                        nzo = None
                        try:
                            nzo = nzbstuff.NzbObject(name, pp, script, data, cat=cat, url=url,
                                                     priority=priority, nzbname=nzbname)
                            if not nzo.password:
                                nzo.password = password
                        except (TypeError, ValueError):
                            # Duplicate or empty, ignore
                            pass
                        except:
                            # Something else is wrong, show error
                            logging.error(T('Error while adding %s, removing'), name, exc_info=True)

                        if nzo:
                            if nzo_id:
                                # Re-use existing nzo_id, when a "future" job gets its payload
                                sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
                                nzo.nzo_id = nzo_id
                                nzo_id = None
                            nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo))
                            nzo.update_rating()
        finally:
            # Always release the archive handle, even if queueing raised;
            # it must also be closed before the archive file can be removed
            zf.close()
        try:
            if not keep:
                misc.remove_file(path)
        except:
            logging.error(T('Error removing %s'), misc.clip_path(path))
            logging.info("Traceback: ", exc_info=True)
            status = 1
    else:
        zf.close()
        status = 1

    return status, nzo_ids
Beispiel #30
0
    def abort(self):
        """ Abort running instance and delete generated files

            Stops the active unrar child process (graceful quit command
            first, then a hard kill), wakes the thread waiting for the
            next file, clears the pending/successful set bookkeeping and
            removes any already-extracted files.
        """
        # Only act once, and only when a set is actually being processed
        if not self.killed and self.cur_setname:
            logging.info('Aborting DirectUnpack for %s', self.cur_setname)
            self.killed = True

            # Save reference to the first rarfile
            # NOTE(review): presumably self.rarfile_nzf can be cleared by
            # other state changes during teardown — confirm; the local copy
            # keeps the cleanup below consistent
            rarfile_nzf = self.rarfile_nzf

            # Abort Unrar
            if self.active_instance:
                # First we try to abort gracefully
                try:
                    # 'Q' on stdin asks unrar to quit
                    self.active_instance.stdin.write('Q\n')
                    time.sleep(0.2)
                except IOError:
                    # Pipe already closed; process is presumably gone
                    pass

                # Now force kill and give it a bit of time
                try:
                    self.active_instance.kill()
                    time.sleep(0.2)
                except AttributeError:
                    # Already killed by the Quit command
                    pass

            # Wake up the thread
            with self.next_file_lock:
                self.next_file_lock.notify()

            # No new sets
            self.next_sets = []
            self.success_sets = {}

            # Remove files
            if self.unpack_dir_info:
                extraction_path, _, _, one_folder, _ = self.unpack_dir_info
                # In case of flat-unpack we need to remove the files manually
                if one_folder:
                    # RarFile can fail for mysterious reasons
                    try:
                        # List the archive contents so each extracted file
                        # can be deleted individually
                        rar_contents = RarFile(os.path.join(
                            self.nzo.downpath, rarfile_nzf.filename),
                                               all_names=True).filelist()
                        for rm_file in rar_contents:
                            # Flat-unpack, so remove foldername from RarFile output
                            f = os.path.join(extraction_path,
                                             os.path.basename(rm_file))
                            remove_file(f)
                    except:
                        # The user will have to remove it themselves
                        logging.info(
                            'Failed to clean Direct Unpack after aborting %s',
                            rarfile_nzf.filename,
                            exc_info=True)
                else:
                    # We can just remove the whole path
                    remove_all(extraction_path, recursive=True)
                # Remove dir-info
                self.unpack_dir_info = None

            # Reset settings
            self.reset_active()
Beispiel #31
0
    def abort(self):
        """ Abort running instance and delete generated files

            Stops the active unrar child process (graceful quit command
            first, then a hard kill followed by reaping it), wakes the
            thread waiting for the next file, clears the pending and
            successful set bookkeeping and removes any already-extracted
            files.
        """
        # Only act once, and only when a set is actually being processed
        if not self.killed and self.cur_setname:
            logging.info('Aborting DirectUnpack for %s', self.cur_setname)
            self.killed = True

            # Save reference to the first rarfile
            rarfile_nzf = self.rarfile_nzf

            # Abort Unrar
            if self.active_instance:
                # First we try to abort gracefully
                try:
                    # 'Q' on stdin asks unrar to quit
                    self.active_instance.stdin.write('Q\n')
                    time.sleep(0.2)
                except IOError:
                    # Pipe already closed; process is presumably gone
                    pass

                # Now force kill and give it a bit of time
                self.active_instance.kill()
                time.sleep(0.2)

                # Have to collect the return-code to avoid zombie
                # But it will block forever if the process is in special state.
                # That should never happen, but it can happen on broken unrar's
                # NOTE: poll() returns the exit code, which can be 0 (falsy)
                # on a clean exit, so compare against None explicitly instead
                # of relying on truthiness
                if self.active_instance.poll() is not None:
                    self.active_instance.communicate()
                else:
                    # It is still running?!? This should never happen
                    # Wait a little bit longer just to be sure..
                    time.sleep(2.0)
                    if self.active_instance.poll() is None:
                        logging.warning(T('Unable to stop the unrar process.'))

            # Wake up the thread
            with self.next_file_lock:
                self.next_file_lock.notify()

            # No new sets
            self.next_sets = []
            self.success_sets = {}

            # Remove files
            if self.unpack_dir_info:
                extraction_path, _, _, one_folder, _ = self.unpack_dir_info
                # In case of flat-unpack we need to remove the files manually
                if one_folder:
                    # RarFile can fail for mysterious reasons
                    try:
                        # List the archive contents so each extracted file
                        # can be deleted individually
                        rar_contents = RarFile(os.path.join(
                            self.nzo.downpath, rarfile_nzf.filename),
                                               all_names=True).filelist()
                        for rm_file in rar_contents:
                            # Flat-unpack, so remove foldername from RarFile output
                            f = os.path.join(extraction_path,
                                             os.path.basename(rm_file))
                            remove_file(f)
                    except:
                        # The user will have to remove it themselves
                        logging.info(
                            'Failed to clean Direct Unpack after aborting %s',
                            rarfile_nzf.filename,
                            exc_info=True)
                else:
                    # We can just remove the whole path
                    remove_all(extraction_path, recursive=True)
                # Remove dir-info
                self.unpack_dir_info = None

            # Reset settings
            self.reset_active()