Example #1
    def run(self):
        import sabnzbd.nzbqueue
        while 1:
            job = self.queue.get()
            if not job:
                logging.info("Shutting down")
                break

            nzo, nzf = job

            if nzf:
                sabnzbd.CheckFreeSpace()
                filename = sanitize_filename(nzf.filename)
                nzf.filename = filename

                dupe = nzo.check_for_dupe(nzf)

                filepath = get_filepath(cfg.download_dir.get_path(), nzo, filename)

                if filepath:
                    logging.info('Decoding %s %s', filepath, nzf.type)
                    try:
                        filepath = _assemble(nzf, filepath, dupe)
                    except IOError, (errno, strerror):
                        # 28 == disk full => pause downloader
                        if errno == 28:
                            logging.error(Ta('Disk full! Forcing Pause'))
                        else:
                            logging.error(Ta('Disk error on creating file %s'), latin1(filepath))
                        # Pause without saving
                        sabnzbd.downloader.Downloader.do.pause(save=False)
                    except:
                        logging.error('Fatal error in Assembler', exc_info = True)
                        break
Example #2
    def run(self):
        while 1:
            job = self.queue.get()
            if not job:
                logging.info("Shutting down")
                break

            nzo, nzf = job

            if nzf:
                # Check if enough disk space is free, if not pause downloader and send email
                if diskspace(force=True)['download_dir'][1] < (
                        cfg.download_free.get_float() + nzf.bytes) / GIGI:
                    # Only warn and email once
                    if not sabnzbd.downloader.Downloader.do.paused:
                        logging.warning(
                            T('Too little diskspace forcing PAUSE'))
                        # Pause downloader, but don't save, since the disk is almost full!
                        sabnzbd.downloader.Downloader.do.pause()
                        sabnzbd.emailer.diskfull()
                        # Abort all direct unpackers, just to be sure
                        sabnzbd.directunpacker.abort_all()

                    # Place job back in queue and wait 30 seconds in the hope it gets resolved
                    self.process(job)
                    sleep(30)
                    continue

                # Prepare filename
                nzo.verify_nzf_filename(nzf)
                nzf.filename = sanitize_filename(nzf.filename)
                filepath = get_filepath(long_path(cfg.download_dir.get_path()),
                                        nzo, nzf.filename)
                nzf.filename = get_filename(filepath)

                if filepath:
                    logging.info('Decoding %s %s', filepath, nzf.type)
                    try:
                        filepath = self.assemble(nzf, filepath)
                    except IOError, (errno, strerror):
                        # If job was deleted or in active post-processing, ignore error
                        if not nzo.deleted and not nzo.is_gone() and not nzo.pp_active:
                            # 28 == disk full => pause downloader
                            if errno == 28:
                                logging.error(T('Disk full! Forcing Pause'))
                            else:
                                logging.error(
                                    T('Disk error on creating file %s'),
                                    clip_path(filepath))
                            # Log traceback
                            logging.info('Traceback: ', exc_info=True)
                            # Pause without saving
                            sabnzbd.downloader.Downloader.do.pause()
                        continue
                    except:
                        logging.error(T('Fatal error in Assembler'),
                                      exc_info=True)
                        break
Example #3
    def run(self):
        while 1:
            job = self.queue.get()
            if not job:
                logging.info("Shutting down")
                break

            nzo, nzf = job

            if nzf:
                # Check if enough disk space is free, if not pause downloader and send email
                if diskspace(force=True)['download_dir'][1] < (cfg.download_free.get_float() + nzf.bytes) / GIGI:
                    # Only warn and email once
                    if not sabnzbd.downloader.Downloader.do.paused:
                        logging.warning(T('Too little diskspace forcing PAUSE'))
                        # Pause downloader, but don't save, since the disk is almost full!
                        sabnzbd.downloader.Downloader.do.pause()
                        sabnzbd.emailer.diskfull()
                        # Abort all direct unpackers, just to be sure
                        sabnzbd.directunpacker.abort_all()

                    # Place job back in queue and wait 30 seconds in the hope it gets resolved
                    self.process(job)
                    sleep(30)
                    continue

                # Prepare filename
                nzo.verify_nzf_filename(nzf)
                nzf.filename = sanitize_filename(nzf.filename)
                filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, nzf.filename)
                nzf.filename = get_filename(filepath)

                if filepath:
                    logging.info('Decoding %s %s', filepath, nzf.type)
                    try:
                        filepath = self.assemble(nzf, filepath)
                    except IOError, (errno, strerror):
                        # If job was deleted or in active post-processing, ignore error
                        if not nzo.deleted and not nzo.is_gone() and not nzo.pp_active:
                            # 28 == disk full => pause downloader
                            if errno == 28:
                                logging.error(T('Disk full! Forcing Pause'))
                            else:
                                logging.error(T('Disk error on creating file %s'), clip_path(filepath))
                            # Log traceback
                            logging.info('Traceback: ', exc_info=True)
                            # Pause without saving
                            sabnzbd.downloader.Downloader.do.pause()
                        continue
                    except:
                        logging.error(T('Fatal error in Assembler'), exc_info=True)
                        break
Example #4
    def run(self):
        import sabnzbd.nzbqueue
        while 1:
            job = self.queue.get()
            if not job:
                logging.info("Shutting down")
                break

            nzo, nzf = job

            if nzf:
                sabnzbd.CheckFreeSpace()
                filename = sanitize_filename(nzf.filename)
                nzf.filename = filename

                dupe = nzo.check_for_dupe(nzf)

                filepath = get_filepath(cfg.download_dir.get_path(), nzo,
                                        filename)

                if filepath:
                    logging.info('Decoding %s %s', filepath, nzf.type)
                    try:
                        filepath = _assemble(nzf, filepath, dupe)
                    except IOError, (errno, strerror):
                        if nzo.deleted:
                            # Job was deleted, ignore error
                            pass
                        else:
                            # 28 == disk full => pause downloader
                            if errno == 28:
                                logging.error(Ta('Disk full! Forcing Pause'))
                            else:
                                logging.error(
                                    Ta('Disk error on creating file %s'),
                                    latin1(filepath))
                            # Pause without saving
                            sabnzbd.downloader.Downloader.do.pause(save=False)
                    except:
                        logging.error('Fatal error in Assembler',
                                      exc_info=True)
                        break
Example #5
    def run(self):
        while 1:
            job = self.queue.get()
            if not job:
                logging.info("Shutting down")
                break

            nzo, nzf = job

            if nzf:
                sabnzbd.CheckFreeSpace()
                # We allow win_devices because otherwise par2cmdline fails to repair
                filename = sanitize_filename(nzf.filename,
                                             allow_win_devices=True)
                nzf.filename = filename

                dupe = nzo.check_for_dupe(nzf)

                filepath = get_filepath(long_path(cfg.download_dir.get_path()),
                                        nzo, filename)

                if filepath:
                    logging.info('Decoding %s %s', filepath, nzf.type)
                    try:
                        filepath = _assemble(nzf, filepath, dupe)
                    except IOError, (errno, strerror):
                        # If job was deleted, ignore error
                        if not nzo.is_gone():
                            # 28 == disk full => pause downloader
                            if errno == 28:
                                logging.error(T('Disk full! Forcing Pause'))
                            else:
                                logging.error(
                                    T('Disk error on creating file %s'),
                                    clip_path(filepath))
                            # Pause without saving
                            sabnzbd.downloader.Downloader.do.pause(save=False)
                        continue
                    except:
                        logging.error(T('Fatal error in Assembler'),
                                      exc_info=True)
                        break
Example #6
    def run(self):
        while 1:
            job = self.queue.get()
            if not job:
                logging.info("Shutting down")
                break

            nzo, nzf = job

            if nzf:
                sabnzbd.CheckFreeSpace()
                # We allow win_devices because otherwise par2cmdline fails to repair
                filename = sanitize_filename(nzf.filename, allow_win_devices=True)
                nzf.filename = filename

                dupe = nzo.check_for_dupe(nzf)

                filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, filename)

                if filepath:
                    logging.info('Decoding %s %s', filepath, nzf.type)
                    try:
                        filepath = _assemble(nzf, filepath, dupe)
                    except IOError, (errno, strerror):
                        # If job was deleted, ignore error
                        if not nzo.is_gone():
                            # 28 == disk full => pause downloader
                            if errno == 28:
                                logging.error(T('Disk full! Forcing Pause'))
                            else:
                                logging.error(T('Disk error on creating file %s'), clip_path(filepath))
                            # Pause without saving
                            sabnzbd.downloader.Downloader.do.pause(save=False)
                        continue
                    except:
                        logging.error(T('Fatal error in Assembler'), exc_info=True)
                        break
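
All six Assembler examples above are variations of the same queue-driven worker thread: `run()` blocks on `self.queue.get()`, a falsy job acts as the shutdown sentinel, disk space is checked, the filename is sanitized, and the file is assembled inside a try/except that pauses the downloader on disk errors. The following is a minimal, generic sketch of that loop in modern Python 3 (the examples themselves are Python 2); the class and method names here are illustrative only, not SABnzbd's actual API.

    import logging
    import queue
    import threading

    class Worker(threading.Thread):
        """Minimal queue-driven worker mirroring the loop in the examples above."""

        def __init__(self):
            super().__init__()
            self.queue = queue.Queue()

        def process(self, job):
            # Enqueue a job; None acts as the shutdown sentinel.
            self.queue.put(job)

        def stop(self):
            self.process(None)

        def run(self):
            while True:
                job = self.queue.get()
                if not job:
                    logging.info("Shutting down")
                    break
                try:
                    self.handle(job)
                except Exception:
                    # A fatal error ends the thread, as in the Assembler examples.
                    logging.error("Fatal error in worker", exc_info=True)
                    break

        def handle(self, job):
            # Replace with real work, e.g. assembling a file from the job's parts.
            logging.info("Processing %s", job)
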
Example #7
    def run(self):
        logging.info('URLGrabber starting up')
        self.shutdown = False

        while not self.shutdown:
            (url, future_nzo) = self.queue.get()

            if not url:
                # stop signal, go test self.shutdown
                continue

            if future_nzo:
                # Re-queue when too early and still active
                if future_nzo.wait and future_nzo.wait > time.time():
                    self.add(url, future_nzo)
                    time.sleep(1.0)
                    continue
                # Paused
                if future_nzo.status == Status.PAUSED:
                    self.add(url, future_nzo)
                    time.sleep(1.0)
                    continue

            url = url.replace(' ', '')

            try:
                if future_nzo:
                    # If nzo entry deleted, give up
                    try:
                        deleted = future_nzo.deleted
                    except AttributeError:
                        deleted = True
                    if deleted:
                        logging.debug('Dropping URL %s, job entry missing', url)
                        continue

                filename = None
                category = None
                gzipped = False
                nzo_info = {}
                wait = 0
                retry = True
                fn = None

                logging.info('Grabbing URL %s', url)
                try:
                    fn = _build_request(url)
                except Exception, e:
                    # Cannot list exceptions here, because of unpredictability over platforms
                    error0 = str(sys.exc_info()[0]).lower()
                    error1 = str(sys.exc_info()[1]).lower()
                    logging.debug('Error "%s" trying to get the url %s', error1, url)
                    if 'certificate_verify_failed' in error1 or 'certificateerror' in error0:
                        msg = T('Server %s uses an untrusted HTTPS certificate') % ''
                        msg += ' - https://sabnzbd.org/certificate-errors'
                        retry = False
                    elif 'nodename nor servname provided' in error1:
                        msg = T('Server name does not resolve')
                        retry = False
                    elif '401' in error1 or 'unauthorized' in error1:
                        msg = T('Unauthorized access')
                        retry = False
                    elif '404' in error1:
                        msg = T('File not on server')
                        retry = False
                    elif hasattr(e, 'headers') and 'retry-after' in e.headers:
                        # Catch if the server sends a retry (e.headers is case-INsensitive)
                        wait = misc.int_conv(e.headers['retry-after'])

                new_url = dereferring(url, fn)
                if new_url:
                    self.add(new_url, future_nzo)
                    continue

                if fn:
                    for hdr in fn.headers:
                        try:
                            item = hdr.lower()
                            value = fn.headers[hdr]
                        except:
                            continue
                        if item in ('content-encoding',) and value == 'gzip':
                            gzipped = True
                        if item in ('category_id', 'x-dnzb-category'):
                            category = value
                        elif item in ('x-dnzb-moreinfo',):
                            nzo_info['more_info'] = value
                        elif item in ('x-dnzb-name',):
                            filename = value
                            if not filename.endswith('.nzb'):
                                filename += '.nzb'
                        elif item == 'x-dnzb-propername':
                            nzo_info['propername'] = value
                        elif item == 'x-dnzb-episodename':
                            nzo_info['episodename'] = value
                        elif item == 'x-dnzb-year':
                            nzo_info['year'] = value
                        elif item == 'x-dnzb-failure':
                            nzo_info['failure'] = value
                        elif item == 'x-dnzb-details':
                            nzo_info['details'] = value
                        elif item == 'x-dnzb-password':
                            nzo_info['password'] = value
                        elif item == 'retry-after':
                            wait = misc.int_conv(value)

                        # Rating fields
                        if item in _RARTING_FIELDS:
                            nzo_info[item] = value

                        if not filename and "filename=" in value:
                            filename = value[value.index("filename=") + 9:].strip(';').strip('"')

                if wait:
                    # For sites that have a rate-limiting attribute
                    msg = ''
                    retry = True
                    fn = None
                elif retry:
                    fn, msg, retry, wait, data = _analyse(fn, url)

                if not fn:
                    if retry:
                        logging.info('Retry URL %s', url)
                        self.add(url, future_nzo, wait)
                    else:
                        bad_fetch(future_nzo, url, msg)
                    continue

                if not filename:
                    filename = os.path.basename(url)
                elif '&nzbname=' in filename:
                    # Sometimes the filename contains the full URL, duh!
                    filename = filename[filename.find('&nzbname=') + 9:]

                pp = future_nzo.pp
                script = future_nzo.script
                cat = future_nzo.cat
                if (cat is None or cat == '*') and category:
                    cat = misc.cat_convert(category)
                priority = future_nzo.priority
                nzbname = future_nzo.custom_name

                # process data
                if gzipped:
                    filename += '.gz'
                if not data:
                    try:
                        data = fn.read()
                    except (IncompleteRead, IOError):
                        bad_fetch(future_nzo, url, T('Server could not complete request'))
                        fn.close()
                        continue
                fn.close()

                if '<nzb' in data and misc.get_ext(filename) != '.nzb':
                    filename += '.nzb'

                # Sanitize filename first (also removing forbidden Windows-names)
                filename = misc.sanitize_filename(filename)

                # Write data to temp file
                path = os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)
                path = os.path.join(path, filename)
                f = open(path, 'wb')
                f.write(data)
                f.close()
                del data

                # Check if nzb file
                if misc.get_ext(filename) in ('.nzb', '.gz', '.bz2'):
                    res = dirscanner.ProcessSingleFile(filename, path, pp=pp, script=script, cat=cat, priority=priority,
                                                       nzbname=nzbname, nzo_info=nzo_info, url=future_nzo.url, keep=False,
                                                       nzo_id=future_nzo.nzo_id)[0]
                    if res:
                        if res == -2:
                            logging.info('Incomplete NZB, retry after 5 min %s', url)
                            when = 300
                        elif res == -1:
                            # Error, but no reason to retry. Warning is already given
                            NzbQueue.do.remove(future_nzo.nzo_id, add_to_history=False)
                            continue
                        else:
                            logging.info('Unknown error fetching NZB, retry after 2 min %s', url)
                            when = 120
                        self.add(url, future_nzo, when)
                # Check if a supported archive
                else:
                    status, zf, exp_ext = dirscanner.is_archive(path)
                    if status == 0:
                        if misc.get_ext(filename) not in ('.rar', '.zip', '.7z'):
                            filename = filename + exp_ext
                            os.rename(path, path + exp_ext)
                            path = path + exp_ext

                        dirscanner.ProcessArchiveFile(filename, path, pp, script, cat, priority=priority,
                                                     nzbname=nzbname, url=future_nzo.url, keep=False,
                                                     nzo_id=future_nzo.nzo_id)
                    else:
                        # Not a supported filetype, not an nzb (text/html etc.)
                        try:
                            os.remove(fn)
                        except:
                            pass
                        logging.info('Unknown filetype when fetching NZB, retry after 30s %s', url)
                        self.add(url, future_nzo, 30)
            except:
                logging.error(T('URLGRABBER CRASHED'), exc_info=True)
                logging.debug("URLGRABBER Traceback: ", exc_info=True)
Example #8
    def run(self):
        logging.info('URLGrabber starting up')
        self.shutdown = False

        while not self.shutdown:
            (url, future_nzo) = self.queue.get()

            if not url:
                # stop signal, go test self.shutdown
                continue

            if future_nzo:
                # Re-queue when too early and still active
                if future_nzo.url_wait and future_nzo.url_wait > time.time():
                    self.add(url, future_nzo)
                    time.sleep(1.0)
                    continue
                # Paused
                if future_nzo.status == Status.PAUSED:
                    self.add(url, future_nzo)
                    time.sleep(1.0)
                    continue

            url = url.replace(' ', '')

            try:
                if future_nzo:
                    # If nzo entry deleted, give up
                    try:
                        deleted = future_nzo.deleted
                    except AttributeError:
                        deleted = True
                    if deleted:
                        logging.debug('Dropping URL %s, job entry missing', url)
                        continue

                filename = None
                category = None
                gzipped = False
                nzo_info = {}
                wait = 0
                retry = True
                fetch_request = None

                logging.info('Grabbing URL %s', url)
                try:
                    fetch_request = _build_request(url)
                except Exception, e:
                    # Cannot list exceptions here, because of unpredictability over platforms
                    error0 = str(sys.exc_info()[0]).lower()
                    error1 = str(sys.exc_info()[1]).lower()
                    logging.debug('Error "%s" trying to get the url %s', error1, url)
                    if 'certificate_verify_failed' in error1 or 'certificateerror' in error0:
                        msg = T('Server %s uses an untrusted HTTPS certificate') % ''
                        msg += ' - https://sabnzbd.org/certificate-errors'
                        retry = False
                    elif 'nodename nor servname provided' in error1:
                        msg = T('Server name does not resolve')
                        retry = False
                    elif '401' in error1 or 'unauthorized' in error1:
                        msg = T('Unauthorized access')
                        retry = False
                    elif '404' in error1:
                        msg = T('File not on server')
                        retry = False
                    elif hasattr(e, 'headers') and 'retry-after' in e.headers:
                        # Catch if the server sends a retry (e.headers is case-INsensitive)
                        wait = misc.int_conv(e.headers['retry-after'])

                # Check if dereference is used
                new_url = dereferring(url, fetch_request)
                if new_url:
                    self.add(new_url, future_nzo)
                    continue

                if fetch_request:
                    for hdr in fetch_request.headers:
                        try:
                            item = hdr.lower()
                            value = fetch_request.headers[hdr]
                        except:
                            continue
                        if item in ('content-encoding',) and value == 'gzip':
                            gzipped = True
                        if item in ('category_id', 'x-dnzb-category'):
                            category = value
                        elif item in ('x-dnzb-moreinfo',):
                            nzo_info['more_info'] = value
                        elif item in ('x-dnzb-name',):
                            filename = value
                            if not filename.endswith('.nzb'):
                                filename += '.nzb'
                        elif item == 'x-dnzb-propername':
                            nzo_info['propername'] = value
                        elif item == 'x-dnzb-episodename':
                            nzo_info['episodename'] = value
                        elif item == 'x-dnzb-year':
                            nzo_info['year'] = value
                        elif item == 'x-dnzb-failure':
                            nzo_info['failure'] = value
                        elif item == 'x-dnzb-details':
                            nzo_info['details'] = value
                        elif item == 'x-dnzb-password':
                            nzo_info['password'] = value
                        elif item == 'retry-after':
                            wait = misc.int_conv(value)

                        # Rating fields
                        if item in _RARTING_FIELDS:
                            nzo_info[item] = value

                        # Get filename from Content-Disposition header
                        if not filename and "filename=" in value:
                            filename = value[value.index("filename=") + 9:].strip(';').strip('"')

                if wait:
                    # For sites that have a rate-limiting attribute
                    msg = ''
                    retry = True
                    fetch_request = None
                elif retry:
                    fetch_request, msg, retry, wait, data = _analyse(fetch_request, future_nzo)

                if not fetch_request:
                    if retry:
                        logging.info('Retry URL %s', url)
                        self.add(url, future_nzo, wait)
                    else:
                        self.fail_to_history(future_nzo, url, msg)
                    continue

                if not filename:
                    filename = os.path.basename(urllib2.unquote(url))

                    # URL was redirected, maybe the redirect has better filename?
                    # Check if the original URL has extension
                    if url != fetch_request.url and misc.get_ext(filename) not in VALID_NZB_FILES:
                        filename = os.path.basename(urllib2.unquote(fetch_request.url))
                elif '&nzbname=' in filename:
                    # Sometimes the filename contains the full URL, duh!
                    filename = filename[filename.find('&nzbname=') + 9:]

                pp = future_nzo.pp
                script = future_nzo.script
                cat = future_nzo.cat
                if (cat is None or cat == '*') and category:
                    cat = misc.cat_convert(category)
                priority = future_nzo.priority
                nzbname = future_nzo.custom_name

                # process data
                if gzipped:
                    filename += '.gz'
                if not data:
                    try:
                        data = fetch_request.read()
                    except (IncompleteRead, IOError):
                        self.fail_to_history(future_nzo, url, T('Server could not complete request'))
                        fetch_request.close()
                        continue
                fetch_request.close()

                if '<nzb' in data and misc.get_ext(filename) != '.nzb':
                    filename += '.nzb'

                # Sanitize filename first (also removing forbidden Windows-names)
                filename = misc.sanitize_filename(filename)

                # Write data to temp file
                path = os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)
                path = os.path.join(path, filename)
                f = open(path, 'wb')
                f.write(data)
                f.close()
                del data

                # Check if nzb file
                if misc.get_ext(filename) in VALID_NZB_FILES:
                    res = dirscanner.ProcessSingleFile(filename, path, pp=pp, script=script, cat=cat, priority=priority,
                                                       nzbname=nzbname, nzo_info=nzo_info, url=future_nzo.url, keep=False,
                                                       nzo_id=future_nzo.nzo_id)[0]
                    if res:
                        if res == -2:
                            logging.info('Incomplete NZB, retry after 5 min %s', url)
                            when = 300
                        elif res == -1:
                            # Error, but no reason to retry. Warning is already given
                            NzbQueue.do.remove(future_nzo.nzo_id, add_to_history=False)
                            continue
                        else:
                            logging.info('Unknown error fetching NZB, retry after 2 min %s', url)
                            when = 120
                        self.add(url, future_nzo, when)

                else:
                    # Check if a supported archive
                    status, zf, exp_ext = dirscanner.is_archive(path)
                    if status == 0:
                        if misc.get_ext(filename) not in ('.rar', '.zip', '.7z'):
                            filename = filename + exp_ext
                            os.rename(path, path + exp_ext)
                            path = path + exp_ext

                        dirscanner.ProcessArchiveFile(filename, path, pp, script, cat, priority=priority,
                                                     nzbname=nzbname, url=future_nzo.url, keep=False,
                                                     nzo_id=future_nzo.nzo_id)
                    else:
                        # Not a supported filetype, not an nzb (text/html etc.)
                        try:
                            os.remove(fetch_request)
                        except:
                            pass
                        logging.info('Unknown filetype when fetching NZB, retry after 30s %s', url)
                        self.add(url, future_nzo, 30)
            except:
                logging.error(T('URLGRABBER CRASHED'), exc_info=True)
                logging.debug("URLGRABBER Traceback: ", exc_info=True)
Example #9
    def run(self):
        logging.info('URLGrabber starting up')
        self.shutdown = False

        while not self.shutdown:
            # Don't pound the website!
            time.sleep(5.0)

            (url, future_nzo) = self.queue.get()

            if not url:
                # stop signal, go test self.shutdown
                continue
            if future_nzo and future_nzo.wait and future_nzo.wait > time.time():
                # Re-queue when too early and still active

                self.add(url, future_nzo)
                continue
            url = url.replace(' ', '')

            try:
                if future_nzo:
                    # If nzo entry deleted, give up
                    try:
                        deleted = future_nzo.deleted
                    except AttributeError:
                        deleted = True
                    if deleted:
                        logging.debug('Dropping URL %s, job entry missing', url)
                        continue

                logging.info('Grabbing URL %s', url)
                req = urllib2.Request(url)
                req.add_header('User-Agent', 'SABnzbd+/%s' % sabnzbd.version.__version__)
                if not [True for item in _BAD_GZ_HOSTS if item in url]:
                    req.add_header('Accept-encoding', 'gzip')
                filename = None
                category = None
                gzipped = False
                nzo_info = {}
                wait = 0
                retry = True
                fn = None
                try:
                    fn = urllib2.urlopen(req)
                except:
                    # Cannot list exceptions here, because of unpredictability over platforms
                    error0 = str(sys.exc_info()[0]).lower()
                    error1 = str(sys.exc_info()[1]).lower()
                    logging.debug('Error "%s" trying to get the url %s', error1, url)
                    if 'certificate_verify_failed' in error1 or 'certificateerror' in error0:
                        msg = T('Server %s uses an untrusted HTTPS certificate') % ''
                        retry = False
                    elif 'nodename nor servname provided' in error1:
                        msg = T('Server name does not resolve')
                        retry = False
                    elif '401' in error1 or 'unauthorized' in error1:
                        msg = T('Unauthorized access')
                        retry = False
                    elif '404' in error1:
                        msg = T('File not on server')
                        retry = False

                new_url = dereferring(url, fn)
                if new_url:
                    self.add(new_url, future_nzo)
                    continue

                if fn:
                    for hdr in fn.headers:
                        try:
                            item = hdr.lower()
                            value = fn.headers[hdr]
                        except:
                            continue
                        if item in ('content-encoding',) and value == 'gzip':
                            gzipped = True
                        if item in ('category_id', 'x-dnzb-category'):
                            category = value
                        elif item in ('x-dnzb-moreinfo',):
                            nzo_info['more_info'] = value
                        elif item in ('x-dnzb-name',):
                            filename = value
                            if not filename.endswith('.nzb'):
                                filename += '.nzb'
                        elif item == 'x-dnzb-propername':
                            nzo_info['propername'] = value
                        elif item == 'x-dnzb-episodename':
                            nzo_info['episodename'] = value
                        elif item == 'x-dnzb-year':
                            nzo_info['year'] = value
                        elif item == 'x-dnzb-failure':
                            nzo_info['failure'] = value
                        elif item == 'x-dnzb-details':
                            nzo_info['details'] = value
                        elif item == 'retry-after':
                            # For NZBFinder
                            wait = misc.int_conv(value)

                        if not filename and "filename=" in value:
                            filename = value[value.index("filename=") + 9:].strip(';').strip('"')

                if wait:
                    # For sites that have a rate-limiting attribute
                    msg = ''
                    retry = True
                    fn = None
                elif retry:
                    fn, msg, retry, wait, data = _analyse(fn, url)

                if not fn:
                    if retry:
                        logging.info('Retry URL %s', url)
                        self.add(url, future_nzo, wait)
                    else:
                        bad_fetch(future_nzo, url, msg)
                    continue

                if not filename:
                    filename = os.path.basename(url) + '.nzb'
                elif '&nzbname=' in filename:
                    # Sometimes the filename contains the full URL, duh!
                    filename = filename[filename.find('&nzbname=') + 9:]

                pp = future_nzo.pp
                script = future_nzo.script
                cat = future_nzo.cat
                if (cat is None or cat == '*') and category:
                    cat = misc.cat_convert(category)
                priority = future_nzo.priority
                nzbname = future_nzo.custom_name

                # process data
                if gzipped:
                    filename = filename + '.gz'
                if not data:
                    data = fn.read()
                fn.close()

                # Sanitize filename first
                filename = misc.sanitize_filename(filename)

                # Write data to temp file
                path = os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)
                path = os.path.join(path, filename)
                f = open(path, 'wb')
                f.write(data)
                f.close()
                del data

                # Check if nzb file
                if os.path.splitext(filename)[1].lower() in ('.nzb', '.gz', '.bz2'):
                    res = dirscanner.ProcessSingleFile(filename, path, pp=pp, script=script, cat=cat, priority=priority,
                                                       nzbname=nzbname, nzo_info=nzo_info, url=future_nzo.url, keep=False,
                                                       nzo_id=future_nzo.nzo_id)[0]
                    if res:
                        if res == -2:
                            logging.info('Incomplete NZB, retry after 5 min %s', url)
                            when = 300
                        elif res == -1:
                            # Error, but no reason to retry. Warning is already given
                            NzbQueue.do.remove(future_nzo.nzo_id, add_to_history=False)
                            continue
                        else:
                            logging.info('Unknown error fetching NZB, retry after 2 min %s', url)
                            when = 120
                        self.add(url, future_nzo, when)
                # Check if a supported archive
                else:
                    if dirscanner.ProcessArchiveFile(filename, path, pp, script, cat, priority=priority,
                                                     nzbname=nzbname, url=future_nzo.url, keep=False,
                                                     nzo_id=future_nzo.nzo_id)[0]:
                        # Not a supported filetype, not an nzb (text/html etc.)
                        try:
                            os.remove(fn)
                        except:
                            pass
                        logging.info('Unknown filetype when fetching NZB, retry after 30s %s', url)
                        self.add(url, future_nzo, 30)
            except:
                logging.error(T('URLGRABBER CRASHED'), exc_info=True)
                logging.debug("URLGRABBER Traceback: ", exc_info=True)
Example #10
    def run(self):
        logging.info('URLGrabber starting up')
        self.shutdown = False

        while not self.shutdown:
            # Don't pound the website!
            time.sleep(5.0)

            (url, future_nzo) = self.queue.get()

            if not url:
                # stop signal, go test self.shutdown
                continue
            if future_nzo and future_nzo.wait and future_nzo.wait > time.time():
                # Re-queue when too early and still active

                self.add(url, future_nzo)
                continue
            url = url.replace(' ', '')

            try:
                if future_nzo:
                    # If nzo entry deleted, give up
                    try:
                        deleted = future_nzo.deleted
                    except AttributeError:
                        deleted = True
                    if deleted:
                        logging.debug('Dropping URL %s, job entry missing',
                                      url)
                        continue

                logging.info('Grabbing URL %s', url)
                req = urllib2.Request(url)
                req.add_header('User-Agent',
                               'SABnzbd+/%s' % sabnzbd.version.__version__)
                if not [True for item in _BAD_GZ_HOSTS if item in url]:
                    req.add_header('Accept-encoding', 'gzip')
                filename = None
                category = None
                gzipped = False
                nzo_info = {}
                wait = 0
                retry = True
                fn = None
                try:
                    fn = urllib2.urlopen(req)
                except:
                    # Cannot list exceptions here, because of unpredictability over platforms
                    error0 = str(sys.exc_info()[0]).lower()
                    error1 = str(sys.exc_info()[1]).lower()
                    logging.debug('Error "%s" trying to get the url %s',
                                  error1, url)
                    if 'certificate_verify_failed' in error1 or 'certificateerror' in error0:
                        msg = T('Server %s uses an untrusted HTTPS certificate') % ''
                        retry = False
                    elif 'nodename nor servname provided' in error1:
                        msg = T('Server name does not resolve')
                        retry = False
                    elif '401' in error1 or 'unauthorized' in error1:
                        msg = T('Unauthorized access')
                        retry = False
                    elif '404' in error1:
                        msg = T('File not on server')
                        retry = False

                new_url = dereferring(url, fn)
                if new_url:
                    self.add(new_url, future_nzo)
                    continue

                if fn:
                    for hdr in fn.headers:
                        try:
                            item = hdr.lower()
                            value = fn.headers[hdr]
                        except:
                            continue
                        if item in ('content-encoding', ) and value == 'gzip':
                            gzipped = True
                        if item in ('category_id', 'x-dnzb-category'):
                            category = value
                        elif item in ('x-dnzb-moreinfo', ):
                            nzo_info['more_info'] = value
                        elif item in ('x-dnzb-name', ):
                            filename = value
                            if not filename.endswith('.nzb'):
                                filename += '.nzb'
                        elif item == 'x-dnzb-propername':
                            nzo_info['propername'] = value
                        elif item == 'x-dnzb-episodename':
                            nzo_info['episodename'] = value
                        elif item == 'x-dnzb-year':
                            nzo_info['year'] = value
                        elif item == 'x-dnzb-failure':
                            nzo_info['failure'] = value
                        elif item == 'x-dnzb-details':
                            nzo_info['details'] = value
                        elif item == 'x-dnzb-password':
                            nzo_info['password'] = value
                        elif item == 'retry-after':
                            # For NZBFinder
                            wait = misc.int_conv(value)

                        if not filename and "filename=" in value:
                            filename = value[value.index("filename=") +
                                             9:].strip(';').strip('"')

                if wait:
                    # For sites that have a rate-limiting attribute
                    msg = ''
                    retry = True
                    fn = None
                elif retry:
                    fn, msg, retry, wait, data = _analyse(fn, url)

                if not fn:
                    if retry:
                        logging.info('Retry URL %s', url)
                        self.add(url, future_nzo, wait)
                    else:
                        bad_fetch(future_nzo, url, msg)
                    continue

                if not filename:
                    filename = os.path.basename(url) + '.nzb'
                elif '&nzbname=' in filename:
                    # Sometimes the filename contains the full URL, duh!
                    filename = filename[filename.find('&nzbname=') + 9:]

                pp = future_nzo.pp
                script = future_nzo.script
                cat = future_nzo.cat
                if (cat is None or cat == '*') and category:
                    cat = misc.cat_convert(category)
                priority = future_nzo.priority
                nzbname = future_nzo.custom_name

                # process data
                if gzipped:
                    filename = filename + '.gz'
                if not data:
                    data = fn.read()
                fn.close()

                # Sanitize filename first
                filename = misc.sanitize_filename(filename)

                # Write data to temp file
                path = os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)
                path = os.path.join(path, filename)
                f = open(path, 'wb')
                f.write(data)
                f.close()
                del data

                # Check if nzb file
                if os.path.splitext(filename)[1].lower() in ('.nzb', '.gz', '.bz2'):
                    res = dirscanner.ProcessSingleFile(
                        filename,
                        path,
                        pp=pp,
                        script=script,
                        cat=cat,
                        priority=priority,
                        nzbname=nzbname,
                        nzo_info=nzo_info,
                        url=future_nzo.url,
                        keep=False,
                        nzo_id=future_nzo.nzo_id)[0]
                    if res:
                        if res == -2:
                            logging.info(
                                'Incomplete NZB, retry after 5 min %s', url)
                            when = 300
                        elif res == -1:
                            # Error, but no reason to retry. Warning is already given
                            NzbQueue.do.remove(future_nzo.nzo_id,
                                               add_to_history=False)
                            continue
                        else:
                            logging.info(
                                'Unknown error fetching NZB, retry after 2 min %s',
                                url)
                            when = 120
                        self.add(url, future_nzo, when)
                # Check if a supported archive
                else:
                    if dirscanner.ProcessArchiveFile(
                            filename,
                            path,
                            pp,
                            script,
                            cat,
                            priority=priority,
                            nzbname=nzbname,
                            url=future_nzo.url,
                            keep=False,
                            nzo_id=future_nzo.nzo_id)[0]:
                        # Not a supported filetype, not an nzb (text/html etc.)
                        try:
                            os.remove(fn)
                        except:
                            pass
                        logging.info(
                            'Unknown filetype when fetching NZB, retry after 30s %s',
                            url)
                        self.add(url, future_nzo, 30)
            except:
                logging.error(T('URLGRABBER CRASHED'), exc_info=True)
                logging.debug("URLGRABBER Traceback: ", exc_info=True)