Ejemplo n.º 1
0
def _analyse_others(fn, url):
    """ Analyse respons of indexer
        returns fn|None, error-message|None, retry, wait-seconds
    """
    msg = ''
    wait = 0
    if not fn:
        logging.debug('No response from indexer, retry after 60 sec')
        return None, msg, True, 60
    try:
        f = open(fn, 'r')
        data = f.read(100)
        f.close()
    except:
        logging.debug('Problem with tempfile %s from indexer, retry after 60 sec', fn)
        return None, msg, True, 60

    # Check for an error response
    if not data:
        logging.debug('Received nothing from indexer, retry after 60 sec')
        return None, msg, True, 60

    if '.nzbsrus.' in url:
        # Partial support for nzbsrus.com's API
        if misc.match_str(data, RUS_FATAL):
            logging.debug('nzbsrus says: %s, abort', data)
            return None, data, False, 0
        if misc.match_str(data, RUS_15M):
            logging.debug('nzbsrus says: %s, wait 15m', data)
            return None, data, True, 900
        if misc.match_str(data, RUS_60M):
            logging.debug('nzbsrus says: %s, wait 60m', data)
            return None, data, True, 3600

    return fn, msg, False, 0
Ejemplo n.º 2
0
def _analyse_matrix(fn, matrix_id):
    """ Analyse respons of nzbmatrix
        returns fn|None, error-message|None, retry, wait-seconds
    """
    msg = ''
    wait = 0
    if not fn:
        logging.debug('No response from nzbmatrix, retry after 60 sec')
        return None, msg, True, 60
    try:
        f = open(fn, 'r')
        data = f.read(40).lower()
        f.close()
    except:
        logging.debug(
            'Problem with tempfile %s from nzbmatrix, retry after 60 sec', fn)
        return None, msg, True, 60

    # Check for an error response
    if data and '<!DOCTYPE' in data:
        # We got HTML, probably a temporary problem, keep trying
        msg = Ta('Invalid nzbmatrix report number %s') % matrix_id
        wait = 300
    elif data and data.startswith('error'):
        txt = misc.match_str(
            data,
            ('invalid_login', 'invalid_api', 'disabled_account', 'vip_only'))
        if txt:
            if 'vip' in txt:
                msg = Ta('You need an nzbmatrix VIP account to use the API')
            else:
                msg = (Ta('Invalid nzbmatrix credentials') + ' (%s)') % txt
            return None, msg, False, 0
        elif 'limit_reached' in data:
            msg = 'Too many nzbmatrix hits, waiting 10 min'
            wait = 600
        elif misc.match_str(data, ('daily_limit', 'limit is reached')):
            # Daily limit reached, just wait an hour before trying again
            msg = 'Daily limit nzbmatrix reached, waiting 1 hour'
            wait = 3600
        elif 'no_nzb_found' in data:
            msg = Ta('Invalid nzbmatrix report number %s') % matrix_id
            wait = 300
        else:
            # Check if we are required to wait - if so sleep the urlgrabber
            m = _RE_MATRIX_ERR.search(data)
            if m:
                wait = min(int(m.group(1)), 600)
            else:
                msg = Ta('Problem accessing nzbmatrix server (%s)') % data
                wait = 60
    if wait:
        # Return, but tell the urlgrabber to retry
        return None, msg, True, wait

    return fn, msg, False, 0
Ejemplo n.º 3
0
def _analyse_matrix(fn, matrix_id):
    """ Analyse respons of nzbmatrix
        returns fn|None, error-message|None, retry, wait-seconds
    """
    msg = ''
    wait = 0
    if not fn:
        logging.debug('No response from nzbmatrix, retry after 60 sec')
        return None, msg, True, 60
    try:
        f = open(fn, 'r')
        data = f.read(40).lower()
        f.close()
    except:
        logging.debug('Problem with tempfile %s from nzbmatrix, retry after 60 sec', fn)
        return None, msg, True, 60

    # Check for an error response
    if data and '<!DOCTYPE' in data:
        # We got HTML, probably a temporary problem, keep trying
        msg = Ta('Invalid nzbmatrix report number %s') % matrix_id
        wait = 300
    elif data and data.startswith('error'):
        txt = misc.match_str(data, ('invalid_login', 'invalid_api', 'disabled_account', 'vip_only'))
        if txt:
            if 'vip' in txt:
                msg = Ta('You need an nzbmatrix VIP account to use the API')
            else:
                msg = (Ta('Invalid nzbmatrix credentials') + ' (%s)') % txt
            return None, msg, False, 0
        elif 'limit_reached' in data:
            msg = 'Too many nzbmatrix hits, waiting 10 min'
            wait = 600
        elif misc.match_str(data, ('daily_limit', 'limit is reached')):
            # Daily limit reached, just wait an hour before trying again
            msg = 'Daily limit nzbmatrix reached, waiting 1 hour'
            wait = 3600
        elif 'no_nzb_found' in data:
            msg = Ta('Invalid nzbmatrix report number %s') % matrix_id
            wait = 300
        else:
            # Check if we are required to wait - if so sleep the urlgrabber
            m = _RE_MATRIX_ERR.search(data)
            if m:
                wait = min(int(m.group(1)), 600)
            else:
                msg = Ta('Problem accessing nzbmatrix server (%s)') % data
                wait = 60
    if wait:
        # Return, but tell the urlgrabber to retry
        return None, msg, True, wait

    return fn, msg, False, 0
Ejemplo n.º 4
0
def set_bonjour(host=None, port=None):
    """ Publish host/port combo through Bonjour.

        With no arguments, re-publishes the previously stored host/port;
        otherwise stores the new combo in _HOST_PORT first.
        Sets _BONJOUR_OBJECT to the registration object on success, None on failure.
    """
    global _HOST_PORT, _BONJOUR_OBJECT

    if not _HAVE_BONJOUR or not cfg.enable_bonjour():
        logging.info('No Bonjour/ZeroConfig support installed')
        return

    if host is None and port is None:
        host, port = _HOST_PORT
    else:
        _HOST_PORT = (host, port)

    scope = pybonjour.kDNSServiceInterfaceIndexAny
    zhost = None
    domain = None

    if match_str(host, ('localhost', '127.0.', '::1')):
        logging.info('Bonjour/ZeroConfig does not support "localhost"')
        # All implementations fail to implement "localhost" properly
        # A false address is published even when scope==kDNSServiceInterfaceIndexLocalOnly
        return

    # NOTE: removed dead computation of an unused '.local' suffix variable
    name = hostname()

    logging.debug('Try to publish in Bonjour as "%s" (%s:%s)', name, host,
                  port)
    try:
        refObject = pybonjour.DNSServiceRegister(
            interfaceIndex=scope,
            name='SABnzbd on %s:%s' % (name, port),
            regtype='_http._tcp',
            domain=domain,
            host=zhost,
            port=int(port),
            txtRecord=pybonjour.TXTRecord({
                'path': cfg.url_base(),
                'https': cfg.enable_https()
            }),
            callBack=_zeroconf_callback)
    except sabnzbd.utils.pybonjour.BonjourError as e:
        _BONJOUR_OBJECT = None
        logging.debug('Failed to start Bonjour service: %s', str(e))
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt still propagate
        _BONJOUR_OBJECT = None
        logging.debug(
            'Failed to start Bonjour service due to non-pybonjour related problem',
            exc_info=True)
    else:
        # BUGFIX: the worker thread must actually be started; merely
        # constructing Thread() never runs _bonjour_server, so the Bonjour
        # event loop would never process the registration.
        Thread(target=_bonjour_server, args=(refObject, )).start()
        _BONJOUR_OBJECT = refObject
        logging.debug('Successfully started Bonjour service')
Ejemplo n.º 5
0
def _analyse_others(fn, url):
    """ Analyse respons of indexer
        returns fn|None, error-message|None, retry, wait-seconds
    """
    msg = ''
    wait = 0
    if not fn:
        logging.debug('No response from indexer, retry after 60 sec')
        return None, msg, True, 60
    try:
        f = open(fn, 'r')
        data = f.read(100)
        f.close()
    except:
        logging.debug(
            'Problem with tempfile %s from indexer, retry after 60 sec', fn)
        return None, msg, True, 60

    # Check for an error response
    if not data:
        logging.debug('Received nothing from indexer, retry after 60 sec')
        return None, msg, True, 60

    if '.nzbsrus.' in url:
        # Partial support for nzbsrus.com's API
        if misc.match_str(data, RUS_FATAL):
            logging.debug('nzbsrus says: %s, abort', data)
            return None, data, False, 0
        if misc.match_str(data, RUS_15M):
            logging.debug('nzbsrus says: %s, wait 15m', data)
            return None, data, True, 900
        if misc.match_str(data, RUS_60M):
            logging.debug('nzbsrus says: %s, wait 60m', data)
            return None, data, True, 3600

    return fn, msg, False, 0
Ejemplo n.º 6
0
def _analyse_others(fn, url):
    """ Analyse respons of indexer
        returns fn|None, error-message|None, retry, wait-seconds
    """
    msg = ''
    wait = 0
    if not fn:
        logging.debug('No response from indexer, retry after 60 sec')
        return None, msg, True, 60
    try:
        f = open(fn, 'r')
        data = f.read(100)
        f.close()
    except:
        logging.debug('Problem with tempfile %s from indexer, retry after 60 sec', fn)
        return None, msg, True, 60

    # Check for an error response
    if not data:
        logging.debug('Received nothing from indexer, retry after 60 sec')
        return None, msg, True, 60

    if '.nzbsrus.' in url:
        # Provisional support for nzbsrus.com's lack of an API
        # Trying to make sense of their response
        # Their non-VIP limiting is particularly weak
        f = open(fn, 'r')
        data = f.read(10000)
        f.close()
        ldata = data[:100].lower()
        if misc.match_str(ldata, ('invalid link', 'nuked', 'deleted')):
            logging.debug('nzbsrus says: %s, abort', data)
            return None, data, False, 0
        if 'temporarily' in ldata:
            logging.debug('nzbsrus says: %s, retry', data)
            return None, data, True, 600
        if 'Upgrade To ViP' in data:
            logging.debug('nzbsrus says: upgrade to VIP, retry after an hour')
            return None, 'upgrade to VIP', True, 3600
        if 'Maintenance' in data:
            logging.debug('nzbsrus says: Maintenance, retry after an hour')
            return None, 'Maintenance', True, 3600
        if '<nzb' not in ldata and '<!doctype' in ldata:
            msg = Ta('Invalid URL for nzbsrus')
            logging.debug(msg)
            return None, msg, False, 0

    return fn, msg, False, 0
Ejemplo n.º 7
0
def set_bonjour(host=None, port=None):
    """ Publish host/port combo through Bonjour.

        With no arguments, re-publishes the previously stored host/port;
        otherwise stores the new combo in _HOST_PORT first.
        Sets _BONJOUR_OBJECT to the registration object on success, None on failure.
    """
    global _HOST_PORT, _BONJOUR_OBJECT

    if not _HAVE_BONJOUR or not cfg.enable_bonjour():
        logging.info('No Bonjour/ZeroConfig support installed')
        return

    if host is None and port is None:
        host, port = _HOST_PORT
    else:
        _HOST_PORT = (host, port)

    scope = pybonjour.kDNSServiceInterfaceIndexAny
    zhost = None
    domain = None

    if match_str(host, ('localhost', '127.0.', '::1')):
        logging.info('Bonjour/ZeroConfig does not support "localhost"')
        # All implementations fail to implement "localhost" properly
        # A false address is published even when scope==kDNSServiceInterfaceIndexLocalOnly
        return

    # Hosts that already carry the mDNS suffix must not get a second one
    name = hostname()
    if '.local' in name:
        suffix = ''
    else:
        suffix = '.local'
    if hasattr(cherrypy.wsgiserver, 'redirect_url'):
        cherrypy.wsgiserver.redirect_url("https://%s%s:%s/sabnzbd" % (name, suffix, port))
    logging.debug('Try to publish in Bonjour as "%s" (%s:%s)', name, host, port)
    try:
        refObject = pybonjour.DNSServiceRegister(
            interfaceIndex=scope,
            name='SABnzbd on %s:%s' % (name, port),
            regtype='_http._tcp',
            domain=domain,
            host=zhost,
            port=int(port),
            txtRecord=pybonjour.TXTRecord({'path': '/sabnzbd/'}),
            callBack=_zeroconf_callback)
    except sabnzbd.utils.pybonjour.BonjourError:
        _BONJOUR_OBJECT = None
        logging.debug('Failed to start Bonjour service')
    else:
        # BUGFIX: the worker thread must actually be started; merely
        # constructing Thread() never runs _bonjour_server, so the Bonjour
        # event loop would never process the registration.
        Thread(target=_bonjour_server, args=(refObject,)).start()
        _BONJOUR_OBJECT = refObject
        logging.debug('Successfully started Bonjour service')
Ejemplo n.º 8
0
def special_rss_site(url):
    """ Return a truthy value when url belongs to an RSS site with odd titles """
    # Preserve the short-circuit: only consult the odd-titles list
    # when the rss_filenames option is not set.
    result = cfg.rss_filenames()
    if not result:
        result = match_str(url, cfg.rss_odd_titles())
    return result
Ejemplo n.º 9
0
    def run(self):
        while 1:
            # Sleep to allow decoder/assembler switching
            sleep(0.0001)
            art_tup = self.queue.get()
            if not art_tup:
                break

            article, lines, raw_data = art_tup
            nzf = article.nzf
            nzo = nzf.nzo
            art_id = article.article
            killed = False

            # Check if the space that's now free can let us continue the queue?
            qsize = self.queue.qsize()
            if (sabnzbd.articlecache.ArticleCache.do.free_reserve_space(lines) or qsize < MAX_DECODE_QUEUE) and \
               (qsize < LIMIT_DECODE_QUEUE) and sabnzbd.downloader.Downloader.do.delayed:
                sabnzbd.downloader.Downloader.do.undelay()

            data = None
            register = True  # Finish article
            found = False  # Proper article found
            logme = None

            if lines or raw_data:
                try:
                    if nzo.precheck:
                        raise BadYenc
                    register = True

                    if self.__log_decoding:
                        logging.debug("Decoding %s", art_id)

                    data = self.decode(article, lines, raw_data)
                    nzf.article_count += 1
                    found = True

                except IOError:
                    logme = T('Decoding %s failed') % art_id
                    logging.warning(logme)
                    logging.info("Traceback: ", exc_info=True)

                    sabnzbd.downloader.Downloader.do.pause()
                    sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article)
                    register = False

                except MemoryError:
                    logme = T('Decoder failure: Out of memory')
                    logging.warning(logme)
                    anfo = sabnzbd.articlecache.ArticleCache.do.cache_info()
                    logging.info("Decoder-Queue: %d, Cache: %d, %d, %d",
                                 self.queue.qsize(), anfo.article_sum,
                                 anfo.cache_size, anfo.cache_limit)
                    logging.info("Traceback: ", exc_info=True)

                    sabnzbd.downloader.Downloader.do.pause()
                    sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article)
                    register = False

                except CrcError, e:
                    logme = 'CRC Error in %s' % art_id
                    logging.info(logme)

                    data = e.data

                except (BadYenc, ValueError):
                    # Handles precheck and badly formed articles
                    killed = False
                    found = False
                    data_to_check = lines or raw_data
                    if nzo.precheck and data_to_check and data_to_check[
                            0].startswith('223 '):
                        # STAT was used, so we only get a status code
                        found = True
                    else:
                        # Examine headers (for precheck) or body (for download)
                        # And look for DMCA clues (while skipping "X-" headers)
                        for line in data_to_check:
                            lline = line.lower()
                            if 'message-id:' in lline:
                                found = True
                            if not line.startswith('X-') and match_str(
                                    lline,
                                ('dmca', 'removed', 'cancel', 'blocked')):
                                killed = True
                                break
                    if killed:
                        logme = 'Article removed from server (%s)'
                        logging.info(logme, art_id)
                    if nzo.precheck:
                        if found and not killed:
                            # Pre-check, proper article found, just register
                            if sabnzbd.LOG_ALL:
                                logging.debug('Server %s has article %s',
                                              article.fetcher, art_id)
                            register = True
                    elif not killed and not found:
                        logme = T('Badly formed yEnc article in %s') % art_id
                        logging.info(logme)

                    if not found or killed:
                        new_server_found = self.search_new_server(article)
                        if new_server_found:
                            register = False
                            logme = None

                except:
Ejemplo n.º 10
0
Archivo: rss.py Proyecto: rivy/sabnzbd
def special_rss_site(url):
    """ Return a truthy value when url describes an RSS site with odd titles """
    # The rss_filenames setting wins; otherwise fall back to checking the
    # configured list of odd-title sites (short-circuit preserved).
    flagged = cfg.rss_filenames()
    return flagged if flagged else match_str(url, cfg.rss_odd_titles())
Ejemplo n.º 11
0
class Decoder(Thread):
    def __init__(self, servers):
        """Initialise the decoder worker thread.

        servers -- server list kept for later article re-assignment.
        """
        Thread.__init__(self)
        self.servers = servers
        # Work items (article, lines) arrive here; None acts as the stop signal
        self.queue = Queue.Queue()

    def decode(self, article, lines):
        """Queue one article for decoding and throttle the downloader if needed."""
        self.queue.put((article, lines))
        # See if there's space left in cache, pause otherwise
        # But do allow some articles to enter queue, in case of full cache
        pending = self.queue.qsize()
        cache_full = not ArticleCache.do.reserve_space(lines)
        if (cache_full and pending > MAX_DECODE_QUEUE) or pending > LIMIT_DECODE_QUEUE:
            sabnzbd.downloader.Downloader.do.delay()

    def stop(self):
        """Request shutdown of the decoder thread."""
        sentinel = None  # run() exits its loop when it dequeues a falsy item
        self.queue.put(sentinel)

    def run(self):
        from sabnzbd.nzbqueue import NzbQueue
        while 1:
            # Sleep to allow decoder/assembler switching
            sleep(0.001)
            art_tup = self.queue.get()
            if not art_tup:
                break

            article, lines = art_tup
            nzf = article.nzf
            nzo = nzf.nzo
            art_id = article.article
            killed = False

            # Check if the space that's now free can let us continue the queue?
            qsize = self.queue.qsize()
            if (ArticleCache.do.free_reserve_space(lines)
                    or qsize < MAX_DECODE_QUEUE) and (
                        qsize < LIMIT_DECODE_QUEUE
                    ) and sabnzbd.downloader.Downloader.do.delayed:
                sabnzbd.downloader.Downloader.do.undelay()

            data = None
            register = True  # Finish article
            found = False  # Proper article found
            logme = None

            if lines:
                try:
                    if nzo.precheck:
                        raise BadYenc
                    register = True
                    logging.debug("Decoding %s", art_id)

                    data = decode(article, lines)
                    nzf.article_count += 1
                    found = True

                except IOError, e:
                    logme = T('Decoding %s failed') % art_id
                    logging.warning(logme)
                    logging.info("Traceback: ", exc_info=True)

                    sabnzbd.downloader.Downloader.do.pause()
                    article.fetcher = None
                    NzbQueue.do.reset_try_lists(nzf, nzo)
                    register = False

                except MemoryError, e:
                    logme = T('Decoder failure: Out of memory')
                    logging.warning(logme)
                    anfo = sabnzbd.articlecache.ArticleCache.do.cache_info()
                    logging.info("Decoder-Queue: %d, Cache: %d, %d, %d",
                                 self.queue.qsize(), anfo.article_sum,
                                 anfo.cache_size, anfo.cache_limit)
                    logging.info("Traceback: ", exc_info=True)

                    sabnzbd.downloader.Downloader.do.pause()
                    article.fetcher = None
                    NzbQueue.do.reset_try_lists(nzf, nzo)
                    register = False

                except CrcError, e:
                    logme = T('CRC Error in %s (%s -> %s)') % (
                        art_id, e.needcrc, e.gotcrc)
                    logging.info(logme)

                    data = e.data

                except BadYenc:
                    # Handles precheck and badly formed articles
                    killed = False
                    found = False
                    if nzo.precheck and lines and lines[0].startswith('223 '):
                        # STAT was used, so we only get a status code
                        found = True
                    else:
                        # Examine headers (for precheck) or body (for download)
                        # And look for DMCA clues (while skipping "X-" headers)
                        for line in lines:
                            lline = line.lower()
                            if 'message-id:' in lline:
                                found = True
                            if not line.startswith('X-') and match_str(
                                    lline,
                                ('dmca', 'removed', 'cancel', 'blocked')):
                                killed = True
                                break
                    if killed:
                        logme = 'Article removed from server (%s)'
                        logging.info(logme, art_id)
                    if nzo.precheck:
                        if found and not killed:
                            # Pre-check, proper article found, just register
                            logging.debug('Server has article %s', art_id)
                            register = True
                    elif not killed and not found:
                        logme = T('Badly formed yEnc article in %s') % art_id
                        logging.info(logme)

                    if not found or killed:
                        new_server_found = self.__search_new_server(article)
                        if new_server_found:
                            register = False
                            logme = None
Ejemplo n.º 12
0
    def run(self):
        from sabnzbd.nzbqueue import NzbQueue
        while 1:
            # Sleep to allow decoder/assembler switching
            sleep(0.001)
            art_tup = self.queue.get()
            if not art_tup:
                break

            article, lines = art_tup
            nzf = article.nzf
            nzo = nzf.nzo
            art_id = article.article
            killed = False

            # Check if the space that's now free can let us continue the queue?
            if (ArticleCache.do.free_reserve_space(lines) or self.queue.qsize() < MAX_DECODE_QUEUE) and sabnzbd.downloader.Downloader.do.delayed:
                sabnzbd.downloader.Downloader.do.undelay()

            data = None
            register = True  # Finish article
            found = False    # Proper article found
            logme = None

            if lines:
                try:
                    if nzo.precheck:
                        raise BadYenc
                    register = True
                    logging.debug("Decoding %s", art_id)

                    data = decode(article, lines)
                    nzf.article_count += 1
                    found = True
                except IOError, e:
                    logme = T('Decoding %s failed') % art_id
                    logging.warning(logme)
                    logging.info("Traceback: ", exc_info=True)

                    sabnzbd.downloader.Downloader.do.pause()

                    article.fetcher = None

                    NzbQueue.do.reset_try_lists(nzf, nzo)

                    register = False

                except CrcError, e:
                    logme = T('CRC Error in %s (%s -> %s)') % (art_id, e.needcrc, e.gotcrc)
                    logging.info(logme)

                    data = e.data

                except BadYenc:
                    # Handles precheck and badly formed articles
                    killed = False
                    found = False
                    if nzo.precheck and lines and lines[0].startswith('223 '):
                        # STAT was used, so we only get a status code
                        found = True
                    else:
                        # Examine headers (for precheck) or body (for download)
                        # And look for DMCA clues (while skipping "X-" headers)
                        for line in lines:
                            lline = line.lower()
                            if 'message-id:' in lline:
                                found = True
                            if not line.startswith('X-') and match_str(lline, ('dmca', 'removed', 'cancel', 'blocked')):
                                killed = True
                                break
                    if killed:
                        logme = 'Article removed from server (%s)'
                        logging.info(logme, art_id)
                    if nzo.precheck:
                        if found and not killed:
                            # Pre-check, proper article found, just register
                            logging.debug('Server has article %s', art_id)
                            register = True
                    elif not killed and not found:
                        logme = T('Badly formed yEnc article in %s') % art_id
                        logging.info(logme)

                    if not found or killed:
                        new_server_found = self.__search_new_server(article)
                        if new_server_found:
                            register = False
                            logme = None
Ejemplo n.º 13
0
    def run(self):
        from sabnzbd.nzbqueue import NzbQueue
        while 1:
            sleep(0.001)
            art_tup = self.queue.get()
            if not art_tup:
                break

            if self.queue.qsize(
            ) < MIN_DECODE_QUEUE and sabnzbd.downloader.Downloader.do.delayed:
                sabnzbd.downloader.Downloader.do.undelay()

            article, lines = art_tup
            nzf = article.nzf
            nzo = nzf.nzo
            art_id = article.article
            killed = False

            data = None

            register = True  # Finish article
            found = False  # Proper article found

            if lines:
                logme = None
                try:
                    if nzo.precheck:
                        raise BadYenc
                    register = True
                    logging.debug("Decoding %s", art_id)

                    data = decode(article, lines)
                    nzf.article_count += 1
                    found = True
                except IOError, e:
                    logme = Ta('Decoding %s failed') % art_id
                    logging.warning(logme)
                    logging.info("Traceback: ", exc_info=True)

                    sabnzbd.downloader.Downloader.do.pause()

                    article.fetcher = None

                    NzbQueue.do.reset_try_lists(nzf, nzo)

                    register = False

                except CrcError, e:
                    logme = Ta('CRC Error in %s (%s -> %s)') % (
                        art_id, e.needcrc, e.gotcrc)
                    logging.info(logme)

                    data = e.data

                    if cfg.fail_on_crc():
                        new_server_found = self.__search_new_server(article)
                        if new_server_found:
                            register = False
                            logme = None

                except BadYenc:
                    # Handles precheck and badly formed articles
                    killed = False
                    found = False
                    if nzo.precheck and lines and lines[0].startswith('223 '):
                        # STAT was used, so we only get a status code
                        found = True
                    else:
                        # Examine headers (for precheck) or body (for download)
                        # And look for DMCA clues (while skipping "X-" headers)
                        for line in lines:
                            lline = line.lower()
                            if 'message-id:' in lline:
                                found = True
                            if not line.startswith('X-') and match_str(
                                    lline,
                                ('dmca', 'removed', 'cancel', 'blocked')):
                                killed = True
                                break
                    if killed:
                        logme = 'Article removed from server (%s)'
                        logging.info(logme, art_id)
                    if nzo.precheck:
                        if found and not killed:
                            # Pre-check, proper article found, just register
                            logging.debug('Server has article %s', art_id)
                            register = True
                    elif not killed and not found:
                        logme = Ta('Badly formed yEnc article in %s') % art_id
                        logging.info(logme)

                    if not found or killed:
                        new_server_found = self.__search_new_server(article)
                        if new_server_found:
                            register = False
                            logme = None
Ejemplo n.º 14
0
    def run(self):
        from sabnzbd.nzbqueue import NzbQueue
        while 1:
            sleep(0.001)
            art_tup = self.queue.get()
            if not art_tup:
                break

            if self.queue.qsize() < MIN_DECODE_QUEUE and sabnzbd.downloader.Downloader.do.delayed:
                sabnzbd.downloader.Downloader.do.undelay()

            article, lines = art_tup
            nzf = article.nzf
            nzo = nzf.nzo

            data = None

            register = True  # Finish article
            found = False    # Proper article found

            if lines:
                logme = None
                try:
                    if nzo.precheck:
                        raise BadYenc
                    register = True
                    logging.debug("Decoding %s", article)

                    data = decode(article, lines)
                    nzf.article_count += 1
                    found = True
                except IOError, e:
                    logme = Ta('Decoding %s failed') % article
                    logging.info(logme)
                    sabnzbd.downloader.Downloader.do.pause()

                    article.fetcher = None

                    NzbQueue.do.reset_try_lists(nzf, nzo)

                    register = False

                except CrcError, e:
                    logme = Ta('CRC Error in %s (%s -> %s)') % (article, e.needcrc, e.gotcrc)
                    logging.info(logme)

                    data = e.data

                    if cfg.fail_on_crc():
                        new_server_found = self.__search_new_server(article)
                        if new_server_found:
                            register = False
                            logme = None

                except BadYenc:
                    # Handles precheck and badly formed articles
                    killed = False
                    found = False
                    if nzo.precheck and lines and lines[0].startswith('223 '):
                        # STAT was used, so we only get a status code
                        found = True
                    else:
                        # Examine headers (for precheck) or body (for download)
                        # And look for DMCA clues (while skipping "X-" headers)
                        for line in lines:
                            if not line.startswith('X-') and match_str(line.lower(), ('dmca', 'removed', 'cancel', 'blocked')):
                                logging.info('Article removed from server (%s)', article)
                                killed = True
                                break
                    if nzo.precheck:
                        if found or not killed:
                            # Pre-check, proper article found, just register
                            logging.debug('Server has article %s', article)
                            register = True
                    elif not killed and not found:
                        logme = Ta('Badly formed yEnc article in %s') % article
                        logging.info(logme)

                    if not found:
                        new_server_found = self.__search_new_server(article)
                        if new_server_found:
                            register = False
                            logme = None
# --- Ejemplo n.º 15 (example separator from the source listing; score: 0) ---
    def run(self):
        while 1:
            # Sleep to allow decoder/assembler switching
            sleep(0.0001)
            art_tup = self.queue.get()
            if not art_tup:
                break

            article, lines, raw_data = art_tup
            nzf = article.nzf
            nzo = nzf.nzo
            art_id = article.article
            killed = False

            # Check if the space that's now free can let us continue the queue?
            qsize = self.queue.qsize()
            if (sabnzbd.articlecache.ArticleCache.do.free_reserve_space(lines) or qsize < MAX_DECODE_QUEUE) and \
               (qsize < LIMIT_DECODE_QUEUE) and sabnzbd.downloader.Downloader.do.delayed:
                sabnzbd.downloader.Downloader.do.undelay()

            data = None
            register = True  # Finish article
            found = False    # Proper article found
            logme = None

            if lines or raw_data:
                try:
                    if nzo.precheck:
                        raise BadYenc
                    register = True

                    if self.__log_decoding:
                        logging.debug("Decoding %s", art_id)

                    data = self.decode(article, lines, raw_data)
                    nzf.article_count += 1
                    found = True

                except IOError:
                    logme = T('Decoding %s failed') % art_id
                    logging.warning(logme)
                    logging.info("Traceback: ", exc_info=True)

                    sabnzbd.downloader.Downloader.do.pause()
                    sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article)
                    register = False

                except MemoryError:
                    logme = T('Decoder failure: Out of memory')
                    logging.warning(logme)
                    anfo = sabnzbd.articlecache.ArticleCache.do.cache_info()
                    logging.info("Decoder-Queue: %d, Cache: %d, %d, %d", self.queue.qsize(), anfo.article_sum, anfo.cache_size, anfo.cache_limit)
                    logging.info("Traceback: ", exc_info=True)

                    sabnzbd.downloader.Downloader.do.pause()
                    sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article)
                    register = False

                except CrcError, e:
                    logme = 'CRC Error in %s' % art_id
                    logging.info(logme)

                    data = e.data

                except (BadYenc, ValueError):
                    # Handles precheck and badly formed articles
                    killed = False
                    found = False
                    data_to_check = lines or raw_data
                    if nzo.precheck and data_to_check and data_to_check[0].startswith('223 '):
                        # STAT was used, so we only get a status code
                        found = True
                    else:
                        # Examine headers (for precheck) or body (for download)
                        # And look for DMCA clues (while skipping "X-" headers)
                        for line in data_to_check:
                            lline = line.lower()
                            if 'message-id:' in lline:
                                found = True
                            if not line.startswith('X-') and match_str(lline, ('dmca', 'removed', 'cancel', 'blocked')):
                                killed = True
                                break
                    if killed:
                        logme = 'Article removed from server (%s)'
                        logging.info(logme, art_id)
                    if nzo.precheck:
                        if found and not killed:
                            # Pre-check, proper article found, just register
                            logging.debug('Server %s has article %s', article.fetcher, art_id)
                            register = True
                    elif not killed and not found:
                        logme = T('Badly formed yEnc article in %s') % art_id
                        logging.info(logme)

                    if not found or killed:
                        new_server_found = self.search_new_server(article)
                        if new_server_found:
                            register = False
                            logme = None

                except:
# --- Ejemplo n.º 16 (example separator from the source listing; score: 0) ---
    def run(self):
        """Decoder worker loop.

        Repeatedly pulls ``(article, raw_data)`` tuples from the decoder
        queue, yEnc-decodes the raw server response, stores the decoded
        payload in the article cache, and registers the article's
        success/failure with the NZB queue.

        Loops until a falsy ``article`` (shutdown sentinel) is received.
        """
        while 1:
            # Set Article and NzbObject objects to None so references from this
            # thread do not keep the parent objects alive (see #1628)
            decoded_data = raw_data = article = nzo = None
            article, raw_data = self.decoder_queue.get()
            if not article:
                # Sentinel value: terminate this decoder thread.
                logging.info("Shutting down decoder %s", self.name)
                break

            nzo = article.nzf.nzo
            art_id = article.article

            # Free space in the decoder-queue
            # (the article's byte count was reserved when it was queued).
            sabnzbd.ArticleCache.free_reserved_space(article.bytes)

            # Keeping track
            article_success = False

            try:
                if nzo.precheck:
                    # Precheck mode never decodes; it is handled entirely
                    # in the BadYenc branch below.
                    raise BadYenc

                if sabnzbd.LOG_ALL:
                    logging.debug("Decoding %s", art_id)

                decoded_data = decode(article, raw_data)
                article_success = True

            except MemoryError:
                # Decoding ran out of memory: pause downloading, requeue the
                # article for another attempt, and skip registration.
                logging.warning(T("Decoder failure: Out of memory"))
                logging.info("Decoder-Queue: %d", self.decoder_queue.qsize())
                logging.info("Cache: %d, %d, %d",
                             *sabnzbd.ArticleCache.cache_info())
                logging.info("Traceback: ", exc_info=True)
                sabnzbd.Downloader.pause()

                # This article should be fetched again
                sabnzbd.NzbQueue.reset_try_lists(article)
                continue

            except CrcError as crc_error:
                logging.info("CRC Error in %s" % art_id)

                # Continue to the next one if we found new server
                if search_new_server(article):
                    continue

                # Store data, maybe par2 can still fix it
                decoded_data = crc_error.data

            except (BadYenc, ValueError):
                # Handles precheck and badly formed articles
                if nzo.precheck and raw_data and raw_data[0].startswith(
                        b"223 "):
                    # STAT was used, so we only get a status code
                    # (223 = article exists on the server).
                    article_success = True
                else:
                    # Examine headers (for precheck) or body (for download)
                    # Look for DMCA clues (while skipping "X-" headers)
                    # Detect potential UUencode
                    for line in raw_data:
                        lline = line.lower()
                        if b"message-id:" in lline:
                            # A Message-ID header means the article exists.
                            article_success = True
                        if not lline.startswith(b"x-") and match_str(
                                lline,
                            (b"dmca", b"removed", b"cancel", b"blocked")):
                            # Takedown/removal marker overrides success.
                            article_success = False
                            logging.info("Article removed from server (%s)",
                                         art_id)
                            break
                        if lline.find(b"\nbegin ") >= 0:
                            # UUencoded post: unsupported, so fail the whole job.
                            logme = T(
                                "UUencode detected, only yEnc encoding is supported [%s]"
                            ) % nzo.final_name
                            logging.error(logme)
                            nzo.fail_msg = logme
                            sabnzbd.NzbQueue.end_job(nzo)
                            break

                # Pre-check, proper article found so just register
                if nzo.precheck and article_success and sabnzbd.LOG_ALL:
                    logging.debug("Server %s has article %s", article.fetcher,
                                  art_id)
                elif not article_success:
                    # If not pre-check, this must be a bad article
                    if not nzo.precheck:
                        logging.info("Badly formed yEnc article in %s",
                                     art_id,
                                     exc_info=True)

                    # Continue to the next one if we found new server
                    if search_new_server(article):
                        continue

            except:
                # Catch-all: log and retry on another server if possible.
                logging.warning(T("Unknown Error while decoding %s"), art_id)
                logging.info("Traceback: ", exc_info=True)

                # Continue to the next one if we found new server
                if search_new_server(article):
                    continue

            if decoded_data:
                # If the data needs to be written to disk due to full cache, this will be slow
                # Causing the decoder-queue to fill up and delay the downloader
                sabnzbd.ArticleCache.save_article(article, decoded_data)

            # Always register the outcome so the queue can advance,
            # even when decoding failed (article_success is False).
            sabnzbd.NzbQueue.register_article(article, article_success)