Example #1
0
    def __init__(self):
        """Wire up the bottle web application: debug mode, template path,
        routes, logging, DB session and the bypass sub-app."""
        bottle.debug(True)
        # bottle expects TEMPLATE_PATH to be a *list* of directories.
        # (The original had a stray trailing comma, turning the value into
        # a 1-tuple — accidental, and breaks bottle code that mutates the
        # list in place.)
        bottle.TEMPLATE_PATH = [join(dirname(abspath(__file__)), 'views')]

        # Route registration via the decorator API, applied manually so
        # bound methods can be used as handlers.
        bottle.route('/')(self.index)
        bottle.route('/static/<filename>')(self.send_static)
        bottle.route('/search', method='get')(self.search)
        bottle.route('/queue', method='post')(self.queue)

        self._logger = get_logger('tita')
        # NOTE(review): dbpath is read from module scope — confirm it is
        # defined before this class is instantiated.
        self._sess = create_session(dbpath=dbpath)
        self._bypass = BypassApp()
Example #2
0
    def run(self, **kwargs):
        """Render recent torrents as an RSS-style feed.

        Keyword args:
            items (int): max number of items to emit; negative means no limit.
            filter_type (str): restrict the query to one torrent type.
            output (str): path to write the feed to; stdout when absent.
        """
        sess = create_session(dbpath=dbpath)

        limit = kwargs.get('items', -1)
        filter_type = kwargs.get('filter_type')

        query = sess.query(Torrent)
        query = query.order_by(sqlalchemy.desc(Torrent.timestamp))
        if filter_type:
            query = query.filter_by(type = filter_type)

        if limit >= 0:
            query = query.limit(limit)

        items = []

        for row in [row2dict(row) for row in query.all()]:
            qs = urllib.parse.parse_qs(urllib.parse.urlparse(row['uri']).query)

            # A magnet URI carries the infohash in xt=urn:btih:<hash>.
            # Guard against URIs with no 'xt' at all before splitting.
            if 'xt' not in qs:
                self._logger.warning(_("Magnet URI doesn't follow expected schema"))
                continue
            qs_xt_p = qs['xt'][0].split(':')
            if len(qs_xt_p) < 3 or qs_xt_p[0] != 'urn' or qs_xt_p[1] != 'btih':
                self._logger.warning(_("Magnet URI doesn't follow expected schema"))
                continue

            d = row
            d['infohash'] = qs_xt_p[2]
            # Key name 'timestamp_formated' is what the template expects —
            # keep the (misspelled) key intact.
            d['timestamp_formated'] = utils.formatdate(d['timestamp'])
            d['name_encoded'] = urllib.parse.quote_plus(d['name'])
            d['uri_encoded'] = d['uri'].replace('&','&amp;')

            d['trackers'] = ''
            # Magnet URIs list trackers under the 'tr' key; the original
            # tested for a nonexistent 'trackers' key, so trackers were
            # never rendered.
            if 'tr' in qs:
                for tracker in qs['tr']:
                    d['trackers'] += templates.tracker % {'tracker' : tracker}

            self._logger.debug(_("Feed item for {}").format(d['name']))
            items.append(templates.item % d)

        buff = templates.feed % {'items' : ''.join(items)}
        if kwargs.get('output'):
            with open(kwargs.get('output'), 'w') as fh:
                fh.write(buff)
        else:
            sys.stdout.write(buff)
Example #3
0
 def __init__(self, *args, **kw):
     """Initialize with optional injected collaborators.

     Keyword args:
         db_session: pre-built DB session (default: new session on dbpath).
         backend: Backend instance (default: a fresh Backend()).
         logger: logger to use (default: get_logger('bypass')).
     """
     # kw.get(key, default) evaluates the default eagerly, so the original
     # opened a DB session and built a Backend/logger even when the caller
     # supplied one. Build the defaults lazily instead.
     self._sess = kw['db_session'] if 'db_session' in kw else create_session(dbpath=dbpath)
     self._backend = kw['backend'] if 'backend' in kw else Backend()
     self._logger = kw['logger'] if 'logger' in kw else get_logger('bypass')
Example #4
0
    def run(self, analizer,
            seed_url=None, iterations=1, forced_type=None, forced_language=None,
            reset_db=False, dry_run=False):
        """Crawl one analizer's pages and persist discovered torrents.

        Args:
            analizer: name of the analizer addon to use.
            seed_url: starting URL (default: the analizer's own default).
            iterations: number of pages to walk (at least 1).
            forced_type / forced_language: override values on every torrent.
            reset_db: reset the state of all stored torrents first.
            dry_run: analyze but do not write anything to the DB.

        Raises:
            InvalidAnalizer: if the named analizer addon does not exist.
        """

        def get_analisys(analizer, url):
            """Fetch `url` (cache first, then fetcher) and run the analizer
            on the buffer. Returns a list of torrents ([] on any failure)."""
            buff_src = None

            # Fetch buffer for url: the cachebag is consulted first so
            # repeated runs do not hit the network.
            buff = None
            if self._cachebag:
                buff = self._cachebag.get(url.encode('utf-8'))
                buff_src = 'cache' if buff else buff_src

            if not buff:
                try:
                    buff = self._fetcher.fetch(url)
                except FetchError as e:
                    self._logger.warning("Unable to fetch '{}': {}".format(url, e))
                    return []
                buff_src = 'fetcher' if buff else buff_src

            if not buff:
                raise Exception('Cannot get url')

            self._logger.info(_('Got {} from {}').format(shortify(url), buff_src))

            # Try analizer
            try:
                analisys = analizer.process(buff)
            except InvalidMarkup:
                self._logger.warning(_('Invalid markup on {}').format(shortify(url)))
                return []
            except Exception as e:
                self._logger.warning(_('Unknow error {} on {}').format(e.__class__.__name__, shortify(url)))
                return []

            # Save to cachebag only after a successful analysis, so broken
            # pages are refetched next run.
            if self._cachebag:
                if type(buff) != bytes:
                    buff = bytes(buff, encoding='utf-8')
                self._cachebag.set(url.encode('utf-8'), buff)

            return analisys

        sess = create_session(dbpath=dbpath)

        if reset_db:
            if not dry_run:
                for t in sess.query(Torrent):
                    t.state = Torrent.State.NONE
                # Persist the reset immediately instead of relying on the
                # commit inside the crawl loop below.
                sess.commit()
            else:
                self._logger.info('Reset db')

        # Build analizer
        self._analizers = AnalizersMng()
        try:
            analizer = self._analizers.get_addon(analizer)()
        except KeyError as e:
            raise InvalidAnalizer("Analizer {} not found: {}".format(analizer, e))

        # Prepare loop
        url = seed_url or analizer.get_default_url()
        iter_ = 0
        counters = {}

        while iter_ < max(1, iterations):
            self._logger.debug(_("Analizing {0}").format(url))

            counters[url] = 0

            torrents = get_analisys(analizer, url)

            for torrent in torrents:
                torrent.provider = analizer.PROVIDER
                if forced_type:
                    torrent.type = forced_type
                if forced_language:
                    torrent.language = forced_language

            self._logger.info(_("{0} torrents from {1} added").format(len(torrents), shortify(url)))
            if not dry_run:
                for torrent in torrents:
                    prev_torrent = sess.query(Torrent).filter_by(uri=torrent.uri).first()
                    if not prev_torrent:
                        counters[url] += 1
                        sess.add(torrent)
                    else:
                        # Torrent already stored: refresh the persisted row
                        # with the freshly scraped values. (The original
                        # copied in the opposite direction, onto the new
                        # object that is never added to the session — a
                        # no-op.)
                        for k in 'name type language timestamp seeds leechers size provider'.split(' '):
                            setattr(prev_torrent, k, getattr(torrent, k))
                sess.commit()

            iter_ += 1
            url = analizer.get_next_url(url)

        total = sum(counters.values())
        if total:
            notify(msg=_('Found {} torrent(s) from {}').format(total, analizer.PROVIDER))