Example #1
    def sync_states(self, dry_run=False):
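        # Map backend state strings to the internal Torrent.State enum;
        # note that backend 'queued' torrents are treated as PAUSED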
        states = {
            'paused':      Torrent.State.PAUSED,
            'downloading': Torrent.State.DOWNLOADING,
            'seeding':     Torrent.State.SEEDING,
            'done':        Torrent.State.DONE,
            'queued':      Torrent.State.PAUSED
        }

        q = self._sess.query(Torrent)
        q = q.filter(~Torrent.state.in_((Torrent.State.NONE, Torrent.State.ARCHIVED)))

        for obj in q.all():
            must_update = False

            # Fetch info from backend
            hash_str = obj.hash_string
            if not hash_str:
                raise Exception("Invalid torrent {}".format(obj))
            info = self._backend.info(hash_str)

            if not info:
                # If info is not found the user may have deleted or moved it
                # (or anything else); it's better to archive it
                new_state = Torrent.State.ARCHIVED
                new_state_str = 'archived'
                must_update = True

            else:
                # Check for a new state, if any
                new_state_str = info['state']
                if new_state_str not in states:
                    raise Exception("Status unknow for {}: {}".format(obj, new_state_str))

                new_state = states[new_state_str]

                if obj.state != new_state:
                    must_update = True

            if must_update:
                self._logger.info("State change for {}: {} → {} ({})".format(
                    obj, obj.state, new_state, new_state_str))

                if obj.state in (Torrent.State.DOWNLOADING, Torrent.State.SEEDING,
                                 Torrent.State.DONE, Torrent.State.ARCHIVED):
                    notify("Torrent {}: {}".format(new_state_str, obj.name))

                if not dry_run:
                    obj.state = new_state
                    self._sess.commit()
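
The core of Example #1 is a lookup-table translation from the backend's state strings to an internal enum, with missing torrents archived and unknown strings rejected loudly. A minimal runnable sketch of that pattern (TorrentState, STATES and resolve_state are illustrative names, not part of the original):

    import enum

    class TorrentState(enum.Enum):
        # Illustrative stand-in for Torrent.State
        PAUSED = 1
        DOWNLOADING = 2
        SEEDING = 3
        DONE = 4
        ARCHIVED = 5

    STATES = {
        'paused':      TorrentState.PAUSED,
        'downloading': TorrentState.DOWNLOADING,
        'seeding':     TorrentState.SEEDING,
        'done':        TorrentState.DONE,
        'queued':      TorrentState.PAUSED,  # 'queued' collapses to PAUSED
    }

    def resolve_state(info):
        # `info` is the backend's reply for a torrent, or None if it is gone.
        # A missing torrent is archived rather than treated as an error.
        if info is None:
            return TorrentState.ARCHIVED
        try:
            return STATES[info['state']]
        except KeyError:
            raise ValueError("Unknown state: {}".format(info['state']))

    print(resolve_state(None))                  # TorrentState.ARCHIVED
    print(resolve_state({'state': 'seeding'}))  # TorrentState.SEEDING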
Example #2
    def run_verbose_option(self, arguments=(), explain=False, items=0, dry_run=False, settings=None, *args, **kwargs):
        if 'fail' in arguments:
            raise InvalidArguments("fail is a forbidden word")

        if explain:
            print("Test App")
            print(("__init__ params \n" +
                   "         args:   %s\n" +
                   "         kwargs: %s") % (repr(self._args), repr(self._conf)))
            print("setings: %s" % repr(settings))

        if not dry_run:
            notify("hello there! args: {}".format(arguments), sender=self)
        else:
            print("Not notifing")
Example #3
    def run(self, db_shell=False, dry_run=False, settings=None):
        if db_shell:
            return self.run_db_session()

        self.sync_states(dry_run)

        accepted = self.filter(settings)
        if accepted and not dry_run:
            for t_obj in itertools.chain.from_iterable(accepted.values()):
                self.queue_torrent(t_obj)

        for (f, matches) in accepted.items():
            if len(matches) == 1:
                notify(msg="Accepted %s item: %s" % (f, matches[0].name))
            elif len(matches) > 1:
                notify(msg="Accepted %s items for %s" % (len(matches), f))
Example #4
    def run(self, analizer,
            seed_url=None, iterations=1, forced_type=None, forced_language=None,
            reset_db=False, dry_run=False, dbpath=None):

        def get_analisys(analizer, url):
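            # Return the analysis for `url`, preferring the cachebag over
            # the network; returns [] on fetch or parse errors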
            buff_src = None

            # Fetch buffer for url
            buff = None
            if self._cachebag:
                buff = self._cachebag.get(url.encode('utf-8'))
                buff_src = 'cache' if buff else buff_src

            if not buff:
                try:
                    buff = self._fetcher.fetch(url)
                except FetchError as e:
                    self._logger.warning("Unable to fetch '{}': {}".format(url, e))
                    return []
                buff_src = 'fetcher' if buff else buff_src

            if not buff:
                raise ValueError("Unable to get '{}'".format(url))

            self._logger.info(_('Got {} from {}').format(shortify(url), buff_src))

            # Try analizer
            try:
                analisys = analizer.process(buff)
            except InvalidMarkup:
                self._logger.warning(_('Invalid markup on {}').format(shortify(url)))
                return []
            except Exception as e:
                self._logger.warning(_('Unknown error {} on {}').format(e.__class__.__name__, shortify(url)))
                return []

            # Save to cachebag
            if self._cachebag:
                if not isinstance(buff, bytes):
                    buff = bytes(buff, encoding='utf-8')
                self._cachebag.set(url.encode('utf-8'), buff)

            return analisys

        sess = create_session(dbpath=dbpath)

        if reset_db:
            self._logger.info('Resetting db')
            if not dry_run:
                for t in sess.query(Torrent):
                    t.state = Torrent.State.NONE
                sess.commit()

        # Build analizer
        self._analizers = AnalizersMng()
        try:
            analizer = self._analizers.get_addon(analizer)()
        except KeyError as e:
            raise InvalidAnalizer("Analizer {} not found: {}".format(analizer, e))

        # Prepare loop
        url = seed_url or analizer.get_default_url()
        iter_ = 0
        counters = {}

        while iter_ < max(1, iterations):
            self._logger.debug(_("Analizing {0}").format(url))

            counters[url] = 0

            torrents = get_analisys(analizer, url)

            for torrent in torrents:
                torrent.provider = analizer.PROVIDER
                if forced_type:
                    torrent.type = forced_type
                if forced_language:
                    torrent.language = forced_language

            self._logger.info(_("{0} torrents from {1} added").format(len(torrents), shortify(url)))
            if not dry_run:
                for torrent in torrents:
                    prev_torrent = sess.query(Torrent).filter_by(uri=torrent.uri).first()
                    if not prev_torrent:
                        counters[url] += 1
                        sess.add(torrent)
                    else:
                        # Torrent is already tracked: refresh its mutable
                        # fields with the freshly scraped values
                        for k in ('name', 'type', 'language', 'timestamp',
                                  'seeds', 'leechers', 'size', 'provider'):
                            setattr(prev_torrent, k, getattr(torrent, k))
                sess.commit()

            iter_ += 1
            url = analizer.get_next_url(url)

        total = sum(counters.values())
        if total:
            notify(msg=_('Found {} torrent(s) from {}').format(total, analizer.PROVIDER))
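
get_analisys in Example #4 is a cache-aside routine: try the cachebag, fall back to the fetcher, then write the bytes back for next time. A self-contained sketch with a plain dict standing in for the cachebag and a lambda for the fetcher (fetch_cached is an illustrative name, not from the original):

    def fetch_cached(url, cache, fetch):
        # Cache-aside: try the cache first, fall back to `fetch` on a miss,
        # then write the result back so the next call is a hit
        key = url.encode('utf-8')
        buff = cache.get(key)
        if buff is not None:
            return buff

        buff = fetch(url)  # may raise; the caller decides how to handle that
        if not isinstance(buff, bytes):
            buff = bytes(buff, encoding='utf-8')

        cache[key] = buff
        return buff

    cache = {}
    print(fetch_cached('http://example.com', cache, lambda url: 'payload'))  # miss
    print(fetch_cached('http://example.com', cache, lambda url: 'payload'))  # hit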