Example #1
File: app.py Project: ldotlopez/zizi
    def filesystem_op(self, op, *args):
        """
        Filesystem interface
        """
        dry_run = self._dry_run

        if dry_run:
            print("[{}] {}".format(op, args))
            return

        try:
            if op == 'move':
                (src, dst) = (args[0], args[1])

                d = dirname(dst)
                if not isdir(d):
                    makedirs(d)

                if stat(src).st_dev == stat(d).st_dev:
                    # Same filesystem: a plain rename is enough
                    rename(src, dst)
                else:
                    # Cross-device move: copy first, then delete the source
                    if isdir(src):
                        shutil.copytree(src, dst)
                        shutil.rmtree(src)
                    else:
                        shutil.copy2(src, dst)
                        unlink(src)

                self.logger.info("[move] {} => {}".format(shortify(src), shortify(dst)))
                return

            if op == 'remove':
                target = args[0]
                if isdir(target):
                    rmdir(target)
                else:
                    unlink(target)
                self.logger.info("[remove] {}".format(shortify(target)))
                return

        except Exception as e:
            self.logger.error("Error with operation '{}' ({}): {}".format(op, args, e))
            return

        raise Exception("Invalid operation {}".format(op))
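A minimal, standalone sketch of how filesystem_op() might be exercised in dry-run mode follows. The App class below is a hypothetical stand-in for the project's real class (which is not shown in this excerpt); only the dry-run branch is reproduced, so the example is safe to run.

class App:
    def __init__(self, dry_run=True):
        self._dry_run = dry_run

    def filesystem_op(self, op, *args):
        # Dry run: only echo the requested operation, never touch the disk
        if self._dry_run:
            print("[{}] {}".format(op, args))
            return
        raise NotImplementedError("see Example #1 for the real implementation")


app = App(dry_run=True)
app.filesystem_op('move', '/tmp/incoming/episode.mkv', '/tmp/library/episode.mkv')
app.filesystem_op('remove', '/tmp/incoming/episode.mkv')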
Example #2
File: app.py Project: ldotlopez/zizi
        def get_analisys(analizer, url):
            buff_src = None

            # Fetch buffer for url
            buff = None
            if self._cachebag:
                buff = self._cachebag.get(url.encode('utf-8'))
                buff_src = 'cache' if buff else buff_src

            if not buff:
                try:
                    buff = self._fetcher.fetch(url)
                except FetchError as e:
                    self._logger.warning("Unable to fetch '{}': {}".format(url, e))
                    return []
                buff_src = 'fetcher' if buff else buff_src

            if not buff:
                raise Exception('Cannot get url')

            self._logger.info(_('Got {} from {}').format(shortify(url), buff_src))

            # Try analizer
            try:
                analisys = analizer.process(buff)
            except InvalidMarkup:
                self._logger.warning(_('Invalid markup on {}').format(shortify(url)))
                return []
            except Exception as e:
                self._logger.warning(_('Unknown error {} on {}').format(e.__class__.__name__, shortify(url)))
                return []

            # Save to cachebag
            if self._cachebag:
                if not isinstance(buff, bytes):
                    buff = bytes(buff, encoding='utf-8')
                self._cachebag.set(url.encode('utf-8'), buff)

            return analisys
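The core of get_analisys() is the cache-then-fetch flow: try the cachebag, fall back to the fetcher, then store the buffer back in the cache. The sketch below reproduces just that flow in a self-contained form; SimpleCacheBag and fetch_url are hypothetical stand-ins for the project's cachebag and fetcher objects.

class SimpleCacheBag:
    # In-memory stand-in for the project's cachebag
    def __init__(self):
        self._store = {}

    def get(self, key):
        return self._store.get(key)

    def set(self, key, value):
        self._store[key] = value


def fetch_url(url):
    # Stand-in for self._fetcher.fetch(url); always returns bytes here
    return "<html>stub for {}</html>".format(url).encode('utf-8')


def get_buffer(url, cachebag=None):
    buff, buff_src = None, None
    if cachebag:
        buff = cachebag.get(url.encode('utf-8'))
        buff_src = 'cache' if buff else buff_src
    if not buff:
        buff = fetch_url(url)
        buff_src = 'fetcher' if buff else buff_src
    if cachebag:
        if not isinstance(buff, bytes):
            buff = bytes(buff, encoding='utf-8')
        cachebag.set(url.encode('utf-8'), buff)
    return buff, buff_src


bag = SimpleCacheBag()
print(get_buffer('http://example.com', bag)[1])  # 'fetcher' on the first call
print(get_buffer('http://example.com', bag)[1])  # 'cache' on the second call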
Example #3
File: app.py Project: ldotlopez/zizi
    def run(self, analizer,
            seed_url=None, iterations=1, forced_type=None, forced_language=None,
            reset_db=False, dry_run=False):

        def get_analisys(analizer, url):
            buff_src = None

            # Fetch buffer for url
            buff = None
            if self._cachebag:
                buff = self._cachebag.get(url.encode('utf-8'))
                buff_src = 'cache' if buff else buff_src

            if not buff:
                try:
                    buff = self._fetcher.fetch(url)
                except FetchError as e:
                    self._logger.warning("Unable to fetch '{}': {}".format(url, e))
                    return []
                buff_src = 'fetcher' if buff else buff_src

            if not buff:
                raise Exception('Cannot get url')

            self._logger.info(_('Got {} from {}').format(shortify(url), buff_src))

            # Try analizer
            try:
                analisys = analizer.process(buff)
            except InvalidMarkup:
                self._logger.warning(_('Invalid markup on {}').format(shortify(url)))
                return []
            except Exception as e:
                self._logger.warning(_('Unknown error {} on {}').format(e.__class__.__name__, shortify(url)))
                return []

            # Save to cachebag
            if self._cachebag:
                if not isinstance(buff, bytes):
                    buff = bytes(buff, encoding='utf-8')
                self._cachebag.set(url.encode('utf-8'), buff)

            return analisys

        # Note: dbpath is not defined in this excerpt; it is presumably
        # available in the enclosing scope of the original module.
        sess = create_session(dbpath=dbpath)

        if reset_db:
            if not dry_run:
                for t in sess.query(Torrent):
                    t.state = Torrent.State.NONE
            else:
                self._logger.info('Reset db')

        # Build analizer
        self._analizers = AnalizersMng()
        try:
            analizer = self._analizers.get_addon(analizer)()
        except KeyError as e:
            raise InvalidAnalizer("Analizer {} not found: {}".format(analizer, e))

        # Prepare loop
        url = seed_url or analizer.get_default_url()
        iter_ = 0
        counters = {}

        while iter_ < max(1, iterations):
            self._logger.debug(_("Analizing {0}").format(url))

            counters[url] = 0

            torrents = get_analisys(analizer, url)

            for torrent in torrents:
                torrent.provider = analizer.PROVIDER
                if forced_type:
                    torrent.type = forced_type
                if forced_language:
                    torrent.language = forced_language

            self._logger.info(_("{0} torrents from {1} added").format(len(torrents), shortify(url)))
            if not dry_run:
                for torrent in torrents:
                    prev_torrent = sess.query(Torrent).filter_by(uri=torrent.uri).first()
                    if not prev_torrent:
                        counters[url] += 1
                        sess.add(torrent)
                    else:
                        # Torrent already known: copy the stored attributes
                        # back onto the freshly parsed object
                        for k in 'name type language timestamp seeds leechers size provider'.split(' '):
                            setattr(torrent, k, getattr(prev_torrent, k))
                sess.commit()

            iter_ += 1
            url = analizer.get_next_url(url)

        total = sum(counters.values())
        if total:
            notify(msg=_('Found {} torrent(s) from {}').format(total, analizer.PROVIDER))
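run() is essentially a crawl loop: start from a seed URL (or the analizer's default), count new torrents per URL, and ask the analizer for the next page. The stripped-down sketch below shows only that loop; DummyAnalizer and crawl are hypothetical stand-ins and omit the database and notification steps.

class DummyAnalizer:
    PROVIDER = 'dummy'

    def get_default_url(self):
        return 'http://example.com/page/1'

    def get_next_url(self, url):
        page = int(url.rsplit('/', 1)[-1])
        return 'http://example.com/page/{}'.format(page + 1)


def crawl(analizer, seed_url=None, iterations=1):
    url = seed_url or analizer.get_default_url()
    counters = {}
    for _step in range(max(1, iterations)):
        # The real code calls get_analisys(analizer, url) here and merges
        # the resulting torrents into the database.
        counters[url] = 0
        url = analizer.get_next_url(url)
    return counters


print(crawl(DummyAnalizer(), iterations=3))
# {'http://example.com/page/1': 0, 'http://example.com/page/2': 0, 'http://example.com/page/3': 0}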
Example #4
def process(path, classifier, settings=None):
    """
    Computes the destination filename from path using the provided info and the supplied rename_patterns.
    Returns the destination path, or:
        UnknowFileType if path is not recognised
        InvalidPattern if the applied pattern is invalid (e.g. it contains invalid keys)
    """
    def generate_tokens(info):
        tokens = info.copy()

        # Fix title
        if 'title' not in tokens:
            tokens['title'] = ''

        # Generate uc/lc/cc variants
        tmp = {}
        for (k, v) in [(k, v) for (k, v) in tokens.items() if isinstance(v, str)]:
            tmp[k+'_uc'] = v.capitalize()
            tmp[k+'_lc'] = v.lower()
            tmp[k+'_cc'] = string.capwords(v)

        tokens.update(tmp)

        # Generate white space variants
        tmp = {}
        variants = ((' {}', 'prefix'), ('{} ', 'suffix'), (' {} ', 'surround'))
        for (variant, name) in variants:
            for (token_name, token_value) in [(k, v) for (k, v) in tokens.items() if isinstance(v, str)]:
                toktype = token_name.split('_')[0]
                if toktype in ('series', 'title'):
                    tmp[token_name+'_ws_'+name] = variant.format(token_value) if token_value else ''
        tokens.update(tmp)

        # Some tokens must be overridden
        replacements = settings.get_section_dict('pattern_renaming:replacements')
        for (pattern, replacement) in replacements.items():
            for (token_name, token_value) in [(k, v) for (k, v) in tokens.items() if isinstance(v, str)]:
                tokens[token_name] = re.subn(pattern, replacement, token_value, flags=re.IGNORECASE)[0]

        return tokens

    if not isfile(path):
        return [path]

    info = guessit.guess_video_info(path)
    ftype = info.get('type', 'unknown')
    if ftype == 'unknown':
        classifier.logger.warning('Unknown filetype for {}'.format(shortify(path)))
        return [path]

    pattern = None
    if ftype == 'episode':
        series_name = info.get('series', None)

        if series_name:
            pattern = settings.get('pattern_renaming', "{}[{}]".format(ftype, series_name.lower()), fallback=None)

    if not pattern:
        pattern = settings.get('pattern_renaming', ftype, fallback=None)

    if not pattern:
        return [path]

    dst = None
    try:
        tokens = generate_tokens(info)
        dst = expanduser(pattern.format(**tokens))
    except KeyError as e:
        classifier.logger.warning("Cannot use pattern '{}' for '{}': missing key {}".format(pattern, path, e))
        return [path]

    classifier.filesystem_op('move', path, dst)
    return [dst]
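The key idea in generate_tokens() is deriving case variants (_uc/_lc/_cc) of every string token so the rename pattern can pick whichever form it needs. Below is a self-contained sketch of just that step; make_variants and the sample pattern are illustrative, and the real function also adds whitespace variants and applies user-configured replacements.

import string


def make_variants(info):
    tokens = dict(info)
    tokens.setdefault('title', '')
    extra = {}
    for k, v in tokens.items():
        if isinstance(v, str):
            extra[k + '_uc'] = v.capitalize()
            extra[k + '_lc'] = v.lower()
            extra[k + '_cc'] = string.capwords(v)
    tokens.update(extra)
    return tokens


tokens = make_variants({'series': 'some show', 'season': 1, 'episodeNumber': 2})
pattern = '{series_cc}/Season {season}/{series_cc} S{season:02d}E{episodeNumber:02d}.mkv'
print(pattern.format(**tokens))
# Some Show/Season 1/Some Show S01E02.mkv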