Example #1
0
def test_air_by_date_parsing(p, monkeypatch_function_return, create_tvshow):
    """Run a release name through guessit and verify the anime parse result."""
    monkeypatch_function_return(p['mocks'])

    release_name = p['name']
    name_parser = NameParser()
    parse_result = name_parser.to_parse_result(release_name,
                                               guessit.guessit(release_name))

    # Attach a show object so the parser can resolve series-specific data.
    parse_result.series = create_tvshow(name=p['series_info']['name'])

    assert p['expected'] == name_parser._parse_anime(parse_result)
Example #2
0
 def update_old_propers(self):
     """Backfill proper_tags for history rows recorded before the column existed."""
     # This is called once when we create proper_tags columns
     log.debug(u'Checking for old propers without proper tags')
     query = (
         "SELECT resource FROM history WHERE (proper_tags IS NULL OR proper_tags = '') "
         "AND (action LIKE '%2' OR action LIKE '%9') AND "
         "(resource LIKE '%REPACK%' OR resource LIKE '%PROPER%' OR resource LIKE '%REAL%')"
     )
     sql_results = self.connection.select(query)
     if not sql_results:
         return

     for sql_result in sql_results:
         proper_release = sql_result['resource']
         log.debug(u'Found old propers without proper tags: {0}',
                   proper_release)
         parse_result = NameParser()._parse_string(proper_release)
         if not parse_result.proper_tags:
             continue
         # Store the tags pipe-separated, matching the column's format.
         proper_tags = '|'.join(parse_result.proper_tags)
         log.debug(u'Add proper tags {0!r} to {1!r}', proper_tags,
                   proper_release)
         self.connection.action(
             'UPDATE history SET proper_tags = ? WHERE resource = ?',
             [proper_tags, proper_release])
Example #3
0
    def process(self):
        """
        Do the actual work.

        :return: True
        """
        self.log(logger.INFO, u'Failed download detected: ({nzb}, {dir})'.format(nzb=self.nzb_name, dir=self.dir_name))

        release_name = naming.determine_release_name(self.dir_name, self.nzb_name)
        if not release_name:
            self.log(logger.WARNING, u'Warning: unable to find a valid release name.')
            raise FailedPostProcessingFailedException()

        try:
            parse_result = NameParser().parse(release_name)
        except (InvalidNameException, InvalidShowException):
            self.log(logger.WARNING,
                     u'Not enough information to parse release name into a valid show. '
                     u'Consider adding scene exceptions or improve naming for: {release}'.format(release=release_name))
            raise FailedPostProcessingFailedException()

        self.log(logger.DEBUG, u'Parsed info: {result}'.format(result=parse_result))

        if parse_result.episode_numbers:
            # One or more explicit episodes were parsed out of the name.
            self.log(logger.DEBUG, u'Detected as single/multi episode: {release}'.format(release=release_name))
            segment = [parse_result.series.get_episode(parse_result.season_number, episode)
                       for episode in parse_result.episode_numbers]
        else:
            # No episode numbers: treat the release as a full season pack.
            self.log(logger.DEBUG, 'Detected as season pack: {release}'.format(release=release_name))
            segment = list(parse_result.series.get_all_episodes(parse_result.season_number))

        if segment:
            self.log(logger.DEBUG, u'Adding this release to failed queue: {release}'.format(release=release_name))
            app.forced_search_queue_scheduler.action.add_item(
                FailedQueueItem(parse_result.series, segment))

        return True
Example #4
0
    def subtitles_enabled(*args):
        """Try to parse names to a show and check whether the show has subtitles enabled.

        :param args:
        :return:
        :rtype: bool
        """
        for name in args:
            if not name:
                continue

            try:
                parse_result = NameParser().parse(name)
                indexer_id = parse_result.series.indexerid
                if not indexer_id:
                    log.warning('Empty indexer ID for: {name}', {'name': name})
                    continue

                # Look up the show's subtitles flag; answer with the first
                # name that resolves to a valid indexer id.
                main_db_con = db.DBConnection()
                sql_results = main_db_con.select('SELECT subtitles FROM tv_shows WHERE indexer = ? AND indexer_id = ? LIMIT 1',
                                                 [parse_result.series.indexer, indexer_id])
                return bool(sql_results[0]['subtitles']) if sql_results else False
            except (InvalidNameException, InvalidShowException):
                log.warning('Not enough information to parse filename into a valid show. Consider adding scene '
                            'exceptions or improve naming for: {name}', {'name': name})
        return False
Example #5
0
    def find_search_results(self,
                            show,
                            episodes,
                            search_mode,
                            forced_search=False,
                            download_current_quality=False,
                            manual_search=False,
                            manual_search_type='episode'):
        """Search episodes based on param.

        Builds provider search strings for the requested episodes (or for the
        season in 'sponly' mode), queries the provider, parses each returned
        item and filters out results that do not match the searched
        season/episodes (or the air date for air-by-date/sports shows).

        :param show: show object being searched
        :param episodes: list of episode objects to search for
        :param search_mode: 'sponly' (season pack) or 'eponly' (per-episode)
        :param forced_search: whether this is a forced search
        :param download_current_quality: allow re-downloading current quality
        :param manual_search: manual searches skip the provider cache
        :param manual_search_type: 'episode' or 'season' for manual searches
        :return: dict mapping an episode number (or SEASON_RESULT /
            MULTI_EP_RESULT) to a list of search result objects
        """
        self._check_auth()
        self.show = show

        results = {}
        items_list = []

        for episode in episodes:
            # Automated searches consult the provider cache first.
            if not manual_search:
                cache_result = self.cache.search_cache(
                    episode,
                    forced_search=forced_search,
                    down_cur_quality=download_current_quality)
                if cache_result:
                    if episode.episode not in results:
                        results[episode.episode] = cache_result
                    else:
                        results[episode.episode].extend(cache_result)

                    continue

            search_strings = []
            # Season search: several episodes (or an explicit manual season
            # search) combined with season-only search mode.
            season_search = (len(episodes) > 1 or manual_search_type
                             == 'season') and search_mode == 'sponly'
            if season_search:
                search_strings = self._get_season_search_strings(episode)
            elif search_mode == 'eponly':
                search_strings = self._get_episode_search_strings(episode)

            for search_string in search_strings:
                # Find results from the provider
                items_list += self.search(search_string,
                                          ep_obj=episode,
                                          manual_search=manual_search)

            # In season search, we can't loop in episodes lists as we only need one episode to get the season string
            if search_mode == 'sponly':
                break

        # Every requested episode was satisfied from the cache.
        if len(results) == len(episodes):
            return results

        if items_list:
            # categorize the items into lists by quality
            items = defaultdict(list)
            for item in items_list:
                items[self.get_quality(item, anime=show.is_anime)].append(item)

            # temporarily remove the list of items with unknown quality
            unknown_items = items.pop(Quality.UNKNOWN, [])

            # make a generator to sort the remaining items by descending quality
            items_list = (items[quality]
                          for quality in sorted(items, reverse=True))

            # unpack all of the quality lists into a single sorted list
            items_list = list(chain(*items_list))

            # extend the list with the unknown qualities, now sorted at the bottom of the list
            items_list.extend(unknown_items)

        # Accumulates cache DB operations, flushed in one mass action at the end.
        cl = []

        # Move through each item and parse it into a quality
        search_results = []
        for item in items_list:

            # Make sure we start with a TorrentSearchResult, NZBDataSearchResult or NZBSearchResult search result obj.
            search_result = self.get_result()
            search_results.append(search_result)
            search_result.item = item
            search_result.download_current_quality = download_current_quality
            # FIXME: Should be changed to search_result.search_type
            search_result.forced_search = forced_search

            (search_result.name,
             search_result.url) = self._get_title_and_url(item)
            (search_result.seeders,
             search_result.leechers) = self._get_result_info(item)

            search_result.size = self._get_size(item)
            search_result.pubdate = self._get_pubdate(item)

            search_result.result_wanted = True

            # Parse method selected by show type: index 0 ('normal') or 1 ('anime').
            try:
                search_result.parsed_result = NameParser(
                    parse_method=('normal', 'anime')[show.is_anime]).parse(
                        search_result.name)
            except (InvalidNameException, InvalidShowException) as error:
                log.debug(
                    'Error during parsing of release name: {release_name}, with error: {error}',
                    {
                        'release_name': search_result.name,
                        'error': error
                    })
                search_result.add_cache_entry = False
                search_result.result_wanted = False
                continue

            # I don't know why i'm doing this. Maybe remove it later on all together, now i've added the parsed_result
            # to the search_result.
            search_result.show = search_result.parsed_result.show
            search_result.quality = search_result.parsed_result.quality
            search_result.release_group = search_result.parsed_result.release_group
            search_result.version = search_result.parsed_result.version
            search_result.actual_season = search_result.parsed_result.season_number
            search_result.actual_episodes = search_result.parsed_result.episode_numbers

            # Manual searches keep every parsed result; automated searches
            # apply the season/episode (or air-date) filtering below.
            if not manual_search:
                if not (search_result.show.air_by_date
                        or search_result.show.sports):
                    if search_mode == 'sponly':
                        if search_result.parsed_result.episode_numbers:
                            log.debug(
                                'This is supposed to be a season pack search but the result {0} is not a valid '
                                'season pack, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue
                        # Tuple indexing: scene_season when the series uses scene numbering.
                        elif not [
                                ep for ep in episodes
                                if search_result.parsed_result.season_number ==
                            (ep.season, ep.scene_season)[ep.series.is_scene]
                        ]:
                            log.debug(
                                'This season result {0} is for a season we are not searching for, '
                                'skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue
                    else:
                        # I'm going to split these up for better readability
                        # Check if at least got a season parsed.
                        if search_result.parsed_result.season_number is None:
                            log.debug(
                                "The result {0} doesn't seem to have a valid season that we are currently trying to "
                                "snatch, skipping it", search_result.name)
                            search_result.result_wanted = False
                            continue

                        # Check if we at least got some episode numbers parsed.
                        if not search_result.parsed_result.episode_numbers:
                            log.debug(
                                "The result {0} doesn't seem to match an episode that we are currently trying to "
                                "snatch, skipping it", search_result.name)
                            search_result.result_wanted = False
                            continue

                        # Compare the episodes and season from the result with what was searched.
                        if not [
                                searched_episode
                                for searched_episode in episodes
                                if searched_episode.season ==
                                search_result.parsed_result.season_number and
                            (searched_episode.episode, searched_episode.
                             scene_episode)[searched_episode.series.is_scene]
                                in search_result.parsed_result.episode_numbers
                        ]:
                            log.debug(
                                "The result {0} doesn't seem to match an episode that we are currently trying to "
                                "snatch, skipping it", search_result.name)
                            search_result.result_wanted = False
                            continue

                    # We've performed some checks to decided if we want to continue with this result.
                    # If we've hit this, that means this is not an air_by_date and not a sports show. And it seems to be
                    # a valid result. Let's store the parsed season and episode number and continue.
                    search_result.actual_season = search_result.parsed_result.season_number
                    search_result.actual_episodes = search_result.parsed_result.episode_numbers
                else:
                    # air_by_date or sportshow.
                    search_result.same_day_special = False

                    if not search_result.parsed_result.is_air_by_date:
                        log.debug(
                            "This is supposed to be a date search but the result {0} didn't parse as one, "
                            "skipping it", search_result.name)
                        search_result.result_wanted = False
                        continue
                    else:
                        # Use a query against the tv_episodes table, to match the parsed air_date against.
                        air_date = search_result.parsed_result.air_date.toordinal(
                        )
                        db = DBConnection()
                        sql_results = db.select(
                            'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                            [search_result.show.indexerid, air_date])

                        # NOTE(review): two rows presumably mean a special (season 0)
                        # plus a regular episode airing the same day — confirm schema.
                        if len(sql_results) == 2:
                            if int(sql_results[0][b'season']) == 0 and int(
                                    sql_results[1][b'season']) != 0:
                                search_result.actual_season = int(
                                    sql_results[1][b'season'])
                                search_result.actual_episodes = [
                                    int(sql_results[1][b'episode'])
                                ]
                                search_result.same_day_special = True
                            elif int(sql_results[1][b'season']) == 0 and int(
                                    sql_results[0][b'season']) != 0:
                                search_result.actual_season = int(
                                    sql_results[0][b'season'])
                                search_result.actual_episodes = [
                                    int(sql_results[0][b'episode'])
                                ]
                                search_result.same_day_special = True
                        elif len(sql_results) != 1:
                            log.warning(
                                "Tried to look up the date for the episode {0} but the database didn't return proper "
                                "results, skipping it", search_result.name)
                            search_result.result_wanted = False
                            continue

                        # @TODO: Need to verify and test this.
                        if search_result.result_wanted and not search_result.same_day_special:
                            search_result.actual_season = int(
                                sql_results[0][b'season'])
                            search_result.actual_episodes = [
                                int(sql_results[0][b'episode'])
                            ]

        # Iterate again over the search results, and see if there is anything we want.
        for search_result in search_results:

            # Try to cache the item if we want to.
            cache_result = search_result.add_result_to_cache(self.cache)
            if cache_result is not None:
                cl.append(cache_result)

            if not search_result.result_wanted:
                log.debug(
                    "We aren't interested in this result: {0} with url: {1}",
                    search_result.name, search_result.url)
                continue

            log.debug('Found result {0} at {1}', search_result.name,
                      search_result.url)

            episode_object = search_result.create_episode_object()
            # result = self.get_result(episode_object, search_result)
            search_result.finish_search_result(self)

            # Bucket the result by episode number, or by the season-pack /
            # multi-episode sentinel values.
            if not episode_object:
                episode_number = SEASON_RESULT
                log.debug('Found season pack result {0} at {1}',
                          search_result.name, search_result.url)
            elif len(episode_object) == 1:
                episode_number = episode_object[0].episode
                log.debug('Found single episode result {0} at {1}',
                          search_result.name, search_result.url)
            else:
                episode_number = MULTI_EP_RESULT
                log.debug(
                    'Found multi-episode ({0}) result {1} at {2}', ', '.join(
                        map(str, search_result.parsed_result.episode_numbers)),
                    search_result.name, search_result.url)
            if episode_number not in results:
                results[episode_number] = [search_result]
            else:
                results[episode_number].append(search_result)

        if cl:
            # Access to a protected member of a client class
            db = self.cache._get_db()
            db.mass_action(cl)

        return results
Example #6
0
def validate_name(
        pattern,
        multi=None,
        anime_type=None,  # pylint: disable=too-many-arguments, too-many-return-statements
        file_only=False,
        abd=False,
        sports=False):
    """
    See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    ep = generate_sample_ep(multi, abd, sports, anime_type)

    new_name = ep.formatted_filename(pattern, multi, anime_type) + '.ext'
    new_path = ep.formatted_dir(pattern, multi)
    if not file_only:
        new_name = os.path.join(new_path, new_name)

    if not new_name:
        logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG)
        return False

    logger.log(u"Trying to parse " + new_name, logger.DEBUG)

    try:
        result = NameParser(show=ep.series,
                            naming_pattern=True).parse(new_name)
    except (InvalidNameException, InvalidShowException) as error:
        logger.log(u"{}".format(error), logger.DEBUG)
        return False

    logger.log(u"The name " + new_name + " parsed into " + str(result),
               logger.DEBUG)

    # Air-by-date / sports names are validated on the air date alone.
    if abd or sports:
        if result.air_date != ep.airdate:
            logger.log(
                u"Air date incorrect in parsed episode, pattern isn't valid",
                logger.DEBUG)
            return False
        return True

    expected_episodes = [ep] + ep.related_episodes

    # Anime naming (except type 3) is validated by absolute numbering,
    # but only when the parse produced any absolute numbers at all.
    if anime_type != 3:
        if result.ab_episode_numbers and result.ab_episode_numbers != [
                x.absolute_number for x in expected_episodes
        ]:
            logger.log(
                u"Absolute numbering incorrect in parsed episode, pattern isn't valid",
                logger.DEBUG)
            return False
        return True

    # Standard naming: season and episode numbers must match exactly.
    if result.season_number != ep.season:
        logger.log(
            u"Season number incorrect in parsed episode, pattern isn't valid",
            logger.DEBUG)
        return False

    if result.episode_numbers != [x.episode for x in expected_episodes]:
        logger.log(
            u"Episode numbering incorrect in parsed episode, pattern isn't valid",
            logger.DEBUG)
        return False

    return True
Example #7
0
    def _get_proper_results(self):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
        """Retrieve a list of recently aired episodes, and search for these episodes in the different providers."""
        propers = {}

        # For each provider get the list of propers
        original_thread_name = threading.currentThread().name
        providers = enabled_providers('backlog')

        search_date = datetime.datetime.today() - datetime.timedelta(
            days=app.PROPERS_SEARCH_DAYS)
        main_db_con = db.DBConnection()
        if not app.POSTPONE_IF_NO_SUBS:
            # Get the recently aired (last 2 days) shows from DB
            search_q_params = ','.join('?' for _ in Quality.DOWNLOADED)
            recently_aired = main_db_con.select(
                b'SELECT showid, season, episode, status, airdate'
                b' FROM tv_episodes'
                b' WHERE airdate >= ?'
                b' AND status IN ({0})'.format(search_q_params),
                [search_date.toordinal()] + Quality.DOWNLOADED)
        else:
            # Get recently subtitled episodes (last 2 days) from DB
            # Episode status becomes downloaded only after found subtitles
            last_subtitled = search_date.strftime(History.date_format)
            recently_aired = main_db_con.select(
                b'SELECT showid, season, episode FROM history '
                b"WHERE date >= ? AND action LIKE '%10'", [last_subtitled])

        if not recently_aired:
            log.info('No recently aired new episodes, nothing to search for')
            return []

        # Loop through the providers, and search for releases
        for cur_provider in providers:
            threading.currentThread().name = '{thread} :: [{provider}]'.format(
                thread=original_thread_name, provider=cur_provider.name)

            log.info('Searching for any new PROPER releases from {provider}',
                     {'provider': cur_provider.name})

            try:
                cur_propers = cur_provider.find_propers(recently_aired)
            except AuthException as e:
                log.debug('Authentication error: {error}', {'error': ex(e)})
                continue

            # if they haven't been added by a different provider than add the proper to the list
            for proper in cur_propers:
                name = self._sanitize_name(proper.name)
                if name not in propers:
                    log.debug('Found new possible proper result: {name}',
                              {'name': proper.name})
                    propers[name] = proper

        threading.currentThread().name = original_thread_name

        # take the list of unique propers and get it sorted by
        sorted_propers = sorted(propers.values(),
                                key=operator.attrgetter('date'),
                                reverse=True)
        final_propers = []

        # Keep only items from last PROPER_SEARCH_DAYS setting in processed propers:
        latest_proper = datetime.datetime.now() - datetime.timedelta(
            days=app.PROPERS_SEARCH_DAYS)
        self.processed_propers = [
            p for p in self.processed_propers if p.get('date') >= latest_proper
        ]

        # Get proper names from processed propers
        processed_propers_names = [
            proper.get('name') for proper in self.processed_propers
            if proper.get('name')
        ]

        for cur_proper in sorted_propers:

            if not self.ignore_processed_propers and cur_proper.name in processed_propers_names:
                log.debug(u'Proper already processed. Skipping: {proper_name}',
                          {'proper_name': cur_proper.name})
                continue

            try:
                cur_proper.parse_result = NameParser().parse(cur_proper.name)
            except (InvalidNameException, InvalidShowException) as error:
                log.debug('{error}', {'error': error})
                continue

            if not cur_proper.parse_result.proper_tags:
                log.info('Skipping non-proper: {name}',
                         {'name': cur_proper.name})
                continue

            log.debug(
                'Proper tags for {proper}: {tags}', {
                    'proper': cur_proper.name,
                    'tags': cur_proper.parse_result.proper_tags
                })

            if not cur_proper.parse_result.series_name:
                log.debug('Ignoring invalid show: {name}',
                          {'name': cur_proper.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({
                        'name': cur_proper.name,
                        'date': cur_proper.date
                    })
                continue

            if not cur_proper.parse_result.episode_numbers:
                log.debug('Ignoring full season instead of episode: {name}',
                          {'name': cur_proper.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({
                        'name': cur_proper.name,
                        'date': cur_proper.date
                    })
                continue

            log.debug(
                'Successful match! Matched {original_name} to show {new_name}',
                {
                    'original_name': cur_proper.parse_result.original_name,
                    'new_name': cur_proper.parse_result.show.name
                })

            # Map the indexerid in the db to the show's indexerid
            cur_proper.indexerid = cur_proper.parse_result.show.indexerid

            # Map the indexer in the db to the show's indexer
            cur_proper.indexer = cur_proper.parse_result.show.indexer

            # Map our Proper instance
            cur_proper.show = cur_proper.parse_result.show
            cur_proper.actual_season = cur_proper.parse_result.season_number \
                if cur_proper.parse_result.season_number is not None else 1
            cur_proper.actual_episodes = cur_proper.parse_result.episode_numbers
            cur_proper.release_group = cur_proper.parse_result.release_group
            cur_proper.version = cur_proper.parse_result.version
            cur_proper.quality = cur_proper.parse_result.quality
            cur_proper.content = None
            cur_proper.proper_tags = cur_proper.parse_result.proper_tags

            # filter release, in this case, it's just a quality gate. As we only send one result.
            best_result = pick_best_result(cur_proper)

            if not best_result:
                log.info('Rejected proper: {name}', {'name': cur_proper.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({
                        'name': cur_proper.name,
                        'date': cur_proper.date
                    })
                continue

            # only get anime proper if it has release group and version
            if best_result.show.is_anime:
                if not best_result.release_group and best_result.version == -1:
                    log.info(
                        'Ignoring proper without release group and version: {name}',
                        {'name': best_result.name})
                    if cur_proper.name not in processed_propers_names:
                        self.processed_propers.append({
                            'name': cur_proper.name,
                            'date': cur_proper.date
                        })
                    continue

            # check if we have the episode as DOWNLOADED
            main_db_con = db.DBConnection()
            sql_results = main_db_con.select(
                b"SELECT status, release_name FROM tv_episodes WHERE "
                b"showid = ? AND season = ? AND episode = ? AND status LIKE '%04'",
                [
                    best_result.indexerid, best_result.actual_season,
                    best_result.actual_episodes[0]
                ])
            if not sql_results:
                log.info(
                    "Ignoring proper because this episode doesn't have 'DOWNLOADED' status: {name}",
                    {'name': best_result.name})
                continue

            # only keep the proper if we have already downloaded an episode with the same quality
            _, old_quality = Quality.split_composite_status(
                int(sql_results[0][b'status']))
            if old_quality != best_result.quality:
                log.info(
                    'Ignoring proper because quality is different: {name}',
                    {'name': best_result.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({
                        'name': cur_proper.name,
                        'date': cur_proper.date
                    })
                continue

            # only keep the proper if we have already downloaded an episode with the same codec
            release_name = sql_results[0][b'release_name']
            if release_name:
                current_codec = NameParser()._parse_string(
                    release_name).video_codec
                # Ignore proper if codec differs from downloaded release codec
                if all([
                        current_codec, best_result.parse_result.video_codec,
                        best_result.parse_result.video_codec != current_codec
                ]):
                    log.info(
                        'Ignoring proper because codec is different: {name}',
                        {'name': best_result.name})
                    if best_result.name not in processed_propers_names:
                        self.processed_propers.append({
                            'name': best_result.name,
                            'date': best_result.date
                        })
                    continue
            else:
                log.debug(
                    "Coudn't find a release name in database. Skipping codec comparison for: {name}",
                    {'name': best_result.name})

            # check if we actually want this proper (if it's the right release group and a higher version)
            if best_result.show.is_anime:
                main_db_con = db.DBConnection()
                sql_results = main_db_con.select(
                    b'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                    [
                        best_result.indexerid, best_result.actual_season,
                        best_result.actual_episodes[0]
                    ])

                old_version = int(sql_results[0][b'version'])
                old_release_group = (sql_results[0][b'release_group'])

                if -1 < old_version < best_result.version:
                    log.info(
                        'Found new anime version {new} to replace existing version {old}: {name}',
                        {
                            'old': old_version,
                            'new': best_result.version,
                            'name': best_result.name
                        })
                else:
                    log.info(
                        'Ignoring proper with the same or lower version: {name}',
                        {'name': best_result.name})
                    if cur_proper.name not in processed_propers_names:
                        self.processed_propers.append({
                            'name': best_result.name,
                            'date': best_result.date
                        })
                    continue

                if old_release_group != best_result.release_group:
                    log.info(
                        'Ignoring proper from release group {new} instead of current group {old}',
                        {
                            'new': best_result.release_group,
                            'old': old_release_group
                        })
                    if best_result.name not in processed_propers_names:
                        self.processed_propers.append({
                            'name': best_result.name,
                            'date': best_result.date
                        })
                    continue

            # if the show is in our list and there hasn't been a proper already added for that particular episode
            # then add it to our list of propers
            if best_result.indexerid != -1 and (
                    best_result.indexerid, best_result.actual_season,
                    best_result.actual_episodes[0]) not in map(
                        operator.attrgetter('indexerid', 'actual_season',
                                            'actual_episode'), final_propers):
                log.info('Found a desired proper: {name}',
                         {'name': best_result.name})
                final_propers.append(best_result)

            if best_result.name not in processed_propers_names:
                self.processed_propers.append({
                    'name': best_result.name,
                    'date': best_result.date
                })

        return final_propers
Пример #8
0
    def add_cache_entry(self,
                        name,
                        url,
                        seeders,
                        leechers,
                        size,
                        pubdate,
                        parsed_result=None):
        """Add item into cache database.

        :param name: release name (must be a text string, see assert below)
        :param url: release url
        :param seeders: number of seeders
        :param leechers: number of leechers
        :param size: release size in bytes
        :param pubdate: publication date of the release
        :param parsed_result: optional pre-parsed result; when omitted the
            name is parsed here and the entry is skipped on parse failure
        :return: a (query, args) pair for the cache database, or None when
            the item could not be parsed, lacks a series name, or has no
            usable season/episode information
        """
        try:
            # Use the already passed parsed_result if possible.
            parse_result = parsed_result or NameParser().parse(name)
        except (InvalidNameException, InvalidShowException) as error:
            log.debug('{0}', error)
            return None

        if not parse_result or not parse_result.series_name:
            return None

        # add the parsed result to cache for usage later on
        season = 1
        if parse_result.season_number is not None:
            season = parse_result.season_number

        episodes = parse_result.episode_numbers

        if season is not None and episodes is not None:
            # store episodes as a separated string, e.g. '|1|2|'
            episode_text = '|{0}|'.format('|'.join(
                {str(episode)
                 for episode in episodes if episode}))

            # get the current timestamp
            cur_timestamp = int(time())

            # get quality of release
            quality = parse_result.quality

            assert isinstance(name, text_type)

            # get release group
            release_group = parse_result.release_group

            # get version
            version = parse_result.version

            # Store proper_tags as proper1|proper2|proper3
            proper_tags = '|'.join(parse_result.proper_tags)

            log.debug('Added RSS item: {0} to cache: {1}', name,
                      self.provider_id)
            # NOTE: the SQL template must be a text string, not bytes --
            # bytes objects have no .format() method on Python 3 (on
            # Python 2 b'' is str, so this is equivalent there).
            return [
                'INSERT OR REPLACE INTO [{name}] '
                '   (name, season, episodes, indexerid, url, '
                '    time, quality, release_group, version, '
                '    seeders, leechers, size, pubdate, proper_tags) '
                'VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(
                    name=self.provider_id),
                [
                    name, season, episode_text, parse_result.show.indexerid,
                    url, cur_timestamp, quality, release_group, version,
                    seeders, leechers, size, pubdate, proper_tags
                ]
            ]
Пример #9
0
    def find_search_results(self,
                            series,
                            episodes,
                            search_mode,
                            forced_search=False,
                            download_current_quality=False,
                            manual_search=False,
                            manual_search_type='episode'):
        """
        Search episodes based on param.

        Search the provider using http queries. Results are parsed with
        NameParser, filtered (season/episode/date sanity checks, unless this
        is a manual search), cached, and finally grouped by episode number.

        :param series: Series object
        :param episodes: List of Episode objects
        :param search_mode: 'eponly' or 'sponly'
        :param forced_search: Flag if the search was triggered by a forced search
        :param download_current_quality: Flag if we want to include an already downloaded quality in the new search
        :param manual_search: Flag if the search was triggered by a manual search
        :param manual_search_type: How the manual search was started: For example an 'episode' or 'season'

        :return: A dict of search results, ordered by episode number.
        """
        self._check_auth()
        self.series = series

        # Season-pack search: multiple wanted episodes, or an explicit manual
        # season search, and the provider search mode is season-only.
        season_search = (len(episodes) > 1 or manual_search_type
                         == 'season') and search_mode == 'sponly'
        results = []

        for episode in episodes:
            search_strings = []
            if season_search:
                search_strings = self._get_season_search_strings(episode)
            elif search_mode == 'eponly':
                search_strings = self._get_episode_search_strings(episode)

            for search_string in search_strings:
                # Find results from the provider
                items = self.search(search_string,
                                    ep_obj=episode,
                                    manual_search=manual_search)
                for item in items:
                    result = self.get_result(series=series, item=item)
                    if result not in results:
                        result.quality = Quality.quality_from_name(
                            result.name, series.is_anime)
                        results.append(result)

            # In season search, we can't loop in episodes lists as we
            # only need one episode to get the season string
            if search_mode == 'sponly':
                break

        log.debug('Found {0} unique search results', len(results))

        # sort qualities in descending order
        results.sort(key=operator.attrgetter('quality'), reverse=True)

        # Move through each item and parse with NameParser()
        for search_result in results:

            if forced_search:
                search_result.search_type = FORCED_SEARCH
            search_result.download_current_quality = download_current_quality
            # result_wanted is flipped to False by any of the sanity checks
            # below; the second pass over `results` honours it.
            search_result.result_wanted = True

            try:
                search_result.parsed_result = NameParser(
                    parse_method=('normal', 'anime')[series.is_anime]).parse(
                        search_result.name)
            except (InvalidNameException, InvalidShowException) as error:
                log.debug(
                    'Error during parsing of release name: {release_name}, with error: {error}',
                    {
                        'release_name': search_result.name,
                        'error': error
                    })
                search_result.add_cache_entry = False
                search_result.result_wanted = False
                continue

            # I don't know why i'm doing this. Maybe remove it later on all together, now i've added the parsed_result
            # to the search_result.
            search_result.series = search_result.parsed_result.series
            search_result.quality = search_result.parsed_result.quality
            search_result.release_group = search_result.parsed_result.release_group
            search_result.version = search_result.parsed_result.version
            search_result.actual_season = search_result.parsed_result.season_number
            search_result.actual_episodes = search_result.parsed_result.episode_numbers

            # Manual searches skip all of the filtering below; the user
            # decides what to snatch.
            if not manual_search:
                if not (search_result.series.air_by_date
                        or search_result.series.sports):
                    if search_mode == 'sponly':
                        # A valid season pack must not parse episode numbers.
                        if search_result.parsed_result.episode_numbers:
                            log.debug(
                                'This is supposed to be a season pack search but the result {0} is not a valid '
                                'season pack, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue
                        elif not [
                                ep for ep in episodes
                                if search_result.parsed_result.season_number ==
                            (ep.season, ep.scene_season)[ep.series.is_scene]
                        ]:
                            log.debug(
                                'This season result {0} is for a season we are not searching for, '
                                'skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue
                    else:
                        # I'm going to split these up for better readability
                        # Check if at least got a season parsed.
                        if search_result.parsed_result.season_number is None:
                            log.debug(
                                "The result {0} doesn't seem to have a valid season that we are currently trying to "
                                'snatch, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue

                        # Check if we at least got some episode numbers parsed.
                        if not search_result.parsed_result.episode_numbers:
                            log.debug(
                                "The result {0} doesn't seem to match an episode that we are currently trying to "
                                'snatch, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue

                        # Compare the episodes and season from the result with what was searched.
                        # Scene numbering is used when the searched episode's
                        # series is scene-numbered and has a scene episode set.
                        wanted_ep = False
                        for searched_ep in episodes:
                            if searched_ep.series.is_scene and searched_ep.scene_episode:
                                season = searched_ep.scene_season
                                episode = searched_ep.scene_episode
                            else:
                                season = searched_ep.season
                                episode = searched_ep.episode

                            if (season ==
                                    search_result.parsed_result.season_number
                                    and episode in search_result.parsed_result.
                                    episode_numbers):
                                wanted_ep = True
                                break

                        if not wanted_ep:
                            log.debug(
                                "The result {0} doesn't seem to match an episode that we are currently trying to "
                                'snatch, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue

                    # We've performed some checks to decided if we want to continue with this result.
                    # If we've hit this, that means this is not an air_by_date and not a sports show. And it seems to be
                    # a valid result. Let's store the parsed season and episode number and continue.
                    search_result.actual_season = search_result.parsed_result.season_number
                    search_result.actual_episodes = search_result.parsed_result.episode_numbers
                else:
                    # air_by_date or sportshow.
                    search_result.same_day_special = False

                    if not search_result.parsed_result.is_air_by_date:
                        log.debug(
                            "This is supposed to be a date search but the result {0} didn't parse as one, "
                            'skipping it', search_result.name)
                        search_result.result_wanted = False
                        continue
                    else:
                        # Use a query against the tv_episodes table, to match the parsed air_date against.
                        air_date = search_result.parsed_result.air_date.toordinal(
                        )
                        db = DBConnection()
                        sql_results = db.select(
                            'SELECT season, episode FROM tv_episodes WHERE indexer = ? AND showid = ? AND airdate = ?',
                            [
                                search_result.series.indexer,
                                search_result.series.series_id, air_date
                            ])

                        # Exactly two rows where one is in season 0: treat the
                        # non-specials row as the real episode and flag it as a
                        # same-day special.
                        if len(sql_results) == 2:
                            if int(sql_results[0]['season']) == 0 and int(
                                    sql_results[1]['season']) != 0:
                                search_result.actual_season = int(
                                    sql_results[1]['season'])
                                search_result.actual_episodes = [
                                    int(sql_results[1]['episode'])
                                ]
                                search_result.same_day_special = True
                            elif int(sql_results[1]['season']) == 0 and int(
                                    sql_results[0]['season']) != 0:
                                search_result.actual_season = int(
                                    sql_results[0]['season'])
                                search_result.actual_episodes = [
                                    int(sql_results[0]['episode'])
                                ]
                                search_result.same_day_special = True
                        elif len(sql_results) != 1:
                            log.warning(
                                "Tried to look up the date for the episode {0} but the database didn't return proper "
                                'results, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue

                        # @TODO: Need to verify and test this.
                        if search_result.result_wanted and not search_result.same_day_special:
                            search_result.actual_season = int(
                                sql_results[0]['season'])
                            search_result.actual_episodes = [
                                int(sql_results[0]['episode'])
                            ]

        final_results = {}
        cl = []
        # Iterate again over the search results, and see if there is anything we want.
        for search_result in results:

            # Try to cache the item if we want to.
            cache_result = search_result.add_result_to_cache(self.cache)
            if cache_result is not None:
                cl.append(cache_result)

            if not search_result.result_wanted:
                log.debug(
                    "We aren't interested in this result: {0} with url: {1}",
                    search_result.name, search_result.url)
                continue

            log.debug('Found result {0} at {1}', search_result.name,
                      search_result.url)

            search_result.update_search_result()

            if search_result.episode_number == SEASON_RESULT:
                log.debug('Found season pack result {0} at {1}',
                          search_result.name, search_result.url)
            elif search_result.episode_number == MULTI_EP_RESULT:
                log.debug(
                    'Found multi-episode ({0}) result {1} at {2}', ', '.join(
                        map(str, search_result.parsed_result.episode_numbers)),
                    search_result.name, search_result.url)
            else:
                log.debug('Found single episode result {0} at {1}',
                          search_result.name, search_result.url)

            # Group results by episode number for the caller.
            if search_result.episode_number not in final_results:
                final_results[search_result.episode_number] = [search_result]
            else:
                final_results[search_result.episode_number].append(
                    search_result)

        if cl:
            # Access to a protected member of a client class
            db = self.cache._get_db()
            db.mass_action(cl)

        return final_results
Пример #10
0
    def add_cache_entry(self, search_result, parsed_result=None):
        """Add item into cache database.

        Inserts a new row for the result, or updates the existing row when an
        entry with the same identifier is already cached.

        :param search_result: the search result to cache
        :param parsed_result: optional pre-parsed name result; when omitted
            the result's name is parsed here
        :return: a (query, args) pair for the cache database, or None when
            the name cannot be parsed, has no series name, or lacks usable
            season/episode information
        """
        if parsed_result is None:
            try:
                parsed_result = NameParser().parse(search_result.name)
            except (InvalidNameException, InvalidShowException) as error:
                log.debug('{0}', error)
                return None

        if not parsed_result or not parsed_result.series_name:
            return None

        # Fall back to season 1 when no season number was parsed.
        season = 1 if parsed_result.season_number is None else parsed_result.season_number
        episodes = parsed_result.episode_numbers

        # Without both a season and an episode list there is nothing to cache.
        if season is None or episodes is None:
            return None

        # Episodes are stored as a pipe-separated string, e.g. '|1|2|'.
        episode_text = '|{0}|'.format(
            '|'.join({str(episode) for episode in episodes if episode})
        )

        # Timestamp used both for `time` and `date_added`.
        cur_timestamp = int(time())

        quality = parsed_result.quality

        name = search_result.name
        assert isinstance(name, text_type)

        release_group = parsed_result.release_group
        version = parsed_result.version

        # Store proper_tags as proper1|proper2|proper3
        proper_tags = '|'.join(parsed_result.proper_tags)

        identifier = self._get_identifier(search_result)
        url = search_result.url
        seeders = search_result.seeders
        leechers = search_result.leechers
        size = search_result.size
        pubdate = search_result.pubdate

        if self.item_in_cache(identifier):
            # Refresh the existing cache row in place.
            log.debug('Updating item: {0} to cache: {1}', name, self.provider_id)
            return [
                'UPDATE [{name}] '
                'SET name=?, url=?, season=?, episodes=?, indexer=?, indexerid=?, '
                '    time=?, quality=?, release_group=?, version=?, '
                '    seeders=?, leechers=?, size=?, pubdate=?, proper_tags=? '
                'WHERE identifier=?'.format(
                    name=self.provider_id
                ),
                [name, url, season, episode_text, parsed_result.series.indexer, parsed_result.series.series_id,
                 cur_timestamp, quality, release_group, version,
                 seeders, leechers, size, pubdate, proper_tags, identifier]
            ]

        # First time we see this identifier: insert a fresh row.
        log.debug('Added item: {0} to cache: {1} with url: {2}', name, self.provider_id, url)
        return [
            'INSERT INTO [{name}] '
            '   (identifier, name, season, episodes, indexerid, url, time, quality, '
            '    release_group, version, seeders, leechers, size, pubdate, '
            '    proper_tags, date_added, indexer ) '
            'VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(
                name=self.provider_id
            ),
            [identifier, name, season, episode_text, parsed_result.series.series_id, url,
             cur_timestamp, quality, release_group, version,
             seeders, leechers, size, pubdate, proper_tags, cur_timestamp, parsed_result.series.indexer]
        ]
Пример #11
0
def split_result(obj):
    """
    Split obj into separate episodes.

    Downloads the season NZB, splits it into per-episode NZBs, and builds a
    search result for every wanted episode.

    :param obj: season search result to split
    :return: a list of episode result objects or an empty list
    """
    # TODO: Check if this needs exception handling.
    url_data = session.get(obj.url).content
    if url_data is None:
        log.error(u'Unable to load url {0}, can\'t download season NZB',
                  obj.url)
        return []

    # parse the season ep name
    try:
        parsed_obj = NameParser(series=obj.series).parse(obj.name)
    except (InvalidNameException, InvalidShowException) as error:
        log.debug(u'{}', error)
        return []

    # bust it up
    season = 1 if parsed_obj.season_number is None else parsed_obj.season_number

    separate_nzbs, xmlns = get_season_nzbs(obj.name, url_data, season)

    result_list = []

    # TODO: Re-evaluate this whole section
    #   If we have valid results and hit an exception, we ignore the results found so far.
    #   Maybe we should return the results found or possibly continue with the next iteration of the loop
    #   Also maybe turn this into a function and generate the results_list with a list comprehension instead
    for new_nzb in separate_nzbs:
        log.debug(u'Split out {new_nzb} from {name}', {
            'new_nzb': new_nzb,
            'name': obj.name
        })

        # parse the name
        try:
            parsed_obj = NameParser(series=obj.series).parse(new_nzb)
        except (InvalidNameException, InvalidShowException) as error:
            log.debug(u'{}', error)
            return []

        # make sure the result is sane:
        # an episode NZB with no parsed season number is acceptable only when
        # the pack defaulted to season 1; otherwise its season must match the
        # pack's. (The previous check compared `None != season` directly, which
        # is always true for an int season and so rejected every NZB without a
        # season number, making the explicit `is None and season != 1` clause
        # unreachable.)
        if ((parsed_obj.season_number is not None
             and parsed_obj.season_number != season)
                or (parsed_obj.season_number is None and season != 1)):
            # pylint: disable=no-member
            log.warning(
                u'Found {new_nzb} inside {name} but it doesn\'t seem to belong to the same season, ignoring it',
                {
                    'new_nzb': new_nzb,
                    'name': obj.name
                })
            continue
        elif not parsed_obj.episode_numbers:
            # pylint: disable=no-member
            log.warning(
                u'Found {new_nzb} inside {name} but it doesn\'t seem to be a valid episode NZB, ignoring it',
                {
                    'new_nzb': new_nzb,
                    'name': obj.name
                })
            continue

        # Skip the NZB when any of its episodes is not wanted at this quality.
        want_ep = True
        for ep_num in parsed_obj.episode_numbers:
            if not obj.extraInfo[0].want_episode(season, ep_num, obj.quality):
                log.debug(u'Ignoring result: {0}', new_nzb)
                want_ep = False
                break
        if not want_ep:
            continue

        # get all the associated episode objects
        ep_obj_list = [
            obj.extraInfo[0].get_episode(season, ep)
            for ep in parsed_obj.episode_numbers
        ]

        # make a result
        cur_obj = classes.NZBDataSearchResult(ep_obj_list)
        cur_obj.name = new_nzb
        cur_obj.provider = obj.provider
        cur_obj.quality = obj.quality
        cur_obj.extraInfo = [create_nzb_string(separate_nzbs[new_nzb], xmlns)]

        result_list.append(cur_obj)

    return result_list