Example #1
def search_for_needed_episodes(force=False):
    """
    Check providers for details on wanted episodes.

    :return: episodes we have a search hit for
    """
    found_results = {}

    show_list = app.showList
    from_date = datetime.date.fromordinal(1)
    episodes = []

    for cur_show in show_list:
        if cur_show.paused:
            log.debug(
                u'Not checking for needed episodes of {0} because the show is paused',
                cur_show.name)
            continue
        episodes.extend(wanted_episodes(cur_show, from_date))

    if not episodes and not force:
    # nothing wanted, so exit early: avoid whatever arbitrarily complex
    # work a provider cache update entails, for example, reading RSS feeds
        return list(itervalues(found_results))

    original_thread_name = threading.currentThread().name

    providers = enabled_providers(u'daily')

    if not providers:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for daily searches.'
            u' Please check your settings')
        return list(itervalues(found_results))

    log.info(u'Using daily search providers')
    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)
        cur_provider.cache.update_cache()

    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)
        try:
            cur_found_results = cur_provider.search_rss(episodes)
        except AuthException as error:
            log.error(u'Authentication error: {0}', ex(error))
            continue

        # pick a single result for each episode, respecting existing results
        for cur_ep in cur_found_results:
            if not cur_ep.series or cur_ep.series.paused:
                log.debug(u'Skipping {0} because the show is paused',
                          cur_ep.pretty_name())
                continue

            best_result = pick_best_result(cur_found_results[cur_ep])

            # if all results were rejected move on to the next episode
            if not best_result:
                log.debug(u'All found results for {0} were rejected.',
                          cur_ep.pretty_name())
                continue

            # if it's already in the list (from another provider) and the newly found quality is no better, skip it
            if cur_ep in found_results and best_result.quality <= found_results[
                    cur_ep].quality:
                continue

            # Skip the result if search delay is enabled for the provider.
            if delay_search(best_result):
                continue

            found_results[cur_ep] = best_result

    threading.currentThread().name = original_thread_name

    return list(itervalues(found_results))
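
All of these examples repeat the same save/rename/restore dance around the worker thread's name so that log lines are tagged with the active provider. A minimal, self-contained sketch of that pattern (the helper name run_with_provider_names and the provider strings are illustrative, not part of the original code):

import threading

def run_with_provider_names(providers, work):
    """Tag the current thread's name per provider, then restore it."""
    original_thread_name = threading.current_thread().name
    try:
        for provider in providers:
            threading.current_thread().name = u'{thread} :: [{provider}]'.format(
                thread=original_thread_name, provider=provider)
            work(provider)
    finally:
        # Restore the original name even if a provider raises.
        threading.current_thread().name = original_thread_name

run_with_provider_names(
    ['ProviderA', 'ProviderB'],
    lambda p: print(threading.current_thread().name))
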
Example #2
def search_providers(series_obj,
                     episodes,
                     forced_search=False,
                     down_cur_quality=False,
                     manual_search=False,
                     manual_search_type=u'episode'):
    """
    Walk providers for information on shows.

    :param series_obj: Show we are looking for
    :param episodes: List, episodes we hope to find
    :param forced_search: Boolean, is this a forced search?
    :param down_cur_quality: Boolean, should we re-download currently available quality file
    :param manual_search: Boolean, should we choose what to download?
    :param manual_search_type: Episode or Season search
    :return: results for search
    """
    found_results = {}
    final_results = []
    manual_search_results = []

    # build name cache for show
    name_cache.build_name_cache(series_obj)

    original_thread_name = threading.currentThread().name

    if manual_search:
        log.info(u'Using manual search providers')
        providers = enabled_providers(u'manualsearch')
    else:
        log.info(u'Using backlog search providers')
        providers = enabled_providers(u'backlog')

    if not providers:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for {0} searches.'
            u' Please check your settings',
            'manual' if manual_search else 'backlog')

    threading.currentThread().name = original_thread_name

    for cur_provider in providers:
        threading.currentThread(
        ).name = original_thread_name + u' :: [' + cur_provider.name + u']'

        if cur_provider.anime_only and not series_obj.is_anime:
            log.debug(u'{0} is not an anime, skipping', series_obj.name)
            continue

        found_results[cur_provider.name] = {}

        search_count = 0
        search_mode = cur_provider.search_mode

        # Always search per episode when doing a forced or manual search in sponly mode
        if search_mode == u'sponly' and (forced_search or manual_search):
            search_mode = u'eponly'

        if manual_search and manual_search_type == u'season':
            search_mode = u'sponly'

        while True:
            search_count += 1

            if search_mode == u'eponly':
                log.info(u'Performing episode search for {0}', series_obj.name)
            else:
                log.info(u'Performing season pack search for {0}',
                         series_obj.name)

            try:
                search_results = cur_provider.find_search_results(
                    series_obj, episodes, search_mode, forced_search,
                    down_cur_quality, manual_search, manual_search_type)
            except AuthException as error:
                log.error(u'Authentication error: {0}', ex(error))
                break

            if search_results:
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    if cur_ep in found_results[cur_provider.name]:
                        found_results[cur_provider.
                                      name][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[
                            cur_provider.name][cur_ep] = search_results[cur_ep]

                    # Sort the list by seeders if possible
                    if cur_provider.provider_type == u'torrent' or getattr(
                            cur_provider, u'torznab', None):
                        found_results[cur_provider.name][cur_ep].sort(
                            key=lambda d: int(d.seeders), reverse=True)

                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            # Don't fall back when doing a manual season search
            if manual_search_type == u'season':
                break

            if search_mode == u'sponly':
                log.debug(u'Fallback episode search initiated')
                search_mode = u'eponly'
            else:
                log.debug(u'Fallback season pack search initiated')
                search_mode = u'sponly'

        # skip to next provider if we have no results to process
        if not found_results[cur_provider.name]:
            continue

        # Update the cache if a manual search is being run
        if manual_search:
            # Let's create a list of the episodes we were looking for
            if manual_search_type == u'season':
                # If season search type, we only want season packs
                searched_episode_list = [SEASON_RESULT]
            else:
                searched_episode_list = [
                    episode_obj.episode for episode_obj in episodes
                ] + [MULTI_EP_RESULT]
            for searched_episode in searched_episode_list:
                if (searched_episode in search_results
                        and cur_provider.cache.update_cache_manual_search(
                            search_results[searched_episode])):
                    # If we have at least a result from one provider, it's good enough to be marked as a result
                    manual_search_results.append(True)
            # Continue because we don't want to pick best results as we are running a manual search by user
            continue

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[cur_provider.name]:
            best_season_result = pick_best_result(
                found_results[cur_provider.name][SEASON_RESULT])

        highest_quality_overall = 0
        for cur_episode in found_results[cur_provider.name]:
            for cur_result in found_results[cur_provider.name][cur_episode]:
                if cur_result.quality > highest_quality_overall:
                    highest_quality_overall = cur_result.quality
        log.debug(u'The highest quality of any match is {0}',
                  Quality.qualityStrings[highest_quality_overall])

        # see if every episode is wanted
        if best_season_result:
            searched_seasons = {str(x.season) for x in episodes}

            # get the quality of the season nzb
            season_quality = best_season_result.quality
            log.debug(u'The quality of the season {0} is {1}',
                      best_season_result.provider.provider_type,
                      Quality.qualityStrings[season_quality])
            main_db_con = db.DBConnection()
            selection = main_db_con.select(
                'SELECT episode '
                'FROM tv_episodes '
                'WHERE indexer = ?'
                ' AND showid = ?'
                ' AND ( season IN ( {0} ) )'.format(
                    ','.join(searched_seasons)),
                [series_obj.indexer, series_obj.series_id])
            all_eps = [int(x[b'episode']) for x in selection]
            log.debug(u'Episode list: {0}', all_eps)

            all_wanted = True
            any_wanted = False
            for cur_ep_num in all_eps:
                for season in {x.season for x in episodes}:
                    if not series_obj.want_episode(season, cur_ep_num,
                                                   season_quality,
                                                   down_cur_quality):
                        all_wanted = False
                    else:
                        any_wanted = True

            # if we need every ep in the season and there's nothing better then
            # just download this and be done with it (unless single episodes are preferred)
            if all_wanted and best_season_result.quality == highest_quality_overall:
                log.info(
                    u'All episodes in this season are needed, downloading {0} {1}',
                    best_season_result.provider.provider_type,
                    best_season_result.name)
                ep_objs = []
                for cur_ep_num in all_eps:
                    for season in {x.season for x in episodes}:
                        ep_objs.append(
                            series_obj.get_episode(season, cur_ep_num))
                best_season_result.episodes = ep_objs

                # Remove provider from thread name before return results
                threading.currentThread().name = original_thread_name

                return [best_season_result]

            elif not any_wanted:
                log.debug(
                    u'No episodes in this season are needed at this quality, ignoring {0} {1}',
                    best_season_result.provider.provider_type,
                    best_season_result.name)
            else:
                # Some NZB providers (e.g. Jackett) can also download torrents, but torrents cannot be split like NZB
                if (best_season_result.provider.provider_type
                        == GenericProvider.NZB
                        and not best_season_result.url.endswith(
                            GenericProvider.TORRENT)):
                    log.debug(
                        u'Breaking apart the NZB and adding the individual ones to our results'
                    )

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzb_splitter.split_result(
                        best_season_result)
                    for cur_result in individual_results:
                        if len(cur_result.episodes) == 1:
                            ep_number = cur_result.episodes[0].episode
                        elif len(cur_result.episodes) > 1:
                            ep_number = MULTI_EP_RESULT
                        else:
                            # no episodes parsed for this result; skip it
                            continue

                        if ep_number in found_results[cur_provider.name]:
                            found_results[cur_provider.name][ep_number].append(
                                cur_result)
                        else:
                            found_results[cur_provider.name][ep_number] = [
                                cur_result
                            ]

                # If this is a torrent, all we can do is leech the entire torrent;
                # the user will have to select which eps not to download in their torrent client
                else:
                    # Season result from Torrent Provider must be a full-season torrent,
                    # creating multi-ep result for it.
                    log.info(
                        u'Adding multi-ep result for full-season torrent.'
                        u' Undesired episodes can be skipped in torrent client if desired!'
                    )
                    ep_objs = []
                    for cur_ep_num in all_eps:
                        for season in {x.season for x in episodes}:
                            ep_objs.append(
                                series_obj.get_episode(season, cur_ep_num))
                    best_season_result.episodes = ep_objs

                    if MULTI_EP_RESULT in found_results[cur_provider.name]:
                        found_results[cur_provider.name][
                            MULTI_EP_RESULT].append(best_season_result)
                    else:
                        found_results[cur_provider.name][MULTI_EP_RESULT] = [
                            best_season_result
                        ]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[cur_provider.name]:
            for _multi_result in found_results[
                    cur_provider.name][MULTI_EP_RESULT]:
                log.debug(
                    u'Seeing if we want to bother with multi-episode result {0}',
                    _multi_result.name)

                # Filter result by ignore/required/whitelist/blacklist/quality, etc
                multi_result = pick_best_result(_multi_result)
                if not multi_result:
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    # if we have results for the episode
                    if ep_obj.episode in found_results[cur_provider.name] and \
                            len(found_results[cur_provider.name][ep_obj.episode]) > 0:
                        not_needed_eps.append(ep_obj.episode)
                    else:
                        needed_eps.append(ep_obj.episode)

                log.debug(
                    u'Single-ep check result is needed_eps: {0}, not_needed_eps: {1}',
                    needed_eps, not_needed_eps)

                if not needed_eps:
                    log.debug(
                        u'All of these episodes were covered by single episode results,'
                        u' ignoring this multi-episode result')
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    if ep_obj.episode in multi_results:
                        multi_not_needed_eps.append(ep_obj.episode)
                    else:
                        multi_needed_eps.append(ep_obj.episode)

                log.debug(
                    u'Multi-ep check result is multi_needed_eps: {0}, multi_not_needed_eps: {1}',
                    multi_needed_eps, multi_not_needed_eps)

                if not multi_needed_eps:
                    log.debug(
                        u'All of these episodes were covered by another multi-episode nzb, '
                        u'ignoring this multi-ep result')
                    continue

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result
                    if ep_obj.episode in found_results[cur_provider.name]:
                        log.debug(
                            u'A needed multi-episode result overlaps with a single-episode result for episode {0},'
                            u' removing the single-episode results from the list',
                            ep_obj.episode,
                        )
                        del found_results[cur_provider.name][ep_obj.episode]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        for cur_ep in found_results[cur_provider.name]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if not found_results[cur_provider.name][cur_ep]:
                continue

            # if all results were rejected move on to the next episode
            best_result = pick_best_result(
                found_results[cur_provider.name][cur_ep])
            if not best_result:
                continue

            # add result if it's not a duplicate
            found = False
            for i, result in enumerate(final_results):
                for best_resultEp in best_result.episodes:
                    if best_resultEp in result.episodes:
                        if result.quality < best_result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                # Skip the result if search delay is enabled for the provider.
                if not delay_search(best_result):
                    final_results += [best_result]

    # Remove provider from thread name before return results
    threading.currentThread().name = original_thread_name

    if manual_search:
        # Return True if the manual search found any results, else False
        return any(manual_search_results)
    else:
        return final_results
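
The final loop above reduces many candidates to at most one result per episode, replacing a kept result only when a new one is strictly better. A stripped-down sketch of that reduction, using plain dicts in place of the app's result objects (keep_best_per_episode is a hypothetical helper):

def keep_best_per_episode(candidates):
    """Keep at most one result per episode, preferring higher quality."""
    best = {}
    for episode, result in candidates:
        current = best.get(episode)
        if current is None or result['quality'] > current['quality']:
            best[episode] = result
    return best

results = keep_best_per_episode([
    (1, {'name': 'ep1.720p', 'quality': 720}),
    (1, {'name': 'ep1.1080p', 'quality': 1080}),
    (2, {'name': 'ep2.480p', 'quality': 480}),
])
assert results[1]['name'] == 'ep1.1080p'
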
Example #3
def search_providers(series_obj,
                     episodes,
                     forced_search=False,
                     down_cur_quality=False,
                     manual_search=False,
                     manual_search_type=u'episode'):
    """
    Walk providers for information on shows.

    :param series_obj: Show we are looking for
    :param episodes: List, episodes we hope to find
    :param forced_search: Boolean, is this a forced search?
    :param down_cur_quality: Boolean, should we re-download currently available quality file
    :param manual_search: Boolean, should we choose what to download?
    :param manual_search_type: Episode or Season search
    :return: results for search
    """
    found_results = {}
    manual_search_results = []
    multi_results = []
    single_results = []

    # build name cache for show
    name_cache.build_name_cache(series_obj)

    original_thread_name = threading.currentThread().name

    if manual_search:
        log.info(u'Using manual search providers')
        providers = enabled_providers(u'manualsearch')
    else:
        log.info(u'Using backlog search providers')
        providers = enabled_providers(u'backlog')

    if not providers:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for {0} searches.'
            u' Please check your settings',
            'manual' if manual_search else 'backlog')

    threading.currentThread().name = original_thread_name

    for cur_provider in providers:
        threading.currentThread(
        ).name = '{original_thread_name} :: [{provider}]'.format(
            original_thread_name=original_thread_name,
            provider=cur_provider.name)

        if cur_provider.anime_only and not series_obj.is_anime:
            log.debug(u'{0} is not an anime, skipping', series_obj.name)
            continue

        found_results[cur_provider.name] = {}
        search_count = 0
        search_mode = cur_provider.search_mode

        # Always search per episode when doing a forced or manual search in sponly mode
        if search_mode == u'sponly' and (forced_search or manual_search):
            search_mode = u'eponly'

        if manual_search and manual_search_type == u'season':
            search_mode = u'sponly'

        while True:
            search_count += 1

            if search_mode == u'eponly':
                log.info(u'Performing episode search for {0}', series_obj.name)
            else:
                log.info(u'Performing season pack search for {0}',
                         series_obj.name)

            try:
                search_results = []
                cache_search_results = []
                cache_multi = []
                cache_single = []

                if not manual_search:
                    cache_search_results = cur_provider.search_results_in_cache(
                        episodes)
                    if cache_search_results:
                        # From our provider multi_episode and single_episode results, collect candidates.
                        cache_found_results = list_results_for_provider(
                            cache_search_results, found_results, cur_provider)
                        # We're passing the empty lists, because we don't want to include previous candidates
                        cache_multi, cache_single = collect_candidates(
                            cache_found_results, cur_provider, [], [],
                            series_obj, down_cur_quality)

                # For now we only search if we didn't get any results back from the cache,
                # but we might want to check if there was something useful in the cache.
                if not (cache_multi or cache_single):
                    log.debug(
                        u'Could not find any candidates in cache, searching provider.'
                    )
                    search_results = cur_provider.find_search_results(
                        series_obj, episodes, search_mode, forced_search,
                        down_cur_quality, manual_search, manual_search_type)
                    # Update the list found_results
                    found_results = list_results_for_provider(
                        search_results, found_results, cur_provider)
                else:
                    found_results = cache_found_results

            except AuthException as error:
                log.error(u'Authentication error: {0!r}', error)
                break

            if search_results or cache_search_results:
                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            # Don't fall back when doing a manual season search
            if manual_search_type == u'season':
                break

            if search_mode == u'sponly':
                log.debug(u'Fallback episode search initiated')
                search_mode = u'eponly'
            else:
                log.debug(u'Fallback season pack search initiated')
                search_mode = u'sponly'

        # skip to next provider if we have no results to process
        if not found_results[cur_provider.name]:
            continue

        # Update the cache if a manual search is being run
        if manual_search:
            # Let's create a list of the episodes we were looking for
            if manual_search_type == u'season':
                # If season search type, we only want season packs
                searched_episode_list = [SEASON_RESULT]
            else:
                searched_episode_list = [
                    episode_obj.episode for episode_obj in episodes
                ] + [MULTI_EP_RESULT]
            for searched_episode in searched_episode_list:
                if (searched_episode in search_results
                        and cur_provider.cache.update_cache_manual_search(
                            search_results[searched_episode])):
                    # If we have at least a result from one provider, it's good enough to be marked as a result
                    manual_search_results.append(True)
            # Continue because we don't want to pick best results as we are running a manual search by user
            continue

        multi_results, single_results = collect_candidates(
            found_results, cur_provider, multi_results, single_results,
            series_obj, down_cur_quality)

    # Remove provider from thread name before return results
    threading.currentThread().name = original_thread_name

    if manual_search:
        # Return True if the manual search found any results, else False
        return any(manual_search_results)
    else:
        return combine_results(multi_results, single_results)
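
Both versions of search_providers drive each provider through the same two-pass loop: try the preferred search mode, and if nothing comes back and the provider allows fallback, flip between episode and season-pack mode exactly once. A self-contained sketch of that control flow (run_with_fallback and the lambda provider are hypothetical):

def run_with_fallback(search, search_mode, search_fallback=True):
    """Search in one mode; on no results, flip the mode once and retry."""
    search_count = 0
    while True:
        search_count += 1
        results = search(search_mode)
        if results or not search_fallback or search_count == 2:
            return results
        # Flip between season-pack and per-episode mode, as above.
        search_mode = u'eponly' if search_mode == u'sponly' else u'sponly'

# The season-pack pass finds nothing; the episode fallback does.
print(run_with_fallback(lambda mode: [] if mode == u'sponly' else ['hit'], u'sponly'))
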
Example #4
    def _get_proper_results(self):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
        """Retrieve a list of recently aired episodes, and search for these episodes in the different providers."""
        propers = {}

        # For each provider get the list of propers
        original_thread_name = threading.currentThread().name
        providers = enabled_providers('backlog')

        search_date = datetime.datetime.today() - datetime.timedelta(
            days=app.PROPERS_SEARCH_DAYS)
        main_db_con = db.DBConnection()
        if not app.POSTPONE_IF_NO_SUBS:
            # Get the recently aired (last 2 days) shows from DB
            search_q_params = ','.join('?' for _ in Quality.DOWNLOADED)
            recently_aired = main_db_con.select(
                b'SELECT showid, season, episode, status, airdate'
                b' FROM tv_episodes'
                b' WHERE airdate >= ?'
                b' AND status IN ({0})'.format(search_q_params),
                [search_date.toordinal()] + Quality.DOWNLOADED)
        else:
            # Get recently subtitled episodes (last 2 days) from DB
            # Episode status becomes downloaded only after found subtitles
            last_subtitled = search_date.strftime(History.date_format)
            recently_aired = main_db_con.select(
                b'SELECT showid, season, episode FROM history '
                b"WHERE date >= ? AND action LIKE '%10'", [last_subtitled])

        if not recently_aired:
            log.info('No recently aired new episodes, nothing to search for')
            return []

        # Loop through the providers, and search for releases
        for cur_provider in providers:
            threading.currentThread().name = '{thread} :: [{provider}]'.format(
                thread=original_thread_name, provider=cur_provider.name)

            log.info('Searching for any new PROPER releases from {provider}',
                     {'provider': cur_provider.name})

            try:
                cur_propers = cur_provider.find_propers(recently_aired)
            except AuthException as e:
                log.debug('Authentication error: {error}', {'error': ex(e)})
                continue

            # if it hasn't already been added by a different provider, add the proper to the list
            for proper in cur_propers:
                name = self._sanitize_name(proper.name)
                if name not in propers:
                    log.debug('Found new possible proper result: {name}',
                              {'name': proper.name})
                    propers[name] = proper

        threading.currentThread().name = original_thread_name

        # take the list of unique propers and sort it by date, newest first
        sorted_propers = sorted(propers.values(),
                                key=operator.attrgetter('date'),
                                reverse=True)
        final_propers = []

        # Keep only items from last PROPER_SEARCH_DAYS setting in processed propers:
        latest_proper = datetime.datetime.now() - datetime.timedelta(
            days=app.PROPERS_SEARCH_DAYS)
        self.processed_propers = [
            p for p in self.processed_propers if p.get('date') >= latest_proper
        ]

        # Get proper names from processed propers
        processed_propers_names = [
            proper.get('name') for proper in self.processed_propers
            if proper.get('name')
        ]

        for cur_proper in sorted_propers:

            if not self.ignore_processed_propers and cur_proper.name in processed_propers_names:
                log.debug(u'Proper already processed. Skipping: {proper_name}',
                          {'proper_name': cur_proper.name})
                continue

            try:
                cur_proper.parse_result = NameParser().parse(cur_proper.name)
            except (InvalidNameException, InvalidShowException) as error:
                log.debug('{error}', {'error': error})
                continue

            if not cur_proper.parse_result.proper_tags:
                log.info('Skipping non-proper: {name}',
                         {'name': cur_proper.name})
                continue

            log.debug(
                'Proper tags for {proper}: {tags}', {
                    'proper': cur_proper.name,
                    'tags': cur_proper.parse_result.proper_tags
                })

            if not cur_proper.parse_result.series_name:
                log.debug('Ignoring invalid show: {name}',
                          {'name': cur_proper.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({
                        'name': cur_proper.name,
                        'date': cur_proper.date
                    })
                continue

            if not cur_proper.parse_result.episode_numbers:
                log.debug('Ignoring full season instead of episode: {name}',
                          {'name': cur_proper.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({
                        'name': cur_proper.name,
                        'date': cur_proper.date
                    })
                continue

            log.debug(
                'Successful match! Matched {original_name} to show {new_name}',
                {
                    'original_name': cur_proper.parse_result.original_name,
                    'new_name': cur_proper.parse_result.show.name
                })

            # Map the indexerid in the db to the show's indexerid
            cur_proper.indexerid = cur_proper.parse_result.show.indexerid

            # Map the indexer in the db to the show's indexer
            cur_proper.indexer = cur_proper.parse_result.show.indexer

            # Map our Proper instance
            cur_proper.show = cur_proper.parse_result.show
            cur_proper.actual_season = cur_proper.parse_result.season_number \
                if cur_proper.parse_result.season_number is not None else 1
            cur_proper.actual_episodes = cur_proper.parse_result.episode_numbers
            cur_proper.release_group = cur_proper.parse_result.release_group
            cur_proper.version = cur_proper.parse_result.version
            cur_proper.quality = cur_proper.parse_result.quality
            cur_proper.content = None
            cur_proper.proper_tags = cur_proper.parse_result.proper_tags

            # filter the release; in this case it's just a quality gate, as we only send one result
            best_result = pick_best_result(cur_proper)

            if not best_result:
                log.info('Rejected proper: {name}', {'name': cur_proper.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({
                        'name': cur_proper.name,
                        'date': cur_proper.date
                    })
                continue

            # only get anime proper if it has release group and version
            if best_result.show.is_anime:
                if not best_result.release_group and best_result.version == -1:
                    log.info(
                        'Ignoring proper without release group and version: {name}',
                        {'name': best_result.name})
                    if cur_proper.name not in processed_propers_names:
                        self.processed_propers.append({
                            'name': cur_proper.name,
                            'date': cur_proper.date
                        })
                    continue

            # check if we have the episode as DOWNLOADED
            main_db_con = db.DBConnection()
            sql_results = main_db_con.select(
                b"SELECT status, release_name FROM tv_episodes WHERE "
                b"showid = ? AND season = ? AND episode = ? AND status LIKE '%04'",
                [
                    best_result.indexerid, best_result.actual_season,
                    best_result.actual_episodes[0]
                ])
            if not sql_results:
                log.info(
                    "Ignoring proper because this episode doesn't have 'DOWNLOADED' status: {name}",
                    {'name': best_result.name})
                continue

            # only keep the proper if we have already downloaded an episode with the same quality
            _, old_quality = Quality.split_composite_status(
                int(sql_results[0][b'status']))
            if old_quality != best_result.quality:
                log.info(
                    'Ignoring proper because quality is different: {name}',
                    {'name': best_result.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({
                        'name': cur_proper.name,
                        'date': cur_proper.date
                    })
                continue

            # only keep the proper if we have already downloaded an episode with the same codec
            release_name = sql_results[0][b'release_name']
            if release_name:
                current_codec = NameParser()._parse_string(
                    release_name).video_codec
                # Ignore proper if codec differs from downloaded release codec
                if all([
                        current_codec, best_result.parse_result.video_codec,
                        best_result.parse_result.video_codec != current_codec
                ]):
                    log.info(
                        'Ignoring proper because codec is different: {name}',
                        {'name': best_result.name})
                    if best_result.name not in processed_propers_names:
                        self.processed_propers.append({
                            'name': best_result.name,
                            'date': best_result.date
                        })
                    continue
            else:
                log.debug(
                    "Coudn't find a release name in database. Skipping codec comparison for: {name}",
                    {'name': best_result.name})

            # check if we actually want this proper (if it's the right release group and a higher version)
            if best_result.show.is_anime:
                main_db_con = db.DBConnection()
                sql_results = main_db_con.select(
                    b'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                    [
                        best_result.indexerid, best_result.actual_season,
                        best_result.actual_episodes[0]
                    ])

                old_version = int(sql_results[0][b'version'])
                old_release_group = (sql_results[0][b'release_group'])

                if -1 < old_version < best_result.version:
                    log.info(
                        'Found new anime version {new} to replace existing version {old}: {name}',
                        {
                            'old': old_version,
                            'new': best_result.version,
                            'name': best_result.name
                        })
                else:
                    log.info(
                        'Ignoring proper with the same or lower version: {name}',
                        {'name': best_result.name})
                    if cur_proper.name not in processed_propers_names:
                        self.processed_propers.append({
                            'name': best_result.name,
                            'date': best_result.date
                        })
                    continue

                if old_release_group != best_result.release_group:
                    log.info(
                        'Ignoring proper from release group {new} instead of current group {old}',
                        {
                            'new': best_result.release_group,
                            'old': old_release_group
                        })
                    if best_result.name not in processed_propers_names:
                        self.processed_propers.append({
                            'name': best_result.name,
                            'date': best_result.date
                        })
                    continue

            # if the show is in our list and there hasn't been a proper already added for that particular episode
            # then add it to our list of propers
            if best_result.indexerid != -1 and (
                    best_result.indexerid, best_result.actual_season,
                    best_result.actual_episodes[0]) not in map(
                        operator.attrgetter('indexerid', 'actual_season',
                                            'actual_episode'), final_propers):
                log.info('Found a desired proper: {name}',
                         {'name': best_result.name})
                final_propers.append(best_result)

            if best_result.name not in processed_propers_names:
                self.processed_propers.append({
                    'name': best_result.name,
                    'date': best_result.date
                })

        return final_propers
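
One detail worth isolating from _get_proper_results is how the processed-propers list is pruned to the PROPERS_SEARCH_DAYS window before the name check runs. A small sketch of that pruning step (prune_processed is a hypothetical helper; the entries mimic the {'name': ..., 'date': ...} dicts used above):

import datetime

def prune_processed(processed_propers, search_days):
    """Drop processed propers older than the search window."""
    cutoff = datetime.datetime.now() - datetime.timedelta(days=search_days)
    return [p for p in processed_propers if p.get('date') >= cutoff]

now = datetime.datetime.now()
processed = [
    {'name': 'Show.S01E01.PROPER', 'date': now - datetime.timedelta(days=1)},
    {'name': 'Show.S01E02.PROPER', 'date': now - datetime.timedelta(days=10)},
]
# Only the entry from the last 2 days survives.
print([p['name'] for p in prune_processed(processed, search_days=2)])
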
Example #5
def get_provider_cache_results(indexer,
                               show_all_results=None,
                               perform_search=None,
                               show=None,
                               season=None,
                               episode=None,
                               manual_search_type=None,
                               **search_show):
    """Check all provider cache tables for search results."""
    down_cur_quality = 0
    show_obj = Show.find(app.showList, int(show))
    preferred_words = show_obj.show_words().preferred_words
    undesired_words = show_obj.show_words().undesired_words
    ignored_words = show_obj.show_words().ignored_words
    required_words = show_obj.show_words().required_words

    main_db_con = db.DBConnection('cache.db')

    provider_results = {
        'last_prov_updates': {},
        'error': {},
        'found_items': []
    }
    original_thread_name = threading.currentThread().name

    sql_total = []
    combined_sql_q = []
    combined_sql_params = []

    for cur_provider in enabled_providers('manualsearch'):
        threading.currentThread().name = '{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)

        # Let's check if this provider table already exists
        table_exists = main_db_con.select(
            b"SELECT name "
            b"FROM sqlite_master "
            b"WHERE type='table'"
            b" AND name=?", [cur_provider.get_id()])
        columns = [
            i[1] for i in main_db_con.select("PRAGMA table_info('{0}')".format(
                cur_provider.get_id()))
        ] if table_exists else []
        minseed = int(cur_provider.minseed) if getattr(cur_provider, 'minseed',
                                                       None) else -1
        minleech = int(cur_provider.minleech) if getattr(
            cur_provider, 'minleech', None) else -1

        # TODO: the implicit sqlite rowid is used, should be replaced with an explicit PK column
        # If table doesn't exist, start a search to create table and new columns seeders, leechers and size
        required_columns = ['seeders', 'leechers', 'size', 'proper_tags']
        if table_exists and all(required_column in columns
                                for required_column in required_columns):
            # The default sql that's executed for each provider's cache table
            common_sql = (
                b"SELECT rowid, ? AS 'provider_type', ? AS 'provider_image',"
                b" ? AS 'provider', ? AS 'provider_id', ? 'provider_minseed',"
                b" ? 'provider_minleech', name, season, episodes, indexerid,"
                b" url, time, proper_tags, quality, release_group, version,"
                b" seeders, leechers, size, time, pubdate "
                b"FROM '{provider_id}' "
                b"WHERE indexerid = ? AND quality > 0 ".format(
                    provider_id=cur_provider.get_id()))

            # Let's start by adding the default parameters, which are used to substitute the '?'s.
            add_params = [
                cur_provider.provider_type.title(),
                cur_provider.image_name(), cur_provider.name,
                cur_provider.get_id(), minseed, minleech, show
            ]

            if manual_search_type != 'season':
                # If we're not showing all results, add sql to filter on season + episode
                if not int(show_all_results):
                    # If it's an episode search, pass season and episode.
                    common_sql += " AND season = ? AND episodes LIKE ? "
                    add_params += [season, "%|{0}|%".format(episode)]

            else:
                # If we're not showing all results, add sql to filter on season + episode
                if not int(show_all_results):
                    list_of_episodes = '{0}{1}'.format(
                        ' episodes LIKE ', ' AND episodes LIKE '.join(
                            ['?' for _ in show_obj.get_all_episodes(season)]))

                    common_sql += " AND season = ? AND (episodes LIKE ? OR {list_of_episodes})".format(
                        list_of_episodes=list_of_episodes)
                    add_params += [season,
                                   '||']  # When the episodes field is empty.
                    add_params += [
                        '%|{episode}|%'.format(episode=ep.episode)
                        for ep in show_obj.get_all_episodes(season)
                    ]

            # Add the created sql to the lists that are used below to perform one big UNIONed query
            combined_sql_q.append(common_sql)
            combined_sql_params += add_params

            # Get the last updated cache items timestamp
            last_update = main_db_con.select(
                b"SELECT max(time) AS lastupdate "
                b"FROM '{provider_id}'".format(
                    provider_id=cur_provider.get_id()))
            provider_results['last_prov_updates'][
                cur_provider.get_id()] = last_update[0][
                    'lastupdate'] if last_update[0]['lastupdate'] else 0

    # Check if we have the combined sql strings
    if combined_sql_q:
        sql_prepend = b"SELECT * FROM ("
        sql_append = b") ORDER BY CASE quality WHEN '{quality_unknown}' THEN -1 ELSE CAST(quality AS DECIMAL) END DESC, " \
                     b" proper_tags DESC, seeders DESC".format(quality_unknown=Quality.UNKNOWN)

        # Add all results
        sql_total += main_db_con.select(
            b'{0} {1} {2}'.format(sql_prepend,
                                  ' UNION ALL '.join(combined_sql_q),
                                  sql_append), combined_sql_params)

    # Always start a search when no items found in cache
    if not sql_total or int(perform_search):
        # retrieve the episode object and fail if we can't get one
        ep_obj = get_episode(show, season, episode)
        if isinstance(ep_obj, str):
            provider_results[
                'error'] = 'Something went wrong when starting the manual search for show {0}, \
            and episode: {1}x{2}'.format(show_obj.name, season, episode)

        # make a queue item for it and put it on the queue
        ep_queue_item = ForcedSearchQueueItem(ep_obj.series, [ep_obj],
                                              bool(int(down_cur_quality)),
                                              True, manual_search_type)  # pylint: disable=maybe-no-member

        app.forced_search_queue_scheduler.action.add_item(ep_queue_item)

        # give the CPU a break and some time to start the queue
        time.sleep(cpu_presets[app.CPU_PRESET])
    else:
        cached_results = [dict(row) for row in sql_total]
        for i in cached_results:
            i['quality_name'] = Quality.split_quality(int(i['quality']))
            i['time'] = datetime.fromtimestamp(i['time'])
            i['release_group'] = i['release_group'] or 'None'
            i['provider_img_link'] = 'images/providers/' + (
                i['provider_image'] or 'missing.png')
            i['provider'] = i['provider'] if i[
                'provider_image'] else 'missing provider'
            i['proper_tags'] = i['proper_tags'].replace('|', ', ')
            i['pretty_size'] = pretty_file_size(
                i['size']) if i['size'] > -1 else 'N/A'
            i['seeders'] = i['seeders'] if i['seeders'] >= 0 else '-'
            i['leechers'] = i['leechers'] if i['leechers'] >= 0 else '-'
            i['pubdate'] = sbdatetime.convert_to_setting(
                parser.parse(i['pubdate'])).strftime(
                    app.DATE_PRESET + ' ' +
                    app.TIME_PRESET) if i['pubdate'] else '-'
            release_group = i['release_group']
            if ignored_words and release_group in ignored_words:
                i['rg_highlight'] = 'ignored'
            elif required_words and release_group in required_words:
                i['rg_highlight'] = 'required'
            elif preferred_words and release_group in preferred_words:
                i['rg_highlight'] = 'preferred'
            elif undesired_words and release_group in undesired_words:
                i['rg_highlight'] = 'undesired'
            else:
                i['rg_highlight'] = ''
            if contains_at_least_one_word(i['name'], required_words):
                i['name_highlight'] = 'required'
            elif contains_at_least_one_word(
                    i['name'], ignored_words) or not filter_bad_releases(
                        i['name'], parse=False):
                i['name_highlight'] = 'ignored'
            elif contains_at_least_one_word(i['name'], undesired_words):
                i['name_highlight'] = 'undesired'
            elif contains_at_least_one_word(i['name'], preferred_words):
                i['name_highlight'] = 'preferred'
            else:
                i['name_highlight'] = ''
            i['seed_highlight'] = 'ignored' if i.get(
                'provider_minseed') > i.get('seeders', -1) >= 0 else ''
            i['leech_highlight'] = 'ignored' if i.get(
                'provider_minleech') > i.get('leechers', -1) >= 0 else ''
        provider_results['found_items'] = cached_results

    # Remove provider from thread name before return results
    threading.currentThread().name = original_thread_name

    # Sanitize the last_prov_updates key
    provider_results['last_prov_updates'] = json.dumps(
        provider_results['last_prov_updates'])
    return provider_results
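
The core trick in get_provider_cache_results is collecting one parameterized SELECT per provider table and then running them all as a single UNION ALL query. A runnable, self-contained sqlite3 sketch of that idea (the table names and columns here are made up, not the app's schema):

import sqlite3

con = sqlite3.connect(':memory:')
for table in ('prov_a', 'prov_b'):
    con.execute("CREATE TABLE '{0}' (name TEXT, quality INTEGER)".format(table))
    con.execute("INSERT INTO '{0}' VALUES (?, ?)".format(table),
                (table + '.result', 720))

# One SELECT per provider table, plus its bound parameters.
combined_sql_q, combined_sql_params = [], []
for table in ('prov_a', 'prov_b'):
    combined_sql_q.append(
        "SELECT ? AS provider, name, quality FROM '{0}'".format(table))
    combined_sql_params.append(table)

# UNION ALL everything and sort once, as the code above does.
rows = con.execute(
    'SELECT * FROM ({0}) ORDER BY quality DESC'.format(
        ' UNION ALL '.join(combined_sql_q)),
    combined_sql_params).fetchall()
print(rows)
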
Example #6
def search_for_needed_episodes(scheduler_start_time, force=False):
    """Search providers for needed episodes.

    :param scheduler_start_time: timestamp of the start of the search scheduler
    :param force: run the search even if no episodes are needed
    :return: list of found episodes
    """
    show_list = app.showList
    from_date = datetime.date.fromordinal(1)
    episodes = []

    for cur_show in show_list:
        if cur_show.paused:
            log.debug(
                u'Not checking for needed episodes of {0} because the show is paused',
                cur_show.name,
            )
            continue
        episodes.extend(wanted_episodes(cur_show, from_date))

    if not episodes and not force:
        # nothing wanted, so exit early: avoid whatever arbitrarily complex
        # work a provider cache update entails, for example, reading RSS feeds
        return []

    providers = enabled_providers(u'daily')
    if not providers:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for daily searches.'
            u' Please check your settings')
        return []

    original_thread_name = threading.currentThread().name
    log.info(u'Using daily search providers')

    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)
        cur_provider.cache.update_cache(scheduler_start_time)

    single_results = {}
    multi_results = []
    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)
        try:
            found_results = cur_provider.cache.find_needed_episodes(episodes)
        except AuthException as error:
            log.error(u'Authentication error: {0}', ex(error))
            continue

        # pick a single result for each episode, respecting existing results
        for episode_no, results in iteritems(found_results):
            if results[0].series.paused:
                log.debug(u'Skipping {0} because the show is paused.',
                          results[0].series.name)
                continue

            # if all results were rejected move on to the next episode
            wanted_results = filter_results(results)
            if not wanted_results:
                log.debug(u'All found results for {0} were rejected.',
                          results[0].series.name)
                continue

            best_result = pick_result(wanted_results)
            # Skip the result if search delay is enabled for the provider.
            if delay_search(best_result):
                continue

            if episode_no in (SEASON_RESULT, MULTI_EP_RESULT):
                multi_results.append(best_result)
            else:
                # if it's already in the list (from another provider) and
                # the newly found quality is no better, skip it
                if episode_no in single_results:
                    allowed_qualities, preferred_qualities = results[
                        0].series.current_qualities
                    if not Quality.is_higher_quality(
                            single_results[episode_no].quality,
                            best_result.quality, allowed_qualities,
                            preferred_qualities):
                        continue

                single_results[episode_no] = best_result

    threading.currentThread().name = original_thread_name

    return combine_results(multi_results, list(itervalues(single_results)))
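
The routing in the loop above hinges on the SEASON_RESULT and MULTI_EP_RESULT sentinel keys: those go to the multi-result list, while everything else competes per episode number. A minimal sketch of that split (the sentinel values and the split_results helper are illustrative; the real constants live elsewhere in the codebase):

SEASON_RESULT, MULTI_EP_RESULT = -1, -2  # illustrative sentinel values

def split_results(found_results):
    """Route season/multi-episode sentinels away from single-episode results."""
    multi_results, single_results = [], {}
    for episode_no, result in found_results.items():
        if episode_no in (SEASON_RESULT, MULTI_EP_RESULT):
            multi_results.append(result)
        else:
            single_results[episode_no] = result
    return multi_results, list(single_results.values())

print(split_results({1: 'ep1.result', SEASON_RESULT: 'season.pack'}))
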
Example #7
def search_providers(series_obj, episodes, forced_search=False, down_cur_quality=False,
                     manual_search=False, manual_search_type=u'episode'):
    """
    Walk providers for information on shows.

    :param series_obj: Show we are looking for
    :param episodes: List, episodes we hope to find
    :param forced_search: Boolean, is this a forced search?
    :param down_cur_quality: Boolean, should we re-download currently available quality file
    :param manual_search: Boolean, should we choose what to download?
    :param manual_search_type: Episode or Season search
    :return: results for search
    """
    found_results = {}
    manual_search_results = []
    multi_results = []
    single_results = []

    # build name cache for show
    name_cache.build_name_cache(series_obj)

    original_thread_name = threading.currentThread().name

    if manual_search:
        log.info(u'Using manual search providers')
        providers = enabled_providers(u'manualsearch')
    else:
        log.info(u'Using backlog search providers')
        providers = enabled_providers(u'backlog')

    if not providers:
        log.warning(u'No NZB/Torrent providers found or enabled in the application config for {0} searches.'
                    u' Please check your settings', 'manual' if manual_search else 'backlog')

    threading.currentThread().name = original_thread_name

    for cur_provider in providers:
        threading.currentThread().name = '{original_thread_name} :: [{provider}]'.format(
            original_thread_name=original_thread_name, provider=cur_provider.name
        )

        if cur_provider.anime_only and not series_obj.is_anime:
            log.debug(u'{0} is not an anime, skipping', series_obj.name)
            continue

        found_results[cur_provider.name] = {}
        search_count = 0
        search_mode = cur_provider.search_mode

        # Always search per episode when doing a forced or manual search in sponly mode
        if search_mode == u'sponly' and (forced_search or manual_search):
            search_mode = u'eponly'

        if manual_search and manual_search_type == u'season':
            search_mode = u'sponly'

        while True:
            search_count += 1

            if search_mode == u'eponly':
                log.info(u'Performing episode search for {0}', series_obj.name)
            else:
                log.info(u'Performing season pack search for {0}', series_obj.name)

            try:
                search_results = []
                cache_search_results = []
                cache_multi = []
                cache_single = []

                if not manual_search:
                    cache_search_results = cur_provider.search_results_in_cache(episodes)
                    if cache_search_results:
                        # From our provider multi_episode and single_episode results, collect candidates.
                        cache_found_results = list_results_for_provider(cache_search_results, found_results, cur_provider)
                        # We're passing the empty lists, because we don't want to include previous candidates
                        cache_multi, cache_single = collect_candidates(cache_found_results, cur_provider, [],
                                                                       [], series_obj, down_cur_quality)

                # For now we only search if we didn't get any results back from the cache,
                # but we might want to check if there was something useful in the cache.
                if not (cache_multi or cache_single):
                    log.debug(u'Could not find any candidates in cache, searching provider.')
                    search_results = cur_provider.find_search_results(series_obj, episodes, search_mode, forced_search,
                                                                      down_cur_quality, manual_search, manual_search_type)
                    # Update the list found_results
                    found_results = list_results_for_provider(search_results, found_results, cur_provider)
                else:
                    found_results = cache_found_results

            except AuthException as error:
                log.error(u'Authentication error: {0!r}', error)
                break

            if search_results or cache_search_results:
                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            # Don't fall back when doing a manual season search
            if manual_search_type == u'season':
                break

            if search_mode == u'sponly':
                log.debug(u'Fallback episode search initiated')
                search_mode = u'eponly'
            else:
                log.debug(u'Fallback season pack search initiated')
                search_mode = u'sponly'

        # skip to next provider if we have no results to process
        if not found_results[cur_provider.name]:
            continue

        # Update the cache if a manual search is being run
        if manual_search:
            # Let's create a list with the episodes that we were looking for
            if manual_search_type == u'season':
                # If season search type, we only want season packs
                searched_episode_list = [SEASON_RESULT]
            else:
                searched_episode_list = [episode_obj.episode for episode_obj in episodes] + [MULTI_EP_RESULT]
            for searched_episode in searched_episode_list:
                if (searched_episode in search_results and
                        cur_provider.cache.update_cache_manual_search(search_results[searched_episode])):
                    # A result from at least one provider is good enough to mark the search as successful
                    manual_search_results.append(True)
            # Continue, because we don't want to pick best results when the user is running a manual search
            continue

        multi_results, single_results = collect_candidates(found_results, cur_provider, multi_results,
                                                           single_results, series_obj, down_cur_quality)

    # Remove provider from thread name before returning results
    threading.currentThread().name = original_thread_name

    if manual_search:
        # For a manual search, return True if any provider returned results, else False
        return any(manual_search_results)
    else:
        return combine_results(multi_results, single_results)
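

# A minimal, runnable sketch of the fallback loop above: each provider gets at
# most two passes, toggling between episode search (u'eponly') and season-pack
# search (u'sponly') when its search_fallback flag is set. The helper and the
# lambda below are hypothetical stand-ins, not part of the application's API.
def run_with_fallback(search, initial_mode, allow_fallback):
    """Call ``search(mode)`` up to twice, flipping the mode on a miss."""
    mode = initial_mode
    results = search(mode)
    if results or not allow_fallback:
        return results
    # One fallback attempt only, mirroring the ``search_count == 2`` guard above
    mode = u'eponly' if mode == u'sponly' else u'sponly'
    return search(mode)

# Example: a provider that only has per-episode results still yields a hit
# after a failed season-pack pass.
assert run_with_fallback(
    lambda mode: [u'result'] if mode == u'eponly' else [], u'sponly', True)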
Beispiel #8
0
def search_for_needed_episodes(scheduler_start_time, force=False):
    """Search providers for needed episodes.

    :param scheduler_start_time: timestamp of the start of the search scheduler
    :param force: run the search even if no episodes are needed
    :return: list of found episodes
    """
    show_list = app.showList
    from_date = datetime.date.fromordinal(1)
    episodes = []

    for cur_show in show_list:
        if cur_show.paused:
            log.debug(
                u'Not checking for needed episodes of {0} because the show is paused',
                cur_show.name,
            )
            continue
        episodes.extend(wanted_episodes(cur_show, from_date))

    if not episodes and not force:
        # Nothing is wanted, so return early and avoid whatever arbitrarily
        # complex work a provider cache update entails (for example,
        # reading RSS feeds).
        return []

    providers = enabled_providers(u'daily')
    if not providers:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for daily searches.'
            u' Please check your settings'
        )
        return []

    original_thread_name = threading.currentThread().name
    log.info(u'Using daily search providers')

    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name
        )
        cur_provider.cache.update_cache(scheduler_start_time)

    single_results = {}
    multi_results = []
    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name
        )
        try:
            found_results = cur_provider.cache.find_needed_episodes(episodes)
        except AuthException as error:
            log.error(u'Authentication error: {0}', ex(error))
            continue

        # pick a single result for each episode, respecting existing results
        for episode_no, results in iteritems(found_results):
            if results[0].series.paused:
                log.debug(u'Skipping {0} because the show is paused.', results[0].series.name)
                continue

            # if all results were rejected move on to the next episode
            wanted_results = filter_results(results)
            if not wanted_results:
                log.debug(u'All found results for {0} were rejected.', results[0].series.name)
                continue

            best_result = pick_result(wanted_results)
            # Skip the result if search delay is enabled for the provider.
            if delay_search(best_result):
                continue

            if episode_no in (SEASON_RESULT, MULTI_EP_RESULT):
                multi_results.append(best_result)
            else:
                # If it's already in the list (from another provider) and
                # the newly found quality is no better, skip it
                if episode_no in single_results:
                    allowed_qualities, preferred_qualities = results[0].series.current_qualities
                    if not Quality.is_higher_quality(single_results[episode_no].quality,
                                                     best_result.quality, allowed_qualities,
                                                     preferred_qualities):
                        continue

                single_results[episode_no] = best_result

    threading.currentThread().name = original_thread_name

    return combine_results(multi_results, list(itervalues(single_results)))
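

# A small sketch of the bucketing above, under the assumption that
# find_needed_episodes() keys its results by episode number and marks season
# packs and multi-episode releases with the SEASON_RESULT / MULTI_EP_RESULT
# sentinels. The integer quality and is_better() are simplified stand-ins for
# the application's Quality logic.
SEASON_RESULT = -1
MULTI_EP_RESULT = -2

def bucket_results(found_results, is_better):
    """Split {episode_no: [results]} into multi results and best singles."""
    single, multi = {}, []
    for episode_no, results in found_results.items():
        best = max(results, key=lambda r: r['quality'])
        if episode_no in (SEASON_RESULT, MULTI_EP_RESULT):
            multi.append(best)
        elif episode_no not in single or is_better(single[episode_no], best):
            single[episode_no] = best
    return multi, list(single.values())

# Season packs land in `multi`; the best per-episode hit lands in `single`.
multi, single = bucket_results(
    {1: [{'quality': 4}, {'quality': 8}], SEASON_RESULT: [{'quality': 8}]},
    is_better=lambda old, new: new['quality'] > old['quality'])
assert multi == [{'quality': 8}] and single == [{'quality': 8}]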
Beispiel #9
0
def search_providers(series_obj,
                     episodes,
                     forced_search=False,
                     down_cur_quality=False,
                     manual_search=False,
                     manual_search_type=u'episode'):
    """
    Walk providers for information on shows.

    :param series_obj: Show we are looking for
    :param episodes: List, episodes we hope to find
    :param forced_search: Boolean, is this a forced search?
    :param down_cur_quality: Boolean, should we re-download currently available quality file
    :param manual_search: Boolean, should we choose what to download?
    :param manual_search_type: Episode or Season search
    :return: results for search
    """
    found_results = {}
    manual_search_results = []
    multi_results = []
    single_results = []

    # build name cache for show
    name_cache.build_name_cache(series_obj)

    original_thread_name = threading.currentThread().name

    if manual_search:
        log.info(u'Using manual search providers')
        providers = enabled_providers(u'manualsearch')
    else:
        log.info(u'Using backlog search providers')
        providers = enabled_providers(u'backlog')

    if not providers:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for {0} searches.'
            u' Please check your settings',
            'manual' if manual_search else 'backlog')

    threading.currentThread().name = original_thread_name

    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)

        if cur_provider.anime_only and not series_obj.is_anime:
            log.debug(u'{0} is not an anime, skipping', series_obj.name)
            continue

        found_results[cur_provider.name] = {}

        search_count = 0
        search_mode = cur_provider.search_mode

        # Always search per episode when a forced or manual search runs in sponly mode
        if search_mode == u'sponly' and (forced_search or manual_search):
            search_mode = u'eponly'

        if manual_search and manual_search_type == u'season':
            search_mode = u'sponly'

        while True:
            search_count += 1

            if search_mode == u'eponly':
                log.info(u'Performing episode search for {0}', series_obj.name)
            else:
                log.info(u'Performing season pack search for {0}',
                         series_obj.name)

            try:
                search_results = cur_provider.find_search_results(
                    series_obj, episodes, search_mode, forced_search,
                    down_cur_quality, manual_search, manual_search_type)
            except AuthException as error:
                log.error(u'Authentication error: {0}', ex(error))
                break

            if search_results:
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    if cur_ep in found_results[cur_provider.name]:
                        found_results[cur_provider.name][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[cur_provider.name][cur_ep] = search_results[cur_ep]

                    # Sort the list by seeders if possible
                    if cur_provider.provider_type == u'torrent' or getattr(
                            cur_provider, u'torznab', None):
                        found_results[cur_provider.name][cur_ep].sort(
                            key=lambda d: int(d.seeders), reverse=True)

                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            # Don't fall back when doing a manual season search
            if manual_search_type == u'season':
                break

            if search_mode == u'sponly':
                log.debug(u'Fallback episode search initiated')
                search_mode = u'eponly'
            else:
                log.debug(u'Fallback season pack search initiated')
                search_mode = u'sponly'

        # skip to next provider if we have no results to process
        if not found_results[cur_provider.name]:
            continue

        # Update the cache if a manual search is being run
        if manual_search:
            # Let's create a list with the episodes that we were looking for
            if manual_search_type == u'season':
                # If season search type, we only want season packs
                searched_episode_list = [SEASON_RESULT]
            else:
                searched_episode_list = [
                    episode_obj.episode for episode_obj in episodes
                ] + [MULTI_EP_RESULT]
            for searched_episode in searched_episode_list:
                if (searched_episode in search_results
                        and cur_provider.cache.update_cache_manual_search(
                            search_results[searched_episode])):
                    # A result from at least one provider is good enough to mark the search as successful
                    manual_search_results.append(True)
            # Continue, because we don't want to pick best results when the user is running a manual search
            continue

        # Collect candidates for multi-episode or season results
        candidates = (candidate
                      for result, candidate in iteritems(found_results[cur_provider.name])
                      if result in (SEASON_RESULT, MULTI_EP_RESULT))
        candidates = list(itertools.chain(*candidates))
        if candidates:
            multi_results += collect_multi_candidates(
                candidates, series_obj, episodes, down_cur_quality)

        # Collect candidates for single-episode results
        single_results = collect_single_candidates(
            found_results[cur_provider.name], single_results)

    # Remove provider from thread name before returning results
    threading.currentThread().name = original_thread_name

    if manual_search:
        # For a manual search, return True if any provider returned results, else False
        return any(manual_search_results)
    else:
        return combine_results(multi_results, single_results)
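

# A tiny, self-contained illustration of the seeder sort applied above for
# torrent/torznab providers: results are ordered best-seeded first, so the
# downstream picker sees the healthiest torrents earliest. The dicts are
# hypothetical stand-ins for the application's result objects, whose
# seeders value may arrive as a string, hence the int() coercion.
results = [
    {'name': 'Show.S01E01.720p-A', 'seeders': '12'},
    {'name': 'Show.S01E01.720p-B', 'seeders': '340'},
    {'name': 'Show.S01E01.720p-C', 'seeders': '7'},
]
results.sort(key=lambda d: int(d['seeders']), reverse=True)
assert [r['name'] for r in results] == [
    'Show.S01E01.720p-B', 'Show.S01E01.720p-A', 'Show.S01E01.720p-C']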
Beispiel #10
0
    def _get_proper_results(self):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
        """Retrieve a list of recently aired episodes, and search for these episodes in the different providers."""
        propers = {}

        # For each provider get the list of propers
        original_thread_name = threading.currentThread().name
        providers = enabled_providers('backlog')

        search_date = datetime.datetime.today() - datetime.timedelta(days=app.PROPERS_SEARCH_DAYS)
        main_db_con = db.DBConnection()
        if not app.POSTPONE_IF_NO_SUBS:
            # Get episodes that aired recently (within PROPERS_SEARCH_DAYS) with DOWNLOADED status from the DB
            recently_aired = main_db_con.select(
                'SELECT indexer, showid, season, episode, status, airdate'
                ' FROM tv_episodes'
                ' WHERE airdate >= ?'
                ' AND status = ?',
                [search_date.toordinal(), DOWNLOADED]
            )
        else:
            # Get recently subtitled episodes (within PROPERS_SEARCH_DAYS) from the DB.
            # An episode's status only becomes DOWNLOADED after subtitles are found.
            last_subtitled = search_date.strftime(History.date_format)
            recently_aired = main_db_con.select('SELECT indexer_id AS indexer, showid, season, episode FROM history '
                                                'WHERE date >= ? AND action = ?', [last_subtitled, SUBTITLED])

        if not recently_aired:
            log.info('No recently aired new episodes, nothing to search for')
            return []

        # Loop through the providers, and search for releases
        for cur_provider in providers:
            threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name,
                                                                               provider=cur_provider.name)

            log.info('Searching for any new PROPER releases from {provider}', {'provider': cur_provider.name})

            try:
                cur_propers = cur_provider.find_propers(recently_aired)
            except AuthException as e:
                log.debug('Authentication error: {error}', {'error': ex(e)})
                continue

            # If the proper hasn't already been added by a different provider, then add it to the list
            for proper in cur_propers:
                name = self._sanitize_name(proper.name)
                if name not in propers:
                    log.debug('Found new possible proper result: {name}', {'name': proper.name})
                    propers[name] = proper

        threading.currentThread().name = original_thread_name

        # Take the list of unique propers and sort it by date, newest first
        sorted_propers = sorted(list(itervalues(propers)), key=operator.attrgetter('date'), reverse=True)
        final_propers = []

        # Keep only items from the last PROPERS_SEARCH_DAYS days in processed propers:
        latest_proper = datetime.datetime.now() - datetime.timedelta(days=app.PROPERS_SEARCH_DAYS)
        self.processed_propers = [p for p in self.processed_propers if p.get('date') >= latest_proper]

        # Get proper names from processed propers
        processed_propers_names = [proper.get('name') for proper in self.processed_propers if proper.get('name')]

        for cur_proper in sorted_propers:

            if not self.ignore_processed_propers and cur_proper.name in processed_propers_names:
                log.debug(u'Proper already processed. Skipping: {proper_name}', {'proper_name': cur_proper.name})
                continue

            try:
                cur_proper.parse_result = NameParser().parse(cur_proper.name)
            except (InvalidNameException, InvalidShowException) as error:
                log.debug('{error}', {'error': error})
                continue

            if not cur_proper.parse_result.proper_tags:
                log.info('Skipping non-proper: {name}', {'name': cur_proper.name})
                continue

            if not cur_proper.series.episodes.get(cur_proper.parse_result.season_number) or \
                    any([ep for ep in cur_proper.parse_result.episode_numbers
                         if not cur_proper.series.episodes[cur_proper.parse_result.season_number].get(ep)]):
                log.info('Skipping proper for wrong season/episode: {name}', {'name': cur_proper.name})
                continue

            log.debug('Proper tags for {proper}: {tags}', {
                'proper': cur_proper.name,
                'tags': cur_proper.parse_result.proper_tags
            })

            if not cur_proper.parse_result.series_name:
                log.debug('Ignoring invalid show: {name}', {'name': cur_proper.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
                continue

            if not cur_proper.parse_result.episode_numbers:
                log.debug('Ignoring full season instead of episode: {name}', {'name': cur_proper.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
                continue

            log.debug('Successful match! Matched {original_name} to show {new_name}',
                      {'original_name': cur_proper.parse_result.original_name,
                       'new_name': cur_proper.parse_result.series.name
                       })

            # Map the indexerid in the db to the show's indexerid
            cur_proper.indexerid = cur_proper.parse_result.series.indexerid

            # Map the indexer in the db to the show's indexer
            cur_proper.indexer = cur_proper.parse_result.series.indexer

            # Map our Proper instance
            cur_proper.series = cur_proper.parse_result.series
            cur_proper.actual_season = cur_proper.parse_result.season_number \
                if cur_proper.parse_result.season_number is not None else 1
            cur_proper.actual_episodes = cur_proper.parse_result.episode_numbers
            cur_proper.release_group = cur_proper.parse_result.release_group
            cur_proper.version = cur_proper.parse_result.version
            cur_proper.quality = cur_proper.parse_result.quality
            cur_proper.content = None
            cur_proper.proper_tags = cur_proper.parse_result.proper_tags

            # Filter the release; in this case it's just a quality gate, as we only send one result.
            wanted_results = filter_results(cur_proper)
            best_result = pick_result(wanted_results)

            if not best_result:
                log.info('Rejected proper: {name}', {'name': cur_proper.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
                continue

            # Ignore an anime proper that has neither a release group nor a version
            if best_result.series.is_anime:
                if not best_result.release_group and best_result.version == -1:
                    log.info('Ignoring proper without release group and version: {name}', {'name': best_result.name})
                    if cur_proper.name not in processed_propers_names:
                        self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
                    continue

            # check if we have the episode as DOWNLOADED
            main_db_con = db.DBConnection()
            sql_results = main_db_con.select('SELECT quality, release_name '
                                             'FROM tv_episodes WHERE indexer = ? '
                                             'AND showid = ? AND season = ? '
                                             'AND episode = ? AND status = ?',
                                             [best_result.indexer,
                                              best_result.series.indexerid,
                                              best_result.actual_season,
                                              best_result.actual_episodes[0],
                                              DOWNLOADED])
            if not sql_results:
                log.info("Ignoring proper because this episode doesn't have 'DOWNLOADED' status: {name}", {
                    'name': best_result.name
                })
                continue

            # only keep the proper if we have already downloaded an episode with the same quality
            old_quality = int(sql_results[0]['quality'])
            if old_quality != best_result.quality:
                log.info('Ignoring proper because quality is different: {name}', {'name': best_result.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
                continue

            # only keep the proper if we have already downloaded an episode with the same codec
            release_name = sql_results[0]['release_name']
            if release_name:
                release_name_guess = NameParser()._parse_string(release_name)
                current_codec = release_name_guess.video_codec

                # Ignore proper if codec differs from downloaded release codec
                if all([current_codec, best_result.parse_result.video_codec,
                        best_result.parse_result.video_codec != current_codec]):
                    log.info('Ignoring proper because codec is different: {name}', {'name': best_result.name})
                    if best_result.name not in processed_propers_names:
                        self.processed_propers.append({'name': best_result.name, 'date': best_result.date})
                    continue

                streaming_service = release_name_guess.guess.get(u'streaming_service')
                # Ignore proper if streaming service differs from downloaded release streaming service
                if best_result.parse_result.guess.get(u'streaming_service') != streaming_service:
                    log.info('Ignoring proper because streaming service is different: {name}',
                             {'name': best_result.name})
                    if best_result.name not in processed_propers_names:
                        self.processed_propers.append({'name': best_result.name, 'date': best_result.date})
                    continue
            else:
                log.debug("Coudn't find a release name in database. Skipping codec comparison for: {name}", {
                    'name': best_result.name
                })

            # check if we actually want this proper (if it's the right release group and a higher version)
            if best_result.series.is_anime:
                main_db_con = db.DBConnection()
                sql_results = main_db_con.select(
                    'SELECT release_group, version '
                    'FROM tv_episodes WHERE indexer = ? AND showid = ? '
                    'AND season = ? AND episode = ?',
                    [best_result.indexer, best_result.series.indexerid, best_result.actual_season,
                     best_result.actual_episodes[0]])

                old_version = int(sql_results[0]['version'])
                old_release_group = (sql_results[0]['release_group'])

                if -1 < old_version < best_result.version:
                    log.info('Found new anime version {new} to replace existing version {old}: {name}',
                             {'old': old_version,
                              'new': best_result.version,
                              'name': best_result.name
                              })
                else:
                    log.info('Ignoring proper with the same or lower version: {name}', {'name': best_result.name})
                    if best_result.name not in processed_propers_names:
                        self.processed_propers.append({'name': best_result.name, 'date': best_result.date})
                    continue

                if old_release_group != best_result.release_group:
                    log.info('Ignoring proper from release group {new} instead of current group {old}',
                             {'new': best_result.release_group,
                              'old': old_release_group})
                    if best_result.name not in processed_propers_names:
                        self.processed_propers.append({'name': best_result.name, 'date': best_result.date})
                    continue

            # if the show is in our list and there hasn't been a proper already added for that particular episode
            # then add it to our list of propers
            if best_result.indexerid != -1 and (
                best_result.indexerid, best_result.actual_season, best_result.actual_episodes
            ) not in list(map(operator.attrgetter('indexerid', 'actual_season', 'actual_episodes'), final_propers)):
                log.info('Found a desired proper: {name}', {'name': best_result.name})
                final_propers.append(best_result)

            if best_result.name not in processed_propers_names:
                self.processed_propers.append({'name': best_result.name, 'date': best_result.date})

        return final_propers
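

# A minimal sketch of the processed-propers bookkeeping above: entries older
# than the retention window are pruned on every run, so an already-processed
# proper is skipped for at most PROPERS_SEARCH_DAYS days before it can be
# considered again. The module-level constant here is an assumed stand-in for
# app.PROPERS_SEARCH_DAYS.
import datetime

PROPERS_SEARCH_DAYS = 2

def prune_processed(processed_propers, now=None):
    """Keep only propers processed within the last PROPERS_SEARCH_DAYS days."""
    now = now or datetime.datetime.now()
    cutoff = now - datetime.timedelta(days=PROPERS_SEARCH_DAYS)
    return [p for p in processed_propers if p.get('date') >= cutoff]

now = datetime.datetime.now()
seen = [
    {'name': 'Show.S01E01.PROPER-GRP', 'date': now},
    {'name': 'Old.S01E01.PROPER-GRP', 'date': now - datetime.timedelta(days=30)},
]
assert [p['name'] for p in prune_processed(seen, now)] == ['Show.S01E01.PROPER-GRP']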
Beispiel #11
0
def get_provider_cache_results(series_obj, show_all_results=None, perform_search=None,
                               season=None, episode=None, manual_search_type=None, **search_show):
    """Check all provider cache tables for search results."""
    down_cur_quality = 0
    preferred_words = series_obj.show_words().preferred_words
    undesired_words = series_obj.show_words().undesired_words
    ignored_words = series_obj.show_words().ignored_words
    required_words = series_obj.show_words().required_words

    main_db_con = db.DBConnection('cache.db')

    provider_results = {'last_prov_updates': {}, 'error': {}, 'found_items': []}
    original_thread_name = threading.currentThread().name

    cached_results_total = []
    combined_sql_q = []
    combined_sql_params = []

    for cur_provider in enabled_providers('manualsearch'):
        threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name, provider=cur_provider.name)

        # Let's check if this provider table already exists
        table_exists = main_db_con.select(
            'SELECT name '
            'FROM sqlite_master '
            "WHERE type='table'"
            ' AND name=?',
            [cur_provider.get_id()]
        )

        columns = []
        if table_exists:
            table_columns = main_db_con.select("PRAGMA table_info('{0}')".format(cur_provider.get_id()))
            columns = [table_column['name'] for table_column in table_columns]

        minseed = int(cur_provider.minseed) if getattr(cur_provider, 'minseed', None) else -1
        minleech = int(cur_provider.minleech) if getattr(cur_provider, 'minleech', None) else -1

        # TODO: the implicit sqlite rowid is used, should be replaced with an explicit PK column
        # If table doesn't exist, start a search to create table and new columns seeders, leechers and size
        required_columns = ['indexer', 'indexerid', 'seeders', 'leechers', 'size', 'proper_tags', 'date_added']
        if table_exists and all(required_column in columns for required_column in required_columns):
            # The default sql, that's executed for each providers cache table
            common_sql = (
                "SELECT rowid, ? AS 'provider_type', ? AS 'provider_image',"
                " ? AS 'provider', ? AS 'provider_id', ? 'provider_minseed',"
                " ? 'provider_minleech', name, season, episodes, indexer, indexerid,"
                ' url, proper_tags, quality, release_group, version,'
                ' seeders, leechers, size, time, pubdate, date_added '
                "FROM '{provider_id}' "
                'WHERE indexer = ? AND indexerid = ? AND quality > 0 '.format(
                    provider_id=cur_provider.get_id()
                )
            )

            # Let's start by adding the default parameters, which are used to substitute the '?'s.
            add_params = [cur_provider.provider_type.title(), cur_provider.image_name(),
                          cur_provider.name, cur_provider.get_id(), minseed, minleech,
                          series_obj.indexer, series_obj.series_id]

            if manual_search_type != 'season':
                # If we're not showing all results, add the season + episode filter to the sql
                if not int(show_all_results):
                    # If it's an episode search, pass season and episode.
                    common_sql += ' AND season = ? AND episodes LIKE ? '
                    add_params += [season, '%|{0}|%'.format(episode)]

            else:
                # If we're not showing all results, add the season + episodes filter to the sql
                if not int(show_all_results):
                    list_of_episodes = '{0}{1}'.format(' episodes LIKE ', ' AND episodes LIKE '.join(
                        ['?' for _ in series_obj.get_all_episodes(season)]
                    ))

                    common_sql += ' AND season = ? AND (episodes LIKE ? OR {list_of_episodes})'.format(
                        list_of_episodes=list_of_episodes
                    )
                    add_params += [season, '||']  # When the episodes field is empty.
                    add_params += ['%|{episode}|%'.format(episode=ep.episode) for ep in series_obj.get_all_episodes(season)]

            # Add the created sql and parameters to the lists used below to perform one big UNIONed query
            combined_sql_q.append(common_sql)
            combined_sql_params += add_params

            # Get the timestamp of the most recently updated cache item
            last_update = main_db_con.select('SELECT max(time) AS lastupdate '
                                             "FROM '{provider_id}'".format(provider_id=cur_provider.get_id()))
            provider_results['last_prov_updates'][cur_provider.get_id()] = last_update[0]['lastupdate'] if last_update[0]['lastupdate'] else 0

    # Check if we have the combined sql strings
    if combined_sql_q:
        sql_prepend = 'SELECT * FROM ('
        sql_append = ') ORDER BY quality DESC, proper_tags DESC, seeders DESC'

        # Add all results
        cached_results_total += main_db_con.select(
            '{0} {1} {2}'.format(sql_prepend, ' UNION ALL '.join(combined_sql_q), sql_append),
            combined_sql_params)

    # Always start a search when no items found in cache
    if not cached_results_total or int(perform_search):
        # retrieve the episode object and fail if we can't get one
        ep_obj = series_obj.get_episode(season, episode)
        if isinstance(ep_obj, str):
            provider_results['error'] = ('Something went wrong when starting the manual search for show {0},'
                                         ' and episode: {1}x{2}'.format(series_obj.name, season, episode))

        # make a queue item for it and put it on the queue
        ep_queue_item = ForcedSearchQueueItem(ep_obj.series, [ep_obj], bool(int(down_cur_quality)), True, manual_search_type)  # pylint: disable=maybe-no-member

        app.forced_search_queue_scheduler.action.add_item(ep_queue_item)

        # give the CPU a break and some time to start the queue
        time.sleep(cpu_presets[app.CPU_PRESET])
    else:
        for i in cached_results_total:
            threading.currentThread().name = '{thread} :: [{provider}]'.format(
                thread=original_thread_name, provider=i['provider'])

            i['quality_name'] = Quality.split_quality(int(i['quality']))
            i['time'] = datetime.fromtimestamp(i['time'])
            i['release_group'] = i['release_group'] or 'None'
            i['provider_img_link'] = 'images/providers/' + (i['provider_image'] or 'missing.png')
            i['provider'] = i['provider'] if i['provider_image'] else 'missing provider'
            i['proper_tags'] = i['proper_tags'].replace('|', ', ')
            i['pretty_size'] = pretty_file_size(i['size']) if i['size'] > -1 else 'N/A'
            i['seeders'] = i['seeders'] if i['seeders'] >= 0 else '-'
            i['leechers'] = i['leechers'] if i['leechers'] >= 0 else '-'
            i['pubdate'] = parser.parse(i['pubdate']).astimezone(app_timezone) if i['pubdate'] else ''
            i['date_added'] = datetime.fromtimestamp(float(i['date_added']), tz=app_timezone) if i['date_added'] else ''

            release_group = i['release_group']
            if ignored_words and release_group in ignored_words:
                i['rg_highlight'] = 'ignored'
            elif required_words and release_group in required_words:
                i['rg_highlight'] = 'required'
            elif preferred_words and release_group in preferred_words:
                i['rg_highlight'] = 'preferred'
            elif undesired_words and release_group in undesired_words:
                i['rg_highlight'] = 'undesired'
            else:
                i['rg_highlight'] = ''
            if contains_at_least_one_word(i['name'], required_words):
                i['name_highlight'] = 'required'
            elif contains_at_least_one_word(i['name'], ignored_words) or not filter_bad_releases(i['name'], parse=False):
                i['name_highlight'] = 'ignored'
            elif contains_at_least_one_word(i['name'], undesired_words):
                i['name_highlight'] = 'undesired'
            elif contains_at_least_one_word(i['name'], preferred_words):
                i['name_highlight'] = 'preferred'
            else:
                i['name_highlight'] = ''

            i['seed_highlight'] = 'ignored'
            if i['seeders'] == '-' or i['provider_minseed'] <= i['seeders']:
                i['seed_highlight'] = ''

            i['leech_highlight'] = 'ignored'
            if i['leechers'] == '-' or i['provider_minleech'] <= i['leechers']:
                i['leech_highlight'] = ''

        provider_results['found_items'] = cached_results_total

    # Remove provider from thread name before returning results
    threading.currentThread().name = original_thread_name

    # Serialize the last_prov_updates dict to JSON
    provider_results['last_prov_updates'] = json.dumps(provider_results['last_prov_updates'])
    return provider_results
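

# A self-contained sketch of the episode-matching convention assumed above:
# the cache's episodes column appears to hold pipe-delimited episode numbers
# ('|5|6|'), so a single-episode filter becomes LIKE '%|<ep>|%' and '||'
# matches rows whose episode list is empty (season packs). The table name and
# rows below are illustrative only, not the application's real schema.
import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE provider_cache (name TEXT, season INT, episodes TEXT)')
con.executemany(
    'INSERT INTO provider_cache VALUES (?, ?, ?)',
    [('Show.S01E05.720p', 1, '|5|'),
     ('Show.S01E05E06.720p', 1, '|5|6|'),
     ('Show.S01.Season.Pack', 1, '||')],
)
# Matching episode 5 picks up both the single- and the multi-episode release
rows = con.execute(
    'SELECT name FROM provider_cache WHERE season = ? AND episodes LIKE ?',
    [1, '%|{0}|%'.format(5)],
).fetchall()
assert sorted(r[0] for r in rows) == ['Show.S01E05.720p', 'Show.S01E05E06.720p']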