def test_pass_wordlist_checks(self):
    """Drive pass_wordlist_checks() through the ignore/require word cases.

    Each case supplies a release name, an ignore list, a require list, the
    expected boolean outcome and a show object.  A 'regex:' prefix on either
    word list switches the matching mode to regex.
    """
    # default:[] or copy in a test case tuple to debug in isolation
    isolated = []

    # non-empty `isolated` selects only the copied case(s)
    test_cases = [self.cases_pass_wordlist_checks, isolated][len(isolated)]

    def _load_words(word_list, regex_flag, words_attr):
        # a 'regex:' prefix enables regex matching; note that every
        # occurrence of 'regex:' is stripped, not just a leading one
        is_regex = word_list.startswith('regex:')
        setattr(sickbeard, regex_flag, is_regex)
        if is_regex:
            word_list = word_list.replace('regex:', '')
        setattr(sickbeard, words_attr,
                set(w.strip() for w in word_list.split(',') if w.strip()))
        return word_list

    for case_num, case in enumerate(test_cases):
        name, ignore_list, require_list, expected_result, show_obj = case
        if '%02d' in name:
            name = name % case_num
        ignore_list = _load_words(ignore_list, 'IGNORE_WORDS_REGEX',
                                  'IGNORE_WORDS')
        require_list = _load_words(require_list, 'REQUIRE_WORDS_REGEX',
                                   'REQUIRE_WORDS')
        self.assertEqual(
            expected_result,
            show_name_helpers.pass_wordlist_checks(name,
                                                   False,
                                                   show_obj=show_obj),
            'Expected %s with test: "%s" with ignore: "%s", require: "%s"'
            % (expected_result, name, ignore_list, require_list))
    def test_pass_wordlist_checks(self):
        # default:[] or copy in a test case tuple to debug in isolation
        isolated = []

        test_cases = (self.cases_pass_wordlist_checks, isolated)[len(isolated)]
        for case_num, (name, ignore_list, require_list, expected_result) in enumerate(test_cases):
            name = (name, name % case_num)['%02d' in name]
            sickbeard.IGNORE_WORDS = ignore_list
            sickbeard.REQUIRE_WORDS = require_list
            self.assertEqual(expected_result, show_name_helpers.pass_wordlist_checks(name, False),
                             'Expected %s with test: "%s" with ignore: "%s", require: "%s"' %
                             (expected_result, name, ignore_list, require_list))
    def test_pass_wordlist_checks(self):
        # default:[] or copy in a test case tuple to debug in isolation
        isolated = []

        test_cases = (self.cases_pass_wordlist_checks, isolated)[len(isolated)]
        for case_num, (name, ignore_list, require_list,
                       expected_result) in enumerate(test_cases):
            name = name if '%02d' not in name else name % case_num
            sickbeard.IGNORE_WORDS = ignore_list
            sickbeard.REQUIRE_WORDS = require_list
            self.assertEqual(
                expected_result,
                show_name_helpers.pass_wordlist_checks(name, False),
                'Expected %s with test: "%s" with ignore: "%s", require: "%s"'
                % (expected_result, name, ignore_list, require_list))
# Example #4
# 0
def search_providers(show,
                     episodes,
                     manual_search=False,
                     torrent_only=False,
                     try_other_searches=False):
    """Search all active backlog-enabled providers for wanted episodes.

    For each provider, performs an episode or season-pack search (with a
    fallback to the other mode when the provider allows it), filters out
    non-show results, weighs whole-season results against per-episode ones,
    prunes overlapping multi-episode results, and picks the best result per
    episode.

    :param show: show object being searched for
    :param episodes: list of wanted episode objects
    :param manual_search: forwarded to provider.find_search_results
    :param torrent_only: restrict the provider list to torrent providers
    :param try_other_searches: forwarded to provider.find_search_results
    :return: list of search result objects (may be empty)
    """
    found_results = {}
    final_results = []

    search_done = False

    orig_thread_name = threading.currentThread().name

    provider_list = [
        x for x in sickbeard.providers.sortedProviderList()
        if x.is_active() and x.enable_backlog and (
            not torrent_only or x.providerType == GenericProvider.TORRENT)
    ]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name,
                       logger.DEBUG)
            continue

        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name,
                                                         cur_provider.name)
        provider_id = cur_provider.get_id()

        found_results[provider_id] = {}

        search_count = 0
        search_mode = cur_provider.search_mode

        # run up to two passes: the provider's preferred mode, then the
        # other mode as a fallback when enabled
        while True:
            search_count += 1

            if 'eponly' == search_mode:
                logger.log(u'Performing episode search for %s' % show.name)
            else:
                logger.log(u'Performing season pack search for %s' % show.name)

            try:
                cur_provider.cache._clearCache()
                search_results = cur_provider.find_search_results(
                    show,
                    episodes,
                    search_mode,
                    manual_search,
                    try_other_searches=try_other_searches)
                if any(search_results):
                    logger.log(', '.join([
                        '%s %s candidate%s' %
                        (len(v), (('multiep', 'season')[SEASON_RESULT == k],
                                  'episode')['ep' in search_mode],
                         helpers.maybe_plural(len(v)))
                        for (k, v) in search_results.iteritems()
                    ]))
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(
                    u'Error while searching %s, skipping: %s' %
                    (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
                break
            finally:
                threading.currentThread().name = orig_thread_name

            search_done = True

            if len(search_results):
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    # skip non-tv crap
                    search_results[cur_ep] = filter(
                        lambda ep_item: show_name_helpers.pass_wordlist_checks(
                            ep_item.name, parse=False) and ep_item.show ==
                        show, search_results[cur_ep])

                    # bugfix: merge into this provider's bucket (keyed by
                    # episode number) - the outer dict is keyed by provider
                    # id, so the old `cur_ep in found_results` test could
                    # never match an episode number
                    if cur_ep in found_results[provider_id]:
                        found_results[provider_id][cur_ep] += search_results[
                            cur_ep]
                    else:
                        found_results[provider_id][cur_ep] = search_results[
                            cur_ep]

                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
            logger.log(u'Falling back to %s search ...' %
                       ('season pack', 'episode')['ep' in search_mode])

        # skip to next provider if we have no results to process
        if not len(found_results[provider_id]):
            continue

        any_qualities, best_qualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[provider_id]:
            best_season_result = pick_best_result(
                found_results[provider_id][SEASON_RESULT], show,
                any_qualities + best_qualities)

        highest_quality_overall = 0
        for cur_episode in found_results[provider_id]:
            for cur_result in found_results[provider_id][cur_episode]:
                if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
                    highest_quality_overall = cur_result.quality
        logger.log(
            u'%s is the highest quality of any match' %
            Quality.qualityStrings[highest_quality_overall], logger.DEBUG)

        # see if every episode is wanted
        if best_season_result:
            # get the quality of the season nzb
            season_qual = best_season_result.quality
            logger.log(
                u'%s is the quality of the season %s' %
                (Quality.qualityStrings[season_qual],
                 best_season_result.provider.providerType), logger.DEBUG)

            my_db = db.DBConnection()
            sql = 'SELECT episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' %\
                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
            ep_nums = [int(x['episode']) for x in my_db.select(sql)]

            logger.log(u'Executed query: [%s]' % sql)
            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)

            all_wanted = True
            any_wanted = False
            for ep_num in ep_nums:
                for season in set([x.season for x in episodes]):
                    if not show.wantEpisode(season, ep_num, season_qual):
                        all_wanted = False
                    else:
                        any_wanted = True

            # if we need every ep in the season and there's nothing better then just download this and
            # be done with it (unless single episodes are preferred)
            if all_wanted and highest_quality_overall == best_season_result.quality:
                logger.log(
                    u'Every episode in this season is needed, downloading the whole %s %s'
                    % (best_season_result.provider.providerType,
                       best_season_result.name))
                ep_objs = []
                for ep_num in ep_nums:
                    for season in set([x.season for x in episodes]):
                        ep_objs.append(show.getEpisode(season, ep_num))
                best_season_result.episodes = ep_objs

                return [best_season_result]

            elif not any_wanted:
                logger.log(
                    u'No episodes from this season are wanted at this quality, ignoring the result of '
                    + best_season_result.name, logger.DEBUG)
            else:
                if GenericProvider.NZB == best_season_result.provider.providerType:
                    logger.log(
                        u'Breaking apart the NZB and adding the individual ones to our results',
                        logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzbSplitter.splitResult(
                        best_season_result)

                    individual_results = filter(
                        lambda r: show_name_helpers.pass_wordlist_checks(
                            r.name, parse=False) and r.show == show,
                        individual_results)

                    for cur_result in individual_results:
                        if 1 == len(cur_result.episodes):
                            ep_num = cur_result.episodes[0].episode
                        elif 1 < len(cur_result.episodes):
                            ep_num = MULTI_EP_RESULT
                        else:
                            # bugfix: a result with no episodes would reuse a
                            # stale ep_num from the previous iteration (or
                            # raise NameError on the first pass)
                            continue

                        if ep_num in found_results[provider_id]:
                            found_results[provider_id][ep_num].append(
                                cur_result)
                        else:
                            found_results[provider_id][ep_num] = [cur_result]

                # If this is a torrent all we can do is leech the entire torrent,
                # user will have to select which eps not do download in his torrent client
                else:

                    # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it
                    logger.log(
                        u'Adding multi episode result for full season torrent. In your torrent client, set '
                        +
                        u'the episodes that you do not want to "don\'t download"'
                    )
                    ep_objs = []
                    for ep_num in ep_nums:
                        for season in set([x.season for x in episodes]):
                            ep_objs.append(show.getEpisode(season, ep_num))
                    best_season_result.episodes = ep_objs

                    ep_num = MULTI_EP_RESULT
                    if ep_num in found_results[provider_id]:
                        found_results[provider_id][ep_num].append(
                            best_season_result)
                    else:
                        found_results[provider_id][ep_num] = [
                            best_season_result
                        ]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[provider_id]:
            for multi_result in found_results[provider_id][MULTI_EP_RESULT]:

                logger.log(
                    u'Checking usefulness of multi episode result %s' %
                    multi_result.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(
                        multi_result.name, multi_result.size,
                        multi_result.provider.name):
                    logger.log(
                        u'%s has previously failed, rejecting this multi episode result'
                        % multi_result.name)
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    # if we have results for the episode
                    if ep_num in found_results[provider_id] and 0 < len(
                            found_results[provider_id][ep_num]):
                        needed_eps.append(ep_num)
                    else:
                        not_needed_eps.append(ep_num)

                logger.log(
                    u'Single episode check result is... needed episodes: %s, not needed episodes: %s'
                    % (needed_eps, not_needed_eps), logger.DEBUG)

                if not not_needed_eps:
                    logger.log(
                        u'All of these episodes were covered by single episode results, '
                        + 'ignoring this multi episode result', logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in multi_results:
                        multi_not_needed_eps.append(ep_num)
                    else:
                        multi_needed_eps.append(ep_num)

                logger.log(
                    u'Multi episode check result is... multi needed episodes: '
                    + '%s, multi not needed episodes: %s' %
                    (multi_needed_eps, multi_not_needed_eps), logger.DEBUG)

                if not multi_needed_eps:
                    logger.log(
                        u'All of these episodes were covered by another multi episode nzb, '
                        + 'ignoring this multi episode result', logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in found_results[provider_id]:
                        logger.log(
                            u'A needed multi episode result overlaps with a single episode result for episode '
                            +
                            '#%s, removing the single episode results from the list'
                            % ep_num, logger.DEBUG)
                        del found_results[provider_id][ep_num]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        for cur_ep in found_results[provider_id]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if 0 == len(found_results[provider_id][cur_ep]):
                continue

            best_result = pick_best_result(found_results[provider_id][cur_ep],
                                           show)

            # if all results were rejected move on to the next episode
            if not best_result:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType:
                if best_result.url.startswith('magnet'):
                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = None
                else:
                    td = best_result.provider.get_url(best_result.url)
                    if not td:
                        continue
                    if getattr(best_result.provider, 'chk_td', None):
                        name = None
                        try:
                            # minimal bencode walk: pull the 'name' value out
                            # of the torrent metadata without a full decoder
                            hdr = re.findall('(\w+(\d+):)', td[0:6])[0]
                            x, v = len(hdr[0]), int(hdr[1])
                            for item in range(0, 12):
                                y = x + v
                                name = 'name' == td[x:y]
                                w = re.findall('((?:i\d+e|d|l)?(\d+):)',
                                               td[y:y + 32])[0]
                                x, v = y + len(w[0]), int(w[1])
                                if name:
                                    name = td[x:x + v]
                                    break
                        # narrowed from a bare except: which also trapped
                        # SystemExit/KeyboardInterrupt; matches file style
                        except (StandardError, Exception):
                            continue
                        if name:
                            if not pass_show_wordlist_checks(name, show):
                                continue
                            if not show_name_helpers.pass_wordlist_checks(
                                    name):
                                logger.log(
                                    u'Ignored: %s (debug log has detail)' %
                                    name)
                                continue
                            best_result.name = name

                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = td

            # add result if its not a duplicate and
            found = False
            for i, result in enumerate(final_results):
                for best_result_ep in best_result.episodes:
                    if best_result_ep in result.episodes:
                        if best_result.quality > result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wanted_ep_count = 0
        for wanted_ep in episodes:
            for result in final_results:
                if wanted_ep in result.episodes and is_final_result(result):
                    wanted_ep_count += 1

        # make sure we search every provider for results unless we found everything we wanted
        if len(episodes) == wanted_ep_count:
            break

    if not len(provider_list):
        logger.log(
            'No NZB/Torrent sources enabled in Search Provider options to do backlog searches',
            logger.WARNING)
    elif not search_done:
        logger.log(
            'Failed backlog search of %s enabled provider%s. More info in debug log.'
            % (len(provider_list), helpers.maybe_plural(len(provider_list))),
            logger.ERROR)

    return final_results
# Example #5
# 0
def _get_proper_list(aired_since_shows,
                     recent_shows,
                     recent_anime,
                     proper_list=None):
    """Collect and verify proper/repack releases that are worth fetching.

    Queries each active provider for propers (or consumes a pre-fetched
    `proper_list`), keeps only results that parse to a recently-aired show,
    then verifies each candidate against word lists, existing episode
    status/quality in the DB, proper level, webdl source and release group.

    :param aired_since_shows: cutoff passed to provider.find_propers as search_date
    :param recent_shows: list of (indexer, indexerid) tuples for recent shows
    :param recent_anime: list of (indexer, indexerid) tuples for recent anime
    :param proper_list: optional dict of already-found propers keyed by
        provider id; when given, the provider search step is skipped
    :return: list of verified proper objects
    """
    propers = {}

    # for each provider get a list of propers (deduplicated by generic name)
    orig_thread_name = threading.currentThread().name
    providers = [
        x for x in sickbeard.providers.sortedProviderList() if x.is_active()
    ]
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue

        # use pre-fetched results when supplied, otherwise search the provider
        if None is not proper_list:
            found_propers = proper_list.get(cur_provider.get_id(), [])
            if not found_propers:
                continue
        else:
            threading.currentThread(
            ).name = orig_thread_name + ' :: [' + cur_provider.name + ']'

            logger.log(u'Searching for new PROPER releases')

            try:
                found_propers = cur_provider.find_propers(
                    search_date=aired_since_shows,
                    shows=recent_shows,
                    anime=recent_anime)
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
                continue
            except Exception as e:
                logger.log(
                    u'Error while searching ' + cur_provider.name +
                    ', skipping: ' + ex(e), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                continue
            finally:
                # always restore the thread name after the provider call
                threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider then add the proper to the list
        count = 0
        for x in found_propers:
            name = _generic_name(x.name)
            if name not in propers:
                try:
                    np = NameParser(False,
                                    try_scene_exceptions=True,
                                    showObj=x.parsed_show,
                                    indexer_lookup=False)
                    parse_result = np.parse(x.name)
                    # only keep results that parse cleanly and map to a
                    # recently-aired show/anime
                    if parse_result.series_name and parse_result.episode_numbers and \
                            (parse_result.show.indexer, parse_result.show.indexerid) in recent_shows + recent_anime:
                        cur_size = getattr(x, 'size', None)
                        if failed_history.has_failed(x.name, cur_size,
                                                     cur_provider.name):
                            continue
                        logger.log(u'Found new proper: ' + x.name,
                                   logger.DEBUG)
                        x.show = parse_result.show.indexerid
                        x.provider = cur_provider
                        x.is_repack, x.properlevel = Quality.get_proper_level(
                            parse_result.extra_info_no_name(),
                            parse_result.version,
                            parse_result.is_anime,
                            check_is_repack=True)
                        x.is_internal = parse_result.extra_info_no_name() and \
                            re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I)
                        x.codec = _get_codec(parse_result.extra_info_no_name())
                        propers[name] = x
                        count += 1
                except (InvalidNameException, InvalidShowException):
                    continue
                except (StandardError, Exception):
                    continue

        cur_provider.log_result('Propers', count, '%s' % cur_provider.name)

    # take the list of unique propers and sort by proper level then date, best first
    sorted_propers = sorted(propers.values(),
                            key=operator.attrgetter('properlevel', 'date'),
                            reverse=True)
    verified_propers = set()

    for cur_proper in sorted_propers:

        np = NameParser(False,
                        try_scene_exceptions=True,
                        showObj=cur_proper.parsed_show,
                        indexer_lookup=False)
        try:
            parse_result = np.parse(cur_proper.name)
        except (StandardError, Exception):
            continue

        # set the indexerid in the db to the show's indexerid
        cur_proper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
        cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
        cur_proper.episode = parse_result.episode_numbers[0]
        cur_proper.release_group = parse_result.release_group
        cur_proper.version = parse_result.version
        cur_proper.extra_info = parse_result.extra_info
        cur_proper.extra_info_no_name = parse_result.extra_info_no_name
        cur_proper.quality = Quality.nameQuality(cur_proper.name,
                                                 parse_result.is_anime)
        cur_proper.is_anime = parse_result.is_anime

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not cur_proper.release_group and -1 == cur_proper.version:
                logger.log(
                    u'Proper %s doesn\'t have a release group and version, ignoring it'
                    % cur_proper.name, logger.DEBUG)
                continue

        if not show_name_helpers.pass_wordlist_checks(
                cur_proper.name, parse=False, indexer_lookup=False):
            logger.log(
                u'Proper %s isn\'t a valid scene release that we want, ignoring it'
                % cur_proper.name, logger.DEBUG)
            continue

        # apply the show's per-show ignore/require word lists as substring regexes
        re_extras = dict(re_prefix='.*', re_suffix='.*')
        result = show_name_helpers.contains_any(
            cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
        if None is not result and result:
            logger.log(u'Ignored: %s for containing ignore word' %
                       cur_proper.name)
            continue

        result = show_name_helpers.contains_any(
            cur_proper.name, parse_result.show.rls_require_words, **re_extras)
        if None is not result and not result:
            logger.log(
                u'Ignored: %s for not containing any required word match' %
                cur_proper.name)
            continue

        # check if we actually want this proper (if it's the right quality)
        my_db = db.DBConnection()
        sql_results = my_db.select(
            'SELECT release_group, status, version, release_name FROM tv_episodes WHERE showid = ? AND indexer = ? '
            + 'AND season = ? AND episode = ?', [
                cur_proper.indexerid, cur_proper.indexer, cur_proper.season,
                cur_proper.episode
            ])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        # don't take proper of the same level we already downloaded
        old_status, old_quality = Quality.splitCompositeStatus(
            int(sql_results[0]['status']))
        cur_proper.is_repack, cur_proper.proper_level = Quality.get_proper_level(
            cur_proper.extra_info_no_name(),
            cur_proper.version,
            cur_proper.is_anime,
            check_is_repack=True)

        old_release_group = sql_results[0]['release_group']
        # check if we want this release: same quality as current, current has correct status
        # restrict other release group releases to proper's
        if old_status not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED] \
                or cur_proper.quality != old_quality \
                or (cur_proper.is_repack and cur_proper.release_group != old_release_group):
            continue

        # re-parse the stored release name to recover its extra info
        np = NameParser(False,
                        try_scene_exceptions=True,
                        showObj=parse_result.show,
                        indexer_lookup=False)
        try:
            extra_info = np.parse(
                sql_results[0]['release_name']).extra_info_no_name()
        except (StandardError, Exception):
            extra_info = None

        old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \
            get_old_proper_level(parse_result.show, cur_proper.indexer, cur_proper.indexerid, cur_proper.season,
                                 parse_result.episode_numbers, old_status, cur_proper.quality, extra_info,
                                 cur_proper.version, cur_proper.is_anime)

        # fall back to the DB release name when no old name was found
        old_name = (old_name,
                    sql_results[0]['release_name'])[old_name in ('', None)]
        if cur_proper.proper_level < old_proper_level:
            continue
        elif cur_proper.proper_level == old_proper_level:
            # equal level: only accept a codec upgrade or internal -> non-internal
            if '264' == cur_proper.codec and 'xvid' == old_codec:
                pass
            elif old_is_internal and not cur_proper.is_internal:
                pass
            else:
                continue

        log_same_grp = 'Skipping proper from release group: [%s], does not match existing release group: [%s] for [%s]'\
                       % (cur_proper.release_group, old_release_group, cur_proper.name)

        is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL,
                                  Quality.UHD4KWEB) or
                  (old_quality == Quality.SDTV
                   and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W',
                                 str(sql_results[0]['release_name']), re.I)))

        if is_web:
            # web-dl propers must come from the same webdl source type
            old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
            new_webdl_type = get_webdl_type(cur_proper.extra_info_no_name(),
                                            cur_proper.name)
            if old_webdl_type != new_webdl_type:
                logger.log(
                    'Skipping proper webdl source: [%s], does not match existing webdl source: [%s] for [%s]'
                    % (old_webdl_type, new_webdl_type, cur_proper.name),
                    logger.DEBUG)
                continue

        # for webdls, prevent propers from different groups
        if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and cur_proper.release_group != old_release_group:
            logger.log(log_same_grp, logger.DEBUG)
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:

            old_version = int(sql_results[0]['version'])
            if -1 < old_version < cur_proper.version:
                logger.log(u'Found new anime v%s to replace existing v%s' %
                           (cur_proper.version, old_version))
            else:
                continue

            if cur_proper.release_group != old_release_group:
                logger.log(log_same_grp, logger.DEBUG)
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode
        # then add it to our list of propers
        if cur_proper.indexerid != -1:
            if (cur_proper.indexerid, cur_proper.indexer, cur_proper.season,
                    cur_proper.episode) not in map(
                        operator.attrgetter('indexerid', 'indexer', 'season',
                                            'episode'), verified_propers):
                logger.log(u'Found a proper that may be useful: %s' %
                           cur_proper.name)
                verified_propers.add(cur_proper)
            else:
                # same episode already verified: keep only the highest proper level
                rp = set()
                for vp in verified_propers:
                    if vp.indexer == cur_proper.indexer and vp.indexerid == cur_proper.indexerid and \
                                    vp.season == cur_proper.season and vp.episode == cur_proper.episode and \
                                    vp.proper_level < cur_proper.proper_level:
                        rp.add(vp)
                if rp:
                    verified_propers = verified_propers - rp
                    logger.log(u'Found a proper that may be useful: %s' %
                               cur_proper.name)
                    verified_propers.add(cur_proper)

    return list(verified_propers)
# Example #6
# 0
 def _test_pass_wordlist_checks(self, name, expected):
     """Assert that pass_wordlist_checks(name) yields the expected outcome."""
     self.assertEqual(show_name_helpers.pass_wordlist_checks(name), expected)
# Example #7
# 0
def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
    """Search active providers for PROPER/REPACK releases and return those verified as wanted.

    :param aired_since_shows: search cutoff date handed to each provider
    :param recent_shows: indexer ids of recently aired shows
    :param recent_anime: indexer ids of recently aired anime shows
    :return: list of verified proper result objects
    """
    propers = {}

    # for each provider get a list of the
    orig_thread_name = threading.currentThread().name
    providers = [
        x for x in sickbeard.providers.sortedProviderList() if x.is_active()
    ]
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue
        threading.currentThread(
        ).name = orig_thread_name + ' :: [' + cur_provider.name + ']'

        logger.log(u'Searching for new PROPER releases')

        try:
            found_propers = cur_provider.find_propers(
                search_date=aired_since_shows,
                shows=recent_shows,
                anime=recent_anime)
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
            continue
        except Exception as e:
            logger.log(
                u'Error while searching ' + cur_provider.name +
                ', skipping: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
        finally:
            # always restore the thread name, even on error paths
            threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider than add the proper to the list
        count = 0
        np = NameParser(False, try_scene_exceptions=True)
        for x in found_propers:
            name = _generic_name(x.name)
            if name not in propers:
                try:
                    parse_result = np.parse(x.title)
                    if parse_result.series_name and parse_result.episode_numbers and \
                            parse_result.show.indexerid in recent_shows + recent_anime:
                        logger.log(u'Found new proper: ' + x.name,
                                   logger.DEBUG)
                        x.show = parse_result.show.indexerid
                        x.provider = cur_provider
                        propers[name] = x
                        count += 1
                except Exception:
                    continue

        cur_provider.log_result('Propers', count, '%s' % cur_provider.name)

    # take the list of unique propers and get it sorted by
    sorted_propers = sorted(propers.values(),
                            key=operator.attrgetter('date'),
                            reverse=True)
    verified_propers = []

    np = NameParser(False, try_scene_exceptions=True)
    for cur_proper in sorted_propers:

        # Bugfix: parse this proper's own name. Previously this loop reused
        # the stale `parse_result` left over from the provider loop above
        # (i.e. the last item parsed there), so every proper was stamped with
        # the wrong show/season/episode/version data.
        try:
            parse_result = np.parse(cur_proper.name)
        except Exception:
            continue

        # set the indexerid in the db to the show's indexerid
        cur_proper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
        cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
        cur_proper.episode = parse_result.episode_numbers[0]
        cur_proper.release_group = parse_result.release_group
        cur_proper.version = parse_result.version
        cur_proper.quality = Quality.nameQuality(cur_proper.name,
                                                 parse_result.is_anime)

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not cur_proper.release_group and -1 == cur_proper.version:
                logger.log(
                    u'Proper %s doesn\'t have a release group and version, ignoring it'
                    % cur_proper.name, logger.DEBUG)
                continue

        if not show_name_helpers.pass_wordlist_checks(cur_proper.name,
                                                      parse=False):
            logger.log(
                u'Proper %s isn\'t a valid scene release that we want, ignoring it'
                % cur_proper.name, logger.DEBUG)
            continue

        re_extras = dict(re_prefix='.*', re_suffix='.*')
        result = show_name_helpers.contains_any(
            cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
        if None is not result and result:
            logger.log(u'Ignored: %s for containing ignore word' %
                       cur_proper.name)
            continue

        result = show_name_helpers.contains_any(
            cur_proper.name, parse_result.show.rls_require_words, **re_extras)
        if None is not result and not result:
            logger.log(
                u'Ignored: %s for not containing any required word match' %
                cur_proper.name)
            continue

        # check if we actually want this proper (if it's the right quality)
        my_db = db.DBConnection()
        sql_results = my_db.select(
            'SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
            [cur_proper.indexerid, cur_proper.season, cur_proper.episode])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        old_status, old_quality = Quality.splitCompositeStatus(
            int(sql_results[0]['status']))
        if old_status not in (DOWNLOADED,
                              SNATCHED) or cur_proper.quality != old_quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:
            my_db = db.DBConnection()
            sql_results = my_db.select(
                'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                [cur_proper.indexerid, cur_proper.season, cur_proper.episode])

            old_version = int(sql_results[0]['version'])
            old_release_group = (sql_results[0]['release_group'])

            if -1 < old_version < cur_proper.version:
                logger.log(u'Found new anime v%s to replace existing v%s' %
                           (cur_proper.version, old_version))
            else:
                continue

            if cur_proper.release_group != old_release_group:
                logger.log(
                    u'Skipping proper from release group: %s, does not match existing release group: %s'
                    % (cur_proper.release_group, old_release_group))
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode
        # then add it to our list of propers
        if cur_proper.indexerid != -1 and (
                cur_proper.indexerid, cur_proper.season,
                cur_proper.episode) not in map(
                    operator.attrgetter('indexerid', 'season', 'episode'),
                    verified_propers):
            logger.log(u'Found a proper that may be useful: %s' %
                       cur_proper.name)
            verified_propers.append(cur_proper)

    return verified_propers
Example #8
0
    def findNeededEpisodes(self, episode, manualSearch=False):
        """Return cached provider results that match the wanted episode(s).

        :param episode: a single episode object, or a list of episode objects
        :param manualSearch: passed through to showObj.wantEpisode()
        :return: dict mapping episode objects to lists of result objects
        """
        neededEps = {}
        cl = []

        myDB = self.get_db()
        # single episode: one direct query; list: batch one query per episode
        if type(episode) != list:
            sqlResults = myDB.select(
                'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ?',
                [self.providerID, episode.show.indexerid, episode.season, '%|' + str(episode.episode) + '|%'])
        else:
            for epObj in episode:
                cl.append([
                    'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ?'
                    + ' AND episodes LIKE ? AND quality IN (' + ','.join([str(x) for x in epObj.wantedQuality]) + ')',
                    [self.providerID, epObj.show.indexerid, epObj.season, '%|' + str(epObj.episode) + '|%']])
            sqlResults = myDB.mass_action(cl)
            if sqlResults:
                # flatten the per-query result lists into one list of rows
                sqlResults = list(itertools.chain(*sqlResults))

        if not sqlResults:
            # nothing cached; stamp the search time anyway so cache maintenance runs
            self.setLastSearch()
            return neededEps

        # for each cache entry
        for curResult in sqlResults:

            # skip non-tv crap
            if not show_name_helpers.pass_wordlist_checks(curResult['name'], parse=False):
                continue

            # get the show object, or if it's not one of our shows then ignore it
            showObj = helpers.findCertainShow(sickbeard.showList, int(curResult['indexerid']))
            if not showObj:
                continue

            # skip if provider is anime only and show is not anime
            if self.provider.anime_only and not showObj.is_anime:
                logger.log(u'' + str(showObj.name) + ' is not an anime, skipping', logger.DEBUG)
                continue

            # get season and ep data (ignoring multi-eps for now)
            curSeason = int(curResult['season'])
            if curSeason == -1:
                continue
            # episodes column is pipe-delimited, e.g. '|3|'; take the first entry
            curEp = curResult['episodes'].split('|')[1]
            if not curEp:
                continue
            curEp = int(curEp)

            curQuality = int(curResult['quality'])
            curReleaseGroup = curResult['release_group']
            curVersion = curResult['version']

            # if the show says we want that episode then add it to the list
            if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
                logger.log(u'Skipping ' + curResult['name'] + ' because we don\'t want an episode that\'s ' +
                           Quality.qualityStrings[curQuality], logger.DEBUG)
                continue

            epObj = showObj.getEpisode(curSeason, curEp)

            # build a result object
            title = curResult['name']
            url = curResult['url']

            logger.log(u'Found result ' + title + ' at ' + url)

            result = self.provider.get_result([epObj], url)
            if None is result:
                continue
            result.show = showObj
            result.name = title
            result.quality = curQuality
            result.release_group = curReleaseGroup
            result.version = curVersion
            result.content = None

            # add it to the list
            if epObj not in neededEps:
                neededEps[epObj] = [result]
            else:
                neededEps[epObj].append(result)

        # datetime stamp this search so cache gets cleared
        self.setLastSearch()

        return neededEps
 def _test_pass_wordlist_checks(self, name, expected):
     """Assert that pass_wordlist_checks(name) returns the expected verdict."""
     outcome = show_name_helpers.pass_wordlist_checks(name)
     self.assertEqual(outcome, expected)
Example #10
0
def _get_proper_list(aired_since_shows,
                     recent_shows,
                     recent_anime,
                     proper_list=None):
    """Build the list of wanted Proper/Repack releases from active providers.

    :param aired_since_shows: search cutoff date handed to each provider
    :param recent_shows: list of (indexer, indexerid) tuples for recent shows
    :param recent_anime: list of (indexer, indexerid) tuples for recent anime
    :param proper_list: optional pre-fetched mapping of provider id -> propers;
        when given, the live provider search step is skipped
    :return: the unique, vetted Proper objects (dict values)
    """
    propers = {}

    my_db = db.DBConnection()
    # for each provider get a list of arbitrary Propers
    orig_thread_name = threading.currentThread().name
    providers = filter(lambda p: p.is_active(),
                       sickbeard.providers.sortedProviderList())
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue

        if None is not proper_list:
            found_propers = proper_list.get(cur_provider.get_id(), [])
            if not found_propers:
                continue
        else:
            threading.currentThread().name = '%s :: [%s]' % (orig_thread_name,
                                                             cur_provider.name)

            logger.log('Searching for new PROPER releases')

            try:
                found_propers = cur_provider.find_propers(
                    search_date=aired_since_shows,
                    shows=recent_shows,
                    anime=recent_anime)
            except exceptions.AuthException as e:
                logger.log('Authentication error: %s' % ex(e), logger.ERROR)
                continue
            except Exception as e:
                logger.log(
                    'Error while searching %s, skipping: %s' %
                    (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                continue
            finally:
                # always restore the thread name, even on error paths
                threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider than add the Proper to the list
        for cur_proper in found_propers:
            name = _generic_name(cur_proper.name)
            if name in propers:
                continue

            try:
                np = NameParser(False,
                                try_scene_exceptions=True,
                                showObj=cur_proper.parsed_show,
                                indexer_lookup=False)
                parse_result = np.parse(cur_proper.name)
            except (InvalidNameException, InvalidShowException, Exception):
                continue

            # get the show object
            cur_proper.parsed_show = (cur_proper.parsed_show
                                      or helpers.findCertainShow(
                                          sickbeard.showList,
                                          parse_result.show.indexerid))
            if None is cur_proper.parsed_show:
                logger.log(
                    'Skip download; cannot find show with indexerid [%s]' %
                    cur_proper.indexerid, logger.ERROR)
                continue

            cur_proper.indexer = cur_proper.parsed_show.indexer
            cur_proper.indexerid = cur_proper.parsed_show.indexerid

            # require a known show, a parsed name with episodes, and recency
            if not (-1 != cur_proper.indexerid and parse_result.series_name
                    and parse_result.episode_numbers and
                    (cur_proper.indexer, cur_proper.indexerid)
                    in recent_shows + recent_anime):
                continue

            # only get anime Proper if it has release group and version
            if parse_result.is_anime and not parse_result.release_group and -1 == parse_result.version:
                logger.log(
                    'Ignored Proper with no release group and version in name [%s]'
                    % cur_proper.name, logger.DEBUG)
                continue

            if not show_name_helpers.pass_wordlist_checks(
                    cur_proper.name, parse=False, indexer_lookup=False):
                logger.log('Ignored unwanted Proper [%s]' % cur_proper.name,
                           logger.DEBUG)
                continue

            re_x = dict(re_prefix='.*', re_suffix='.*')
            result = show_name_helpers.contains_any(
                cur_proper.name, cur_proper.parsed_show.rls_ignore_words,
                **re_x)
            if None is not result and result:
                logger.log(
                    'Ignored Proper containing ignore word [%s]' %
                    cur_proper.name, logger.DEBUG)
                continue

            result = show_name_helpers.contains_any(
                cur_proper.name, cur_proper.parsed_show.rls_require_words,
                **re_x)
            if None is not result and not result:
                logger.log(
                    'Ignored Proper for not containing any required word [%s]'
                    % cur_proper.name, logger.DEBUG)
                continue

            # skip releases previously recorded as failed downloads
            cur_size = getattr(cur_proper, 'size', None)
            if failed_history.has_failed(cur_proper.name, cur_size,
                                         cur_provider.name):
                continue

            cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
            cur_proper.episode = parse_result.episode_numbers[0]
            # check if we actually want this Proper (if it's the right quality)
            sql_results = my_db.select(
                'SELECT release_group, status, version, release_name'
                ' FROM tv_episodes'
                ' WHERE showid = ? AND indexer = ? AND season = ? AND episode = ?'
                ' LIMIT 1', [
                    cur_proper.indexerid, cur_proper.indexer,
                    cur_proper.season, cur_proper.episode
                ])
            if not sql_results:
                continue

            # only keep the Proper if we already retrieved the same quality ep (don't get better/worse ones)
            # check if we want this release: same quality as current, current has correct status
            # restrict other release group releases to Proper's
            old_status, old_quality = Quality.splitCompositeStatus(
                int(sql_results[0]['status']))
            cur_proper.quality = Quality.nameQuality(cur_proper.name,
                                                     parse_result.is_anime)
            cur_proper.is_repack, cur_proper.properlevel = Quality.get_proper_level(
                parse_result.extra_info_no_name(),
                parse_result.version,
                parse_result.is_anime,
                check_is_repack=True)
            cur_proper.proper_level = cur_proper.properlevel  # local non global value
            old_release_group = sql_results[0]['release_group']
            try:
                same_release_group = parse_result.release_group.lower(
                ) == old_release_group.lower()
            except (StandardError, Exception):
                # one of the groups is None / not a string; fall back to equality
                same_release_group = parse_result.release_group == old_release_group
            if old_status not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED] \
                    or cur_proper.quality != old_quality \
                    or (cur_proper.is_repack and not same_release_group):
                continue

            np = NameParser(False,
                            try_scene_exceptions=True,
                            showObj=cur_proper.parsed_show,
                            indexer_lookup=False)
            try:
                extra_info = np.parse(
                    sql_results[0]['release_name']).extra_info_no_name()
            except (StandardError, Exception):
                extra_info = None
            # don't take Proper of the same level we already downloaded
            old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \
                get_old_proper_level(cur_proper.parsed_show, cur_proper.indexer, cur_proper.indexerid,
                                     cur_proper.season, parse_result.episode_numbers,
                                     old_status, cur_proper.quality, extra_info,
                                     parse_result.version, parse_result.is_anime)
            cur_proper.codec = _get_codec(parse_result.extra_info_no_name())
            if cur_proper.proper_level < old_proper_level:
                continue

            cur_proper.is_internal = (parse_result.extra_info_no_name()
                                      and re.search(
                                          r'\binternal\b',
                                          parse_result.extra_info_no_name(),
                                          flags=re.I))
            if cur_proper.proper_level == old_proper_level:
                # NOTE(review): the inner test has no effect as written — both
                # the codec-upgrade branch (`pass`) and the fall-through reach
                # the unconditional `continue` below. Possibly the `continue`
                # was meant to be the `else` of this test; confirm upstream.
                if (('264' == cur_proper.codec and 'xvid' == old_codec)
                        or (old_is_internal and not cur_proper.is_internal)):
                    pass
                continue

            # web-dl detection: either the stored quality is a web quality, or
            # the old release name itself looks like a web rip
            is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL,
                                      Quality.UHD4KWEB)
                      or (old_quality == Quality.SDTV and re.search(
                          r'\Wweb.?(dl|rip|.[hx]26[45])\W',
                          str(sql_results[0]['release_name']), re.I)))

            if is_web:
                old_name = (old_name,
                            sql_results[0]['release_name'])[old_name in ('',
                                                                         None)]
                old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
                new_webdl_type = get_webdl_type(
                    parse_result.extra_info_no_name(), cur_proper.name)
                if old_webdl_type != new_webdl_type:
                    logger.log(
                        'Ignored Proper webdl source [%s], does not match existing webdl source [%s] for [%s]'
                        % (old_webdl_type, new_webdl_type, cur_proper.name),
                        logger.DEBUG)
                    continue

            # for webdls, prevent Propers from different groups
            log_same_grp = 'Ignored Proper from release group [%s] does not match existing group [%s] for [%s]' \
                           % (parse_result.release_group, old_release_group, cur_proper.name)
            if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and not same_release_group:
                logger.log(log_same_grp, logger.DEBUG)
                continue

            # check if we actually want this Proper (if it's the right release group and a higher version)
            if parse_result.is_anime:
                old_version = int(sql_results[0]['version'])
                if not (-1 < old_version < parse_result.version):
                    continue
                if not same_release_group:
                    logger.log(log_same_grp, logger.DEBUG)
                    continue
                found_msg = 'Found anime Proper v%s to replace v%s' % (
                    parse_result.version, old_version)
            else:
                found_msg = 'Found Proper [%s]' % cur_proper.name

            # make sure the episode has been downloaded before
            history_limit = datetime.datetime.today() - datetime.timedelta(
                days=30)
            history_results = my_db.select(
                'SELECT resource FROM history'
                ' WHERE showid = ?'
                ' AND season = ? AND episode = ? AND quality = ? AND date >= ?'
                ' AND (%s)' %
                ' OR '.join('action LIKE "%%%02d"' % x
                            for x in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]), [
                                cur_proper.indexerid, cur_proper.season,
                                cur_proper.episode, cur_proper.quality,
                                history_limit.strftime(history.dateFormat)
                            ])

            # skip if the episode has never downloaded, because a previous quality is required to match the Proper
            if not len(history_results):
                logger.log(
                    'Ignored Proper cannot find a recent history item for [%s]'
                    % cur_proper.name, logger.DEBUG)
                continue

            # make sure that none of the existing history downloads are the same Proper as the download candidate
            clean_proper_name = _generic_name(
                helpers.remove_non_release_groups(
                    cur_proper.name, cur_proper.parsed_show.is_anime))
            is_same = False
            for hitem in history_results:
                # if the result exists in history already we need to skip it
                if clean_proper_name == _generic_name(
                        helpers.remove_non_release_groups(
                            ek.ek(os.path.basename, hitem['resource']))):
                    is_same = True
                    break
            if is_same:
                logger.log('Ignored Proper already in history [%s]' %
                           cur_proper.name)
                continue

            logger.log(found_msg, logger.DEBUG)

            # finish populating the Proper instance
            # cur_proper.show = cur_proper.parsed_show.indexerid
            cur_proper.provider = cur_provider
            cur_proper.extra_info = parse_result.extra_info
            cur_proper.extra_info_no_name = parse_result.extra_info_no_name
            cur_proper.release_group = parse_result.release_group

            cur_proper.is_anime = parse_result.is_anime
            cur_proper.version = parse_result.version

            propers[name] = cur_proper

        cur_provider.log_result('Propers', len(propers),
                                '%s' % cur_provider.name)

    return propers.values()
Example #11
0
def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
    """Search active providers for PROPER/REPACK releases and return those verified as wanted.

    :param aired_since_shows: search cutoff date passed to each provider
    :param recent_shows: indexer ids of recently aired shows
    :param recent_anime: indexer ids of recently aired anime shows
    :return: list of verified proper result objects
    """
    propers = {}

    # for each provider get a list of the
    orig_thread_name = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue
        threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'

        logger.log(u'Searching for new PROPER releases')

        try:
            found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows, anime=recent_anime)
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
            continue
        except Exception as e:
            logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
        finally:
            # always restore the thread name, even on error paths
            threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider than add the proper to the list
        count = 0
        np = NameParser(False, try_scene_exceptions=True)
        for x in found_propers:
            name = _generic_name(x.name)
            if name not in propers:
                try:
                    parse_result = np.parse(x.title)
                    if parse_result.series_name and parse_result.episode_numbers and \
                            parse_result.show.indexerid in recent_shows + recent_anime:
                        logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
                        x.show = parse_result.show.indexerid
                        x.provider = cur_provider
                        propers[name] = x
                        count += 1
                except Exception:
                    continue

        cur_provider.log_result('Propers', count, '%s' % cur_provider.name)

    # take the list of unique propers and get it sorted by
    sorted_propers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    verified_propers = []

    np = NameParser(False, try_scene_exceptions=True)
    for cur_proper in sorted_propers:

        # Bugfix: parse this proper's own name. Previously this loop reused
        # the stale `parse_result` left over from the provider loop above
        # (the last item parsed there), so every proper was stamped with the
        # wrong show/season/episode/version data.
        try:
            parse_result = np.parse(cur_proper.name)
        except Exception:
            continue

        # set the indexerid in the db to the show's indexerid
        cur_proper.indexerid = parse_result.show.indexerid

        # set the indexer in the db to the show's indexer
        cur_proper.indexer = parse_result.show.indexer

        # populate our Proper instance
        cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
        cur_proper.episode = parse_result.episode_numbers[0]
        cur_proper.release_group = parse_result.release_group
        cur_proper.version = parse_result.version
        cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)

        # only get anime proper if it has release group and version
        if parse_result.is_anime:
            if not cur_proper.release_group and -1 == cur_proper.version:
                logger.log(u'Proper %s doesn\'t have a release group and version, ignoring it' % cur_proper.name,
                           logger.DEBUG)
                continue

        if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False):
            logger.log(u'Proper %s isn\'t a valid scene release that we want, ignoring it' % cur_proper.name,
                       logger.DEBUG)
            continue

        re_extras = dict(re_prefix='.*', re_suffix='.*')
        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
        if None is not result and result:
            logger.log(u'Ignored: %s for containing ignore word' % cur_proper.name)
            continue

        result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_require_words, **re_extras)
        if None is not result and not result:
            logger.log(u'Ignored: %s for not containing any required word match' % cur_proper.name)
            continue

        # check if we actually want this proper (if it's the right quality)
        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                                   [cur_proper.indexerid, cur_proper.season, cur_proper.episode])
        if not sql_results:
            continue

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
        if old_status not in (DOWNLOADED, SNATCHED) or cur_proper.quality != old_quality:
            continue

        # check if we actually want this proper (if it's the right release group and a higher version)
        if parse_result.is_anime:
            my_db = db.DBConnection()
            sql_results = my_db.select(
                'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                [cur_proper.indexerid, cur_proper.season, cur_proper.episode])

            old_version = int(sql_results[0]['version'])
            old_release_group = (sql_results[0]['release_group'])

            if -1 < old_version < cur_proper.version:
                logger.log(u'Found new anime v%s to replace existing v%s' % (cur_proper.version, old_version))
            else:
                continue

            if cur_proper.release_group != old_release_group:
                logger.log(u'Skipping proper from release group: %s, does not match existing release group: %s' %
                           (cur_proper.release_group, old_release_group))
                continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode
        # then add it to our list of propers
        if cur_proper.indexerid != -1 and (cur_proper.indexerid, cur_proper.season, cur_proper.episode) not in map(
                operator.attrgetter('indexerid', 'season', 'episode'), verified_propers):
            logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
            verified_propers.append(cur_proper)

    return verified_propers
Example #12
0
def search_providers(show, episodes, manual_search=False, torrent_only=False, try_other_searches=False, old_status=None, scheduled=False):
    """Search active backlog-enabled providers for the given episodes of a show.

    Iterates every active provider (optionally torrent-only / scheduled-only),
    performs episode and/or season-pack searches with fallback, filters out
    unwanted results, weighs season packs against single episodes and
    multi-episode results, and returns the best candidates found.

    :param show: show object whose episodes are being searched
    :param episodes: list of episode objects to find results for
    :param manual_search: True when the search was triggered by the user
    :param torrent_only: restrict the provider list to torrent providers
    :param try_other_searches: passed through to providers to widen the search
    :param old_status: composite status used to decide whether a preferred
        quality list should constrain result picking
    :param scheduled: True when invoked by the scheduled backlog
    :return: list of search result objects; may be a single season result
    """
    found_results = {}
    final_results = []

    search_done = False

    orig_thread_name = threading.currentThread().name

    use_quality_list = None
    if any([episodes]):
        old_status = old_status or failed_history.find_old_status(episodes[0]) or episodes[0].status
        if old_status:
            status, quality = Quality.splitCompositeStatus(old_status)
            # only constrain by the preferred quality list when the episode has
            # already progressed beyond the initial "wanted"-type states
            use_quality_list = (status not in (
                common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN))

    provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and
                     (not torrent_only or x.providerType == GenericProvider.TORRENT) and
                     (not scheduled or x.enable_scheduled_backlog)]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name, logger.DEBUG)
            continue

        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
        provider_id = cur_provider.get_id()

        found_results[provider_id] = {}

        search_count = 0
        search_mode = getattr(cur_provider, 'search_mode', 'eponly')

        # run at most two passes: the provider's preferred mode, then (if
        # enabled and nothing found) the opposite mode as a fallback
        while True:
            search_count += 1

            if 'eponly' == search_mode:
                logger.log(u'Performing episode search for %s' % show.name)
            else:
                logger.log(u'Performing season pack search for %s' % show.name)

            try:
                cur_provider.cache._clearCache()
                search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search,
                                                                  try_other_searches=try_other_searches)
                if any(search_results):
                    logger.log(', '.join(['%s %s candidate%s' % (
                        len(v), (('multiep', 'season')[SEASON_RESULT == k], 'episode')['ep' in search_mode],
                        helpers.maybe_plural(len(v))) for (k, v) in search_results.iteritems()]))
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(u'Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                break
            finally:
                threading.currentThread().name = orig_thread_name

            search_done = True

            if len(search_results):
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    # skip non-tv crap
                    search_results[cur_ep] = filter(
                        lambda ep_item: show_name_helpers.pass_wordlist_checks(
                            ep_item.name, parse=False, indexer_lookup=False) and
                                        ep_item.show == show, search_results[cur_ep])

                    # fix: membership must be tested on this provider's own result
                    # dict (keyed by episode number), not the outer provider-keyed
                    # dict, otherwise results were always overwritten instead of
                    # accumulated across search passes
                    if cur_ep in found_results[provider_id]:
                        found_results[provider_id][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[provider_id][cur_ep] = search_results[cur_ep]

                break
            elif not getattr(cur_provider, 'search_fallback', False) or 2 == search_count:
                break

            search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
            logger.log(u'Falling back to %s search ...' % ('season pack', 'episode')['ep' in search_mode])

        # skip to next provider if we have no results to process
        if not len(found_results[provider_id]):
            continue

        any_qualities, best_qualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[provider_id]:
            best_season_result = pick_best_result(found_results[provider_id][SEASON_RESULT], show,
                                                  any_qualities + best_qualities)

        highest_quality_overall = 0
        for cur_episode in found_results[provider_id]:
            for cur_result in found_results[provider_id][cur_episode]:
                if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
                    highest_quality_overall = cur_result.quality
        logger.log(u'%s is the highest quality of any match' % Quality.qualityStrings[highest_quality_overall],
                   logger.DEBUG)

        # see if every episode is wanted
        if best_season_result:
            # get the quality of the season nzb
            season_qual = best_season_result.quality
            logger.log(u'%s is the quality of the season %s' % (Quality.qualityStrings[season_qual],
                                                                best_season_result.provider.providerType), logger.DEBUG)

            my_db = db.DBConnection()
            sql = 'SELECT season, episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' %\
                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
            ep_nums = [(int(x['season']), int(x['episode'])) for x in my_db.select(sql)]

            logger.log(u'Executed query: [%s]' % sql)
            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)

            all_wanted = True
            any_wanted = False
            for ep_num in ep_nums:
                if not show.wantEpisode(ep_num[0], ep_num[1], season_qual):
                    all_wanted = False
                else:
                    any_wanted = True

            # if we need every ep in the season and there's nothing better then just download this and
            # be done with it (unless single episodes are preferred)
            if all_wanted and highest_quality_overall == best_season_result.quality:
                logger.log(u'Every episode in this season is needed, downloading the whole %s %s' %
                           (best_season_result.provider.providerType, best_season_result.name))
                ep_objs = []
                for ep_num in ep_nums:
                    ep_objs.append(show.getEpisode(ep_num[0], ep_num[1]))
                best_season_result.episodes = ep_objs

                return [best_season_result]

            elif not any_wanted:
                logger.log(u'No episodes from this season are wanted at this quality, ignoring the result of ' +
                           best_season_result.name, logger.DEBUG)
            else:
                if GenericProvider.NZB == best_season_result.provider.providerType:
                    logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzbSplitter.splitResult(best_season_result)

                    individual_results = filter(
                        lambda r: show_name_helpers.pass_wordlist_checks(
                            r.name, parse=False, indexer_lookup=False) and r.show == show, individual_results)

                    for cur_result in individual_results:
                        if 1 == len(cur_result.episodes):
                            ep_num = cur_result.episodes[0].episode
                        elif 1 < len(cur_result.episodes):
                            ep_num = MULTI_EP_RESULT
                        else:
                            # fix: a result with no parsed episodes would otherwise
                            # raise NameError on the first pass or reuse a stale ep_num
                            continue

                        if ep_num in found_results[provider_id]:
                            found_results[provider_id][ep_num].append(cur_result)
                        else:
                            found_results[provider_id][ep_num] = [cur_result]

                # If this is a torrent all we can do is leech the entire torrent,
                # user will have to select which eps not do download in his torrent client
                else:

                    # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it
                    logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set ' +
                               u'the episodes that you do not want to "don\'t download"')
                    ep_objs = []
                    for ep_num in ep_nums:
                        ep_objs.append(show.getEpisode(ep_num[0], ep_num[1]))
                    best_season_result.episodes = ep_objs

                    ep_num = MULTI_EP_RESULT
                    if ep_num in found_results[provider_id]:
                        found_results[provider_id][ep_num].append(best_season_result)
                    else:
                        found_results[provider_id][ep_num] = [best_season_result]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[provider_id]:
            for multi_result in found_results[provider_id][MULTI_EP_RESULT]:

                logger.log(u'Checking usefulness of multi episode result [%s]' % multi_result.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.has_failed(multi_result.name, multi_result.size,
                                                                                multi_result.provider.name):
                    logger.log(u'Rejecting previously failed multi episode result [%s]' % multi_result.name)
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    # if we have results for the episode
                    if ep_num in found_results[provider_id] and 0 < len(found_results[provider_id][ep_num]):
                        needed_eps.append(ep_num)
                    else:
                        not_needed_eps.append(ep_num)

                logger.log(u'Single episode check result is... needed episodes: %s, not needed episodes: %s' %
                           (needed_eps, not_needed_eps), logger.DEBUG)

                if not not_needed_eps:
                    logger.log(u'All of these episodes were covered by single episode results, ' +
                               'ignoring this multi episode result', logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in multi_results:
                        multi_not_needed_eps.append(ep_num)
                    else:
                        multi_needed_eps.append(ep_num)

                logger.log(u'Multi episode check result is... multi needed episodes: ' +
                           '%s, multi not needed episodes: %s' % (multi_needed_eps, multi_not_needed_eps), logger.DEBUG)

                if not multi_needed_eps:
                    logger.log(u'All of these episodes were covered by another multi episode nzb, ' +
                               'ignoring this multi episode result',
                               logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in found_results[provider_id]:
                        logger.log(u'A needed multi episode result overlaps with a single episode result for episode ' +
                                   '#%s, removing the single episode results from the list' % ep_num, logger.DEBUG)
                        del found_results[provider_id][ep_num]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        # prefer the "best" quality list only when the episode status warrants it
        # and at least one preferred quality is configured
        quality_list = use_quality_list and (None, best_qualities)[any(best_qualities)] or None
        for cur_ep in found_results[provider_id]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if 0 == len(found_results[provider_id][cur_ep]):
                continue

            best_result = pick_best_result(found_results[provider_id][cur_ep], show, quality_list,
                                           filter_rls=orig_thread_name)

            # if all results were rejected move on to the next episode
            if not best_result:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType:
                if not best_result.url.startswith('magnet') and None is not best_result.get_data_func:
                    best_result.url = best_result.get_data_func(best_result.url)
                    best_result.get_data_func = None  # consume only once
                    if not best_result.url:
                        continue
                if best_result.url.startswith('magnet'):
                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = None
                else:
                    cache_file = ek.ek(os.path.join, sickbeard.CACHE_DIR or helpers._getTempDir(),
                                       '%s.torrent' % (helpers.sanitizeFileName(best_result.name)))
                    if not helpers.download_file(best_result.url, cache_file, session=best_result.provider.session):
                        continue

                    try:
                        with open(cache_file, 'rb') as fh:
                            td = fh.read()
                        setattr(best_result, 'cache_file', cache_file)
                    except (StandardError, Exception):
                        continue

                    if getattr(best_result.provider, 'chk_td', None):
                        name = None
                        try:
                            # lightweight in-place scan of the torrent's bencoded
                            # metadata for its 'name' field (avoids a full decode)
                            hdr = re.findall('(\w+(\d+):)', td[0:6])[0]
                            x, v = len(hdr[0]), int(hdr[1])
                            while x < len(td):
                                y = x + v
                                name = 'name' == td[x: y]
                                w = re.findall('((?:i-?\d+e|e+|d|l+)*(\d+):)', td[y: y + 32])[0]
                                x, v = y + len(w[0]), int(w[1])
                                if name:
                                    name = td[x: x + v]
                                    break
                        except (StandardError, Exception):
                            continue
                        if name:
                            if not pass_show_wordlist_checks(name, show):
                                continue
                            if not show_name_helpers.pass_wordlist_checks(name, indexer_lookup=False):
                                logger.log('Ignored: %s (debug log has detail)' % name)
                                continue
                            best_result.name = name

                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = td

            # add result if its not a duplicate and
            found = False
            for i, result in enumerate(final_results):
                for best_result_ep in best_result.episodes:
                    if best_result_ep in result.episodes:
                        if best_result.quality > result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wanted_ep_count = 0
        for wanted_ep in episodes:
            for result in final_results:
                if wanted_ep in result.episodes and is_final_result(result):
                    wanted_ep_count += 1

        # make sure we search every provider for results unless we found everything we wanted
        if len(episodes) == wanted_ep_count:
            break

    if not len(provider_list):
        logger.log('No NZB/Torrent providers in Media Providers/Options are allowed for active searching', logger.WARNING)
    elif not search_done:
        logger.log('Failed active search of %s enabled provider%s. More info in debug log.' % (
            len(provider_list), helpers.maybe_plural(len(provider_list))), logger.ERROR)
    elif not any(final_results):
        logger.log('No suitable candidates')

    return final_results
# Example #13
# 0
def search_providers(show, episodes, manual_search=False):
    """Search active backlog-enabled providers for the given episodes of a show.

    Performs an episode and/or season-pack search per provider (with a single
    fallback pass), filters results against the show's word lists, weighs a
    season pack against individual and multi-episode results, then returns the
    best candidate per episode.

    :param show: show object whose episodes are being searched
    :param episodes: list of episode objects to find results for
    :param manual_search: True when the search was triggered by the user
    :return: list of search result objects; may be a single season result
    """
    found_results = {}
    final_results = []

    search_done = False

    orig_thread_name = threading.currentThread().name

    provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name, logger.DEBUG)
            continue

        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
        provider_id = cur_provider.get_id()

        found_results[provider_id] = {}

        search_count = 0
        search_mode = cur_provider.search_mode

        # run at most two passes: the provider's preferred mode, then (if
        # fallback is enabled and nothing was found) the opposite mode
        while True:
            search_count += 1

            if 'eponly' == search_mode:
                logger.log(u'Performing episode search for %s' % show.name)
            else:
                logger.log(u'Performing season pack search for %s' % show.name)

            try:
                cur_provider.cache._clearCache()
                search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search)
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(u'Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
                break
            finally:
                threading.currentThread().name = orig_thread_name

            search_done = True

            if len(search_results):
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    # skip non-tv crap
                    search_results[cur_ep] = filter(
                        lambda item: show_name_helpers.pass_wordlist_checks(item.name, parse=False) and
                                     item.show == show, search_results[cur_ep])

                    # fix: membership must be tested on this provider's own result
                    # dict (keyed by episode number), not the outer provider-keyed
                    # dict, otherwise results were always overwritten instead of
                    # accumulated across search passes
                    if cur_ep in found_results[provider_id]:
                        found_results[provider_id][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[provider_id][cur_ep] = search_results[cur_ep]

                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
            logger.log(u'Falling back to %s search ...' % ('season pack', 'episode')['ep' in search_mode])

        # skip to next provider if we have no results to process
        if not len(found_results[provider_id]):
            continue

        any_qualities, best_qualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[provider_id]:
            best_season_result = pick_best_result(found_results[provider_id][SEASON_RESULT], show,
                                                  any_qualities + best_qualities)

        highest_quality_overall = 0
        for cur_episode in found_results[provider_id]:
            for cur_result in found_results[provider_id][cur_episode]:
                if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
                    highest_quality_overall = cur_result.quality
        logger.log(u'%s is the highest quality of any match' % Quality.qualityStrings[highest_quality_overall],
                   logger.DEBUG)

        # see if every episode is wanted
        if best_season_result:
            # get the quality of the season nzb
            season_qual = best_season_result.quality
            logger.log(u'%s is the quality of the season %s' % (Quality.qualityStrings[season_qual],
                                                                best_season_result.provider.providerType), logger.DEBUG)

            my_db = db.DBConnection()
            sql = 'SELECT episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' %\
                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
            ep_nums = [int(x['episode']) for x in my_db.select(sql)]

            logger.log(u'Executed query: [%s]' % sql)
            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)

            all_wanted = True
            any_wanted = False
            for ep_num in ep_nums:
                for season in set([x.season for x in episodes]):
                    if not show.wantEpisode(season, ep_num, season_qual):
                        all_wanted = False
                    else:
                        any_wanted = True

            # if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
            if all_wanted and highest_quality_overall == best_season_result.quality:
                logger.log(u'Every episode in this season is needed, downloading the whole %s %s' %
                           (best_season_result.provider.providerType, best_season_result.name))
                ep_objs = []
                for ep_num in ep_nums:
                    for season in set([x.season for x in episodes]):
                        ep_objs.append(show.getEpisode(season, ep_num))
                best_season_result.episodes = ep_objs

                return [best_season_result]

            elif not any_wanted:
                logger.log(u'No episodes from this season are wanted at this quality, ignoring the result of ' +
                           best_season_result.name, logger.DEBUG)
            else:
                if GenericProvider.NZB == best_season_result.provider.providerType:
                    logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzbSplitter.splitResult(best_season_result)

                    individual_results = filter(
                        lambda r: show_name_helpers.pass_wordlist_checks(r.name, parse=False) and r.show == show, individual_results)

                    for cur_result in individual_results:
                        if 1 == len(cur_result.episodes):
                            ep_num = cur_result.episodes[0].episode
                        elif 1 < len(cur_result.episodes):
                            ep_num = MULTI_EP_RESULT
                        else:
                            # fix: a result with no parsed episodes would otherwise
                            # raise NameError on the first pass or reuse a stale ep_num
                            continue

                        if ep_num in found_results[provider_id]:
                            found_results[provider_id][ep_num].append(cur_result)
                        else:
                            found_results[provider_id][ep_num] = [cur_result]

                # If this is a torrent all we can do is leech the entire torrent, user will have to select which eps not do download in his torrent client
                else:

                    # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it.
                    logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set ' +
                               u'the episodes that you do not want to "don\'t download"')
                    ep_objs = []
                    for ep_num in ep_nums:
                        for season in set([x.season for x in episodes]):
                            ep_objs.append(show.getEpisode(season, ep_num))
                    best_season_result.episodes = ep_objs

                    ep_num = MULTI_EP_RESULT
                    if ep_num in found_results[provider_id]:
                        found_results[provider_id][ep_num].append(best_season_result)
                    else:
                        found_results[provider_id][ep_num] = [best_season_result]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[provider_id]:
            for multi_result in found_results[provider_id][MULTI_EP_RESULT]:

                logger.log(u'Checking usefulness of multi episode result %s' % multi_result.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multi_result.name, multi_result.size,
                                                                               multi_result.provider.name):
                    logger.log(u'%s has previously failed, rejecting this multi episode result' % multi_result.name)
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    # if we have results for the episode
                    if ep_num in found_results[provider_id] and 0 < len(found_results[provider_id][ep_num]):
                        needed_eps.append(ep_num)
                    else:
                        not_needed_eps.append(ep_num)

                logger.log(u'Single episode check result is... needed episodes: %s, not needed episodes: %s' %
                           (needed_eps, not_needed_eps), logger.DEBUG)

                if not not_needed_eps:
                    logger.log(u'All of these episodes were covered by single episode results, ignoring this multi episode result', logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in multi_results:
                        multi_not_needed_eps.append(ep_num)
                    else:
                        multi_needed_eps.append(ep_num)

                logger.log(u'Multi episode check result is... multi needed episodes: %s, multi not needed episodes: %s' %
                           (multi_needed_eps, multi_not_needed_eps), logger.DEBUG)

                if not multi_needed_eps:
                    logger.log(u'All of these episodes were covered by another multi episode nzb, ignoring this multi episode result',
                               logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in found_results[provider_id]:
                        logger.log(u'A needed multi episode result overlaps with a single episode result for episode #%s, removing the single episode results from the list' %
                                   ep_num, logger.DEBUG)
                        del found_results[provider_id][ep_num]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        for cur_ep in found_results[provider_id]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if 0 == len(found_results[provider_id][cur_ep]):
                continue

            best_result = pick_best_result(found_results[provider_id][cur_ep], show)

            # if all results were rejected move on to the next episode
            if not best_result:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType and 'blackhole' != sickbeard.TORRENT_METHOD:
                best_result.content = None
                if not best_result.url.startswith('magnet'):
                    best_result.content = best_result.provider.get_url(best_result.url)
                    if not best_result.content:
                        continue

            # add result if its not a duplicate and
            found = False
            for i, result in enumerate(final_results):
                for best_result_ep in best_result.episodes:
                    if best_result_ep in result.episodes:
                        if best_result.quality > result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wanted_ep_count = 0
        for wanted_ep in episodes:
            for result in final_results:
                if wanted_ep in result.episodes and is_final_result(result):
                    wanted_ep_count += 1

        # make sure we search every provider for results unless we found everything we wanted
        if len(episodes) == wanted_ep_count:
            break

    if not search_done:
        logger.log(u'No NZB/Torrent providers found or enabled in the SickGear config for backlog searches. Please check your settings.',
                   logger.ERROR)

    return final_results