Example #1
def wanted_episodes(series_obj, from_date):
    """
    Get a list of episodes that we want to download.

    :param series_obj: Series these episodes are from
    :param from_date: Search from a certain date
    :return: list of wanted episodes
    """
    wanted = []
    allowed_qualities, preferred_qualities = series_obj.current_qualities
    all_qualities = list(set(allowed_qualities + preferred_qualities))

    log.debug(u'Seeing if we need anything from {0}', series_obj.name)
    con = db.DBConnection()

    sql_results = con.select(
        'SELECT status, quality, season, episode, manually_searched '
        'FROM tv_episodes '
        'WHERE indexer = ? '
        ' AND showid = ?'
        ' AND season > 0'
        ' AND airdate > ?',
        [series_obj.indexer, series_obj.series_id, from_date.toordinal()]
    )

    # check through the list of statuses to see if we want any
    for episode in sql_results:
        cur_status, cur_quality = int(episode['status'] or UNSET), int(episode['quality'] or Quality.NA)
        should_search, should_search_reason = Quality.should_search(
            cur_status, cur_quality, series_obj, episode['manually_searched']
        )
        if not should_search:
            continue
        else:
            log.debug(
                u'Searching for {show} {ep}. Reason: {reason}', {
                    u'show': series_obj.name,
                    u'ep': episode_num(episode['season'], episode['episode']),
                    u'reason': should_search_reason,
                }
            )

        ep_obj = series_obj.get_episode(episode['season'], episode['episode'])
        ep_obj.wanted_quality = [
            quality
            for quality in all_qualities
            if Quality.is_higher_quality(
                cur_quality, quality, allowed_qualities, preferred_qualities
            )
        ]
        wanted.append(ep_obj)

    return wanted
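A minimal, hedged sketch of the wanted_quality filter above, reusing the allowed/preferred lists from Example #7 and the same Quality.is_higher_quality call (import path as in Example #22). Which qualities survive the filter depends on that method, so no specific output is asserted here.

from medusa.common import Quality

allowed_qualities = [Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY]
preferred_qualities = [Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY]
all_qualities = list(set(allowed_qualities + preferred_qualities))
cur_quality = Quality.HDTV  # pretend the episode currently sits at 720p HDTV

# Same filter as ep_obj.wanted_quality above: keep only qualities that count as an upgrade
upgrades = [
    quality
    for quality in all_qualities
    if Quality.is_higher_quality(cur_quality, quality, allowed_qualities, preferred_qualities)
]
print(upgrades)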
Example #2
def refine(video, tv_episode=None, **kwargs):
    """Refine a video by using TVEpisode information.

    :param video: the video to refine.
    :type video: Episode
    :param tv_episode: the TVEpisode to be used.
    :type tv_episode: medusa.tv.Episode
    :param kwargs:
    """
    if video.series_tvdb_id and video.tvdb_id:
        log.debug('No need to refine with Episode')
        return

    if not tv_episode:
        log.debug('No Episode to be used to refine')
        return

    if not isinstance(video, Episode):
        log.debug('Video {name!r} is not an episode. Skipping refiner...',
                  {'name': video.name})
        return

    if tv_episode.series:
        log.debug('Refining using Series information.')
        series, year, _ = series_re.match(tv_episode.series.name).groups()
        enrich({'series': series, 'year': int(year) if year else None}, video)
        enrich(SHOW_MAPPING, video, tv_episode.series)

    log.debug('Refining using Episode information.')
    enrich(EPISODE_MAPPING, video, tv_episode)
    enrich(ADDITIONAL_MAPPING, video, tv_episode, overwrite=False)
    guess = Quality.to_guessit(tv_episode.quality)
    enrich({'resolution': guess.get('screen_size'), 'source': guess.get('source')}, video, overwrite=False)
Example #3
def test_to_guessit(p):
    # Given
    quality = p['quality']
    expected = p['expected']

    # When
    actual = Quality.to_guessit(quality)

    # Then
    assert expected == actual
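For a quick, hedged illustration of what this test exercises: Example #2 reads the 'screen_size' and 'source' keys from the mapping returned by Quality.to_guessit, so a standalone call might look like the sketch below (import as in Example #22; the concrete values depend on the installed guessit mapping).

from medusa.common import Quality

guess = Quality.to_guessit(Quality.FULLHDWEBDL)
# Example #2 reads these two keys from the returned mapping
print(guess.get('screen_size'), guess.get('source'))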
Example #4
def test_is_valid_combined_quality(p):
    # Given
    quality = p['quality']
    expected = p['expected']

    # When
    actual = Quality.is_valid_combined_quality(quality)

    # Then
    assert expected == actual
Example #5
def test_is_higher_quality(p):
    # Given
    expected = p.pop('expected')
    kwargs = p

    # When
    actual = Quality.is_higher_quality(**kwargs)

    # Then
    assert expected == actual
Example #6
def test_quality_from_name(p):
    # Given
    name = p['name']
    anime = p.get('anime', False)
    expected = p['expected']

    # When
    actual = Quality.quality_from_name(name, anime)

    # Then
    assert expected == actual
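As a hedged, standalone illustration of the call this test exercises, the sketch below uses a release-name/quality pairing taken from Example #20 (import as in Example #22).

from medusa.common import Quality

# 'Show.Name.S03E04.720p.HDTV.x264-RlsGrp' is listed with Quality.HDTV in Example #20
name = 'Show.Name.S03E04.720p.HDTV.x264-RlsGrp'
print(Quality.quality_from_name(name, False) == Quality.HDTV)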
Example #7
def test_wanted_quality():
    # Given
    quality = Quality.FULLHDWEBDL
    allowed_qualities = [Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY]
    preferred_qualities = [Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY]

    # When
    actual = Quality.wanted_quality(quality, allowed_qualities, preferred_qualities)

    # Then
    assert actual is True
Example #8
def test_from_guessit(p):
    # Given
    guess = {
        'screen_size': p.get('screen_size'),
        'source': p.get('source'),
    }
    expected = p['expected']

    # When
    actual = Quality.from_guessit(guess)

    # Then
    assert expected == actual
Example #9
def is_first_best_match(result):
    """
    Check if the given result is a best quality match and if we want to stop searching providers here.

    :param result: to check
    :return: True if the result is the best quality match else False
    """
    log.debug(u'Checking if we should stop searching for a better quality for episode {0}', result.name)

    series_obj = result.episodes[0].series

    _, preferred_qualities = series_obj.current_qualities
    # Don't pass allowed because we only want to check whether this quality is a wanted preferred quality.
    return Quality.wanted_quality(result.quality, [], preferred_qualities)
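A minimal sketch of the same call, reusing the preferred list from Example #7. Per the comment above, passing an empty allowed list means only membership in the preferred list should matter; treat that, and the expected True result, as assumptions by analogy with Example #7 rather than guaranteed behaviour.

from medusa.common import Quality

preferred_qualities = [Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY]
# Expected True by analogy with Example #7 (assumption: an empty allowed list is fine here)
print(Quality.wanted_quality(Quality.FULLHDWEBDL, [], preferred_qualities))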
Example #10
def test_should_search(p):
    """Run the test."""
    # Given
    status = p['status']
    quality = p['quality']
    show_obj = p['show_obj']
    manually_searched = p['manually_searched']
    expected = p['expected']

    # When
    replace, msg = Quality.should_search(status, quality, show_obj, manually_searched)
    actual = replace

    # Then
    if expected != actual:
        print(msg)
    assert expected == actual
Example #11
    def add_show(indexer, indexer_id, show_name, status):
        """Add a new show with default settings."""
        if Show.find_by_id(app.showList, EXTERNAL_IMDB, indexer_id):
            return

        root_dirs = app.ROOT_DIRS

        location = root_dirs[int(root_dirs[0]) + 1] if root_dirs else None

        if location:
            log.info("Adding show '{show}' using indexer: '{indexer_name}' and ID: {id}", {
                'show': show_name,
                'indexer_name': indexerConfig[indexer]['identifier'],
                'id': indexer_id
            })

            allowed, preferred = Quality.split_quality(int(app.QUALITY_DEFAULT))
            quality = {'allowed': allowed, 'preferred': preferred}

            app.show_queue_scheduler.action.addShow(indexer, indexer_id, None,
                                                    default_status=status,
                                                    quality=quality,
                                                    season_folders=int(app.SEASON_FOLDERS_DEFAULT),
                                                    paused=app.TRAKT_START_PAUSED,
                                                    default_status_after=status,
                                                    root_dir=location)
            tries = 0
            while tries < 3:
                if Show.find_by_id(app.showList, indexer, indexer_id):
                    return
                # Wait for the show to get added and refreshed
                time.sleep(60)
                tries += 1
            log.warning("Error creating show '{show}. Please check logs' ", {
                'show': show_name
            })
            return
        else:
            log.warning("Error creating show '{show}' folder. No default root directory", {
                'show': show_name
            })
            return
Example #12
    def _get_segments(series_obj, from_date):
        """Get episodes that should be backlog searched."""
        wanted = {}
        if series_obj.paused:
            log.debug(u'Skipping backlog for {0} because the show is paused', series_obj.name)
            return wanted

        log.debug(u'Seeing if we need anything from {0}', series_obj.name)

        con = db.DBConnection()
        sql_results = con.select(
            'SELECT status, quality, season, episode, manually_searched '
            'FROM tv_episodes '
            'WHERE airdate > ?'
            ' AND indexer = ? '
            ' AND showid = ?',
            [from_date.toordinal(), series_obj.indexer, series_obj.series_id]
        )

        # check through the list of statuses to see if we want any
        for episode in sql_results:
            cur_status, cur_quality = int(episode['status'] or UNSET), int(episode['quality'] or Quality.NA)
            should_search, should_search_reason = Quality.should_search(
                cur_status, cur_quality, series_obj, episode['manually_searched']
            )
            if not should_search:
                continue
            log.debug(
                u'Found needed backlog episodes for: {show} {ep}. Reason: {reason}', {
                    'show': series_obj.name,
                    'ep': episode_num(episode['season'], episode['episode']),
                    'reason': should_search_reason,
                }
            )
            ep_obj = series_obj.get_episode(episode['season'], episode['episode'])

            if ep_obj.season not in wanted:
                wanted[ep_obj.season] = [ep_obj]
            else:
                wanted[ep_obj.season].append(ep_obj)

        return wanted
Example #13
    def saveAddShowDefaults(default_status, allowed_qualities, preferred_qualities, default_season_folders,
                            subtitles=False, anime=False, scene=False, default_status_after=WANTED):

        allowed_qualities = [_.strip() for _ in allowed_qualities.split(',')] if allowed_qualities else []
        preferred_qualities = [_.strip() for _ in preferred_qualities.split(',')] if preferred_qualities else []

        new_quality = Quality.combine_qualities([int(quality) for quality in allowed_qualities],
                                                [int(quality) for quality in preferred_qualities])

        app.STATUS_DEFAULT = int(default_status)
        app.STATUS_DEFAULT_AFTER = int(default_status_after)
        app.QUALITY_DEFAULT = int(new_quality)

        app.SEASON_FOLDERS_DEFAULT = config.checkbox_to_value(default_season_folders)
        app.SUBTITLES_DEFAULT = config.checkbox_to_value(subtitles)

        app.ANIME_DEFAULT = config.checkbox_to_value(anime)

        app.SCENE_DEFAULT = config.checkbox_to_value(scene)
        app.instance.save_config()
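A minimal sketch of the round trip this sets up: combine_qualities builds the single integer stored in app.QUALITY_DEFAULT, and split_quality (used in Examples #11 and #28) recovers the two lists again. The specific constants are illustrative; the import follows Example #22.

from medusa.common import Quality

combined = Quality.combine_qualities([Quality.HDTV, Quality.HDWEBDL], [Quality.FULLHDWEBDL])
allowed, preferred = Quality.split_quality(int(combined))
print(allowed, preferred)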
Example #14
    def saveAddShowDefaults(default_status, allowed_qualities, preferred_qualities, default_season_folders,
                            subtitles=False, anime=False, scene=False, default_status_after=WANTED):

        allowed_qualities = [_.strip() for _ in allowed_qualities.split(',')] if allowed_qualities else []
        preferred_qualities = [_.strip() for _ in preferred_qualities.split(',')] if preferred_qualities else []

        new_quality = Quality.combine_qualities([int(quality) for quality in allowed_qualities],
                                                [int(quality) for quality in preferred_qualities])

        app.STATUS_DEFAULT = int(default_status)
        app.STATUS_DEFAULT_AFTER = int(default_status_after)
        app.QUALITY_DEFAULT = int(new_quality)

        app.SEASON_FOLDERS_DEFAULT = config.checkbox_to_value(default_season_folders)
        app.SUBTITLES_DEFAULT = config.checkbox_to_value(subtitles)

        app.ANIME_DEFAULT = config.checkbox_to_value(anime)

        app.SCENE_DEFAULT = config.checkbox_to_value(scene)
        app.instance.save_config()
Example #15
def refine(video, tv_episode=None, **kwargs):
    """Refine a video by using TVEpisode information.

    :param video: the video to refine.
    :type video: Episode
    :param tv_episode: the TVEpisode to be used.
    :type tv_episode: medusa.tv.Episode
    :param kwargs:
    """
    if video.series_tvdb_id and video.tvdb_id:
        log.debug('No need to refine with Episode')
        return

    if not tv_episode:
        log.debug('No Episode to be used to refine')
        return

    if not isinstance(video, Episode):
        log.debug('Video {name} is not an episode. Skipping refiner...',
                  {'name': video.name})
        return

    if tv_episode.series:
        log.debug('Refining using Series information.')
        series, year, _ = series_re.match(tv_episode.series.name).groups()
        enrich({'series': series, 'year': int(year) if year else None}, video)
        enrich(SHOW_MAPPING, video, tv_episode.series)

    log.debug('Refining using Episode information.')
    enrich(EPISODE_MAPPING, video, tv_episode)
    enrich(ADDITIONAL_MAPPING, video, tv_episode, overwrite=False)
    guess = Quality.to_guessit(tv_episode.status)
    enrich(
        {
            'resolution': guess.get('screen_size'),
            'format': guess.get('format')
        },
        video,
        overwrite=False)
Example #16
def test_should_replace(p):
    """Run the test."""
    # Given
    ep_status = p['ep_status']
    cur_quality = p['cur_quality']
    new_quality = p['new_quality']
    allowed_qualities = p['allowed_qualities']
    preferred_qualities = p['preferred_qualities']
    expected = p['expected']
    download_current_quality = p['download_current_quality']
    force = p['force']
    manually_searched = p['manually_searched']
    search_type = p.get('search_type', DAILY_SEARCH)

    # When
    replace, msg = Quality.should_replace(ep_status, cur_quality, new_quality, allowed_qualities, preferred_qualities,
                                          download_current_quality, force, manually_searched, search_type)
    actual = replace

    # Then
    if expected != actual:
        print(msg)
    assert expected == actual
Example #17
def test_should_replace(p):
    """Run the test."""
    # Given
    ep_status = p['ep_status']
    cur_quality = p['cur_quality']
    new_quality = p['new_quality']
    allowed_qualities = p['allowed_qualities']
    preferred_qualities = p['preferred_qualities']
    expected = p['expected']
    download_current_quality = p['download_current_quality']
    force = p['force']
    manually_searched = p['manually_searched']
    search_type = p.get('search_type', DAILY_SEARCH)

    # When
    replace, msg = Quality.should_replace(ep_status, cur_quality, new_quality, allowed_qualities, preferred_qualities,
                                          download_current_quality, force, manually_searched, search_type)
    actual = replace

    # Then
    if expected != actual:
        print(msg)
    assert expected == actual
Example #18
def test_should_replace(p):
    # Given
    ep_status = p['ep_status']
    cur_quality = p['cur_quality']
    new_quality = p['new_quality']
    allowed_qualities = p['allowed_qualities']
    preferred_qualities = p['preferred_qualities']
    expected = p['expected']
    download_current_quality = p['download_current_quality']
    force = p['force']
    manually_searched = p['manually_searched']

    # When
    replace, msg = Quality.should_replace(ep_status, cur_quality, new_quality,
                                          allowed_qualities,
                                          preferred_qualities,
                                          download_current_quality, force,
                                          manually_searched)
    actual = replace

    # Then
    if expected != actual:
        print(msg)
    assert expected == actual
Example #19
    def subtitleMissedPP(self):
        t = PageTemplate(rh=self, filename='manage_subtitleMissedPP.mako')
        app.RELEASES_IN_PP = []
        for root, _, files in os.walk(app.TV_DOWNLOAD_DIR, topdown=False):
            # Skip folders that are being used for unpacking
            if u'_UNPACK' in root.upper():
                continue
            for filename in sorted(files):
                if not is_media_file(filename):
                    continue

                video_path = os.path.join(root, filename)
                video_date = datetime.datetime.fromtimestamp(
                    os.stat(video_path).st_ctime)
                video_age = datetime.datetime.today() - video_date

                tv_episode = Episode.from_filepath(video_path)

                if not tv_episode:
                    logger.log(
                        u"Filename '{0}' cannot be parsed to an episode".
                        format(filename), logger.DEBUG)
                    continue

                ep_status = Quality.split_composite_status(
                    tv_episode.status).status
                if ep_status in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST:
                    status = 'snatched'
                elif ep_status in Quality.DOWNLOADED:
                    status = 'downloaded'
                else:
                    continue

                if not tv_episode.series.subtitles:
                    continue

                related_files = PostProcessor(
                    video_path).list_associated_files(video_path,
                                                      subtitles_only=True)
                if related_files:
                    continue

                age_hours = divmod(video_age.seconds, 3600)[0]
                age_minutes = divmod(video_age.seconds, 60)[0]
                if video_age.days > 0:
                    age_unit = 'd'
                    age_value = video_age.days
                elif age_hours > 0:
                    age_unit = 'h'
                    age_value = age_hours
                else:
                    age_unit = 'm'
                    age_value = age_minutes

                app.RELEASES_IN_PP.append({
                    'release': video_path,
                    'seriesid': tv_episode.series.indexerid,
                    'show_name': tv_episode.series.name,
                    'season': tv_episode.season,
                    'episode': tv_episode.episode,
                    'status': status,
                    'age': age_value,
                    'age_unit': age_unit,
                    'date': video_date,
                    'indexername': tv_episode.series.indexer_name
                })

        return t.render(releases_in_pp=app.RELEASES_IN_PP,
                        topmenu='manage',
                        controller='manage',
                        action='subtitleMissedPP')
Example #20
             'quality': Quality.FULLHDTV
         },
         {  # 2
             'name': 'Show.Name.S03E04.iNTERNAL.1080p.WEB-DL.x264-RlsGrp',
             'quality': Quality.FULLHDWEBDL
         },
     ]
 },
 {  # p4 - preferred lower quality
     'config': {
         'PREFERRED_WORDS': [],
         'UNDESIRED_WORDS': [],
     },
     'series': {
         'quality': Quality.combine_qualities(
             [Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY],
             [Quality.HDTV]
         ),
     },
     'expected': 1,  # Index of the expected result
     'results': [
         {  # 0
             'name': 'Show.Name.S03E04.1080p.WEB-DL.x264-RlsGrp',
             'quality': Quality.FULLHDWEBDL
         },
         {  # 1
             'name': 'Show.Name.S03E04.720p.HDTV.x264-RlsGrp',
             'quality': Quality.HDTV
         },
         {  # 2
             'name': 'Show.Name.S03E04.1080p.HDTV.x264-RlsGrp',
             'quality': Quality.FULLHDTV
Example #21
    def addNewShow(self, whichSeries=None, indexer_lang=None, rootDir=None, defaultStatus=None, quality_preset=None,
                   allowed_qualities=None, preferred_qualities=None, season_folders=None, subtitles=None,
                   fullShowPath=None, other_shows=None, skipShow=None, providedIndexer=None, anime=None,
                   scene=None, blacklist=None, whitelist=None, defaultStatusAfter=None):
        """
        Receive tvdb id, dir, and other options and create a show from them. If extra show dirs are
        provided then it forwards back to newShow, if not it goes to /home.
        """
        provided_indexer = providedIndexer

        indexer_lang = app.INDEXER_DEFAULT_LANGUAGE if not indexer_lang else indexer_lang

        # grab our list of other dirs if given
        if not other_shows:
            other_shows = []
        elif not isinstance(other_shows, list):
            other_shows = [other_shows]

        other_shows = decode_shows(other_shows)

        def finishAddShow():
            # if there are no extra shows then go home
            if not other_shows:
                return json_response(redirect='/home/')

            # go to add the next show
            return json_response(
                redirect='/addShows/newShow/',
                params=[
                    ('show_to_add' if not i else 'other_shows', cur_dir)
                    for i, cur_dir in enumerate(other_shows)
                ]
            )

        # if we're skipping then behave accordingly
        if skipShow:
            return finishAddShow()

        # sanity check on our inputs
        if (not rootDir and not fullShowPath) or not whichSeries:
            error_msg = 'Missing params, no Indexer ID or folder: {series!r} and {root!r}/{path!r}'.format(
                series=whichSeries, root=rootDir, path=fullShowPath)
            log.error(error_msg)
            return json_response(
                result=False,
                message=error_msg,
                redirect='/home/'
            )

        # figure out what show we're adding and where
        series_pieces = whichSeries.split('|')
        if (whichSeries and rootDir) or (whichSeries and fullShowPath and len(series_pieces) > 1):
            if len(series_pieces) < 6:
                log.error('Unable to add show due to show selection. Not enough arguments: {pieces!r}',
                          {'pieces': series_pieces})
                ui.notifications.error('Unknown error. Unable to add show due to problem with show selection.')
                return json_response(
                    result=False,
                    message='Unable to add show due to show selection. Not enough arguments: {0!r}'.format(series_pieces),
                    redirect='/addShows/existingShows/'
                )

            indexer = int(series_pieces[1])
            indexer_id = int(series_pieces[3])
            show_name = series_pieces[4]
        else:
            # if no indexer was provided use the default indexer set in General settings
            if not provided_indexer:
                provided_indexer = app.INDEXER_DEFAULT

            indexer = int(provided_indexer)
            indexer_id = int(whichSeries)
            show_name = os.path.basename(os.path.normpath(fullShowPath))

        # use the whole path if it's given, or else append the show name to the root dir to get the full show path
        if fullShowPath:
            show_dir = os.path.normpath(fullShowPath)
        else:
            show_dir = os.path.join(rootDir, sanitize_filename(show_name))

        # blanket policy - if the dir exists you should have used 'add existing show' numbnuts
        if os.path.isdir(show_dir) and not fullShowPath:
            ui.notifications.error('Unable to add show', 'Folder {path} exists already'.format(path=show_dir))
            return json_response(
                result=False,
                message='Unable to add show: Folder {path} exists already'.format(path=show_dir),
                redirect='/addShows/existingShows/'
            )

        # don't create show dir if config says not to
        if app.ADD_SHOWS_WO_DIR:
            log.info('Skipping initial creation of {path} due to config.ini setting',
                     {'path': show_dir})
        else:
            dir_exists = helpers.make_dir(show_dir)
            if not dir_exists:
                log.error("Unable to create the folder {path}, can't add the show",
                          {'path': show_dir})
                ui.notifications.error('Unable to add show',
                                       'Unable to create the folder {path}, can\'t add the show'.format(path=show_dir))
                # Don't redirect to default page because user wants to see the new show
                return json_response(
                    result=False,
                    message='Unable to add show: Unable to create the folder {path}'.format(path=show_dir),
                    redirect='/home/'
                )
            else:
                helpers.chmod_as_parent(show_dir)

        # prepare the inputs for passing along
        scene = config.checkbox_to_value(scene)
        anime = config.checkbox_to_value(anime)
        season_folders = config.checkbox_to_value(season_folders)
        subtitles = config.checkbox_to_value(subtitles)

        if whitelist:
            if not isinstance(whitelist, list):
                whitelist = [whitelist]
            whitelist = short_group_names(whitelist)
        if blacklist:
            if not isinstance(blacklist, list):
                blacklist = [blacklist]
            blacklist = short_group_names(blacklist)

        if not allowed_qualities:
            allowed_qualities = []
        if not preferred_qualities or try_int(quality_preset, None):
            preferred_qualities = []
        if not isinstance(allowed_qualities, list):
            allowed_qualities = [allowed_qualities]
        if not isinstance(preferred_qualities, list):
            preferred_qualities = [preferred_qualities]
        new_quality = Quality.combine_qualities([int(q) for q in allowed_qualities], [int(q) for q in preferred_qualities])

        # add the show
        app.show_queue_scheduler.action.addShow(indexer, indexer_id, show_dir, int(defaultStatus), new_quality,
                                                season_folders, indexer_lang, subtitles, anime,
                                                scene, None, blacklist, whitelist, int(defaultStatusAfter))
        ui.notifications.message('Show added', 'Adding the specified show into {path}'.format(path=show_dir))

        return finishAddShow()
Example #22
# coding=utf-8
"""Tests for medusa/test_should_refresh.py."""
from medusa.common import (ARCHIVED, DOWNLOADED, IGNORED, Quality, SKIPPED,
                           SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, UNAIRED)
from medusa.tv import Series

import pytest


# Tests follow the same order as the rules
@pytest.mark.parametrize(
    'p',
    [
        {  # p0: File is the same: no
            'cur_status': Quality.composite_status(DOWNLOADED, Quality.HDTV),
            'same_file': True,
            'check_quality_again': False,
            'anime': False,
            'filepath': 'Show.S01E01.1080p.HDTV.X264-GROUP.mkv',
            'expected': False
        },
        {  # p1: Not valid media file: no
            'cur_status': Quality.composite_status(DOWNLOADED, Quality.HDTV),
            'same_file': False,
            'check_quality_again': False,
            'anime': False,
            'filepath': 'Show.S01E01.1080p.HDTV.X264-GROUP.srt',
            'expected': False
        },
        {  # p2: Check file again but new file has UNKNOWN quality: yes
            'cur_status': Quality.composite_status(DOWNLOADED, Quality.HDTV),
Example #23
def snatch_episode(result):
    """
    Snatch a result that has been found.

    :param result: SearchResult instance to be snatched.
    :return: boolean, True on success
    """
    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    is_proper = False
    if app.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for cur_ep in result.episodes:
            if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(
                    days=7):
                result.priority = 1
    if result.proper_tags:
        log.debug(u'Found proper tags for {0}. Snatching as PROPER',
                  result.name)
        is_proper = True
        end_status = SNATCHED_PROPER
    else:
        end_status = SNATCHED

    if result.url.startswith(u'magnet') or result.url.endswith(u'torrent'):
        result.resultType = u'torrent'

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in (u'nzb', u'nzbdata'):
        if app.NZB_METHOD == u'blackhole':
            result_downloaded = _download_result(result)
        elif app.NZB_METHOD == u'sabnzbd':
            result_downloaded = sab.send_nzb(result)
        elif app.NZB_METHOD == u'nzbget':
            result_downloaded = nzbget.sendNZB(result, is_proper)
        else:
            log.error(u'Unknown NZB action specified in config: {0}',
                      app.NZB_METHOD)
            result_downloaded = False

    # Torrents can be sent to clients or saved to disk
    elif result.resultType == u'torrent':
        # torrents are saved to disk when blackhole mode
        if app.TORRENT_METHOD == u'blackhole':
            result_downloaded = _download_result(result)
        else:
            if not result.content and not result.url.startswith(u'magnet'):
                if result.provider.login():
                    result.content = result.provider.get_content(result.url)

            if result.content or result.url.startswith(u'magnet'):
                client = torrent.get_client_class(app.TORRENT_METHOD)()
                result_downloaded = client.send_torrent(result)
            else:
                log.warning(u'Torrent file content is empty: {0}', result.name)
                result_downloaded = False
    else:
        log.error(u'Unknown result type, unable to download it: {0!r}',
                  result.resultType)
        result_downloaded = False

    if not result_downloaded:
        return False

    if app.USE_FAILED_DOWNLOADS:
        failed_history.log_snatch(result)

    ui.notifications.message(u'Episode snatched', result.name)

    history.log_snatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    trakt_data = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if is_first_best_match(result):
                curEpObj.status = Quality.composite_status(
                    SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.composite_status(
                    end_status, result.quality)
            # Reset all others fields to the snatched status
            # New snatch by default doesn't have nfo/tbn
            curEpObj.hasnfo = False
            curEpObj.hastbn = False

            # We can't reset location because we need to know what we are replacing
            # curEpObj.location = ''

            # Size and release name are fetched in PP (only for downloaded status, not snatched)
            curEpObj.file_size = 0
            curEpObj.release_name = ''

            # Need to reset subtitle settings because it's a different file
            curEpObj.subtitles = list()
            curEpObj.subtitles_searchcount = 0
            curEpObj.subtitles_lastsearch = u'0001-01-01 00:00:00'

            # Need to store the correct is_proper. Not use the old one
            curEpObj.is_proper = True if result.proper_tags else False
            curEpObj.version = 0

            # Release group is parsed in PP
            curEpObj.release_group = ''

            curEpObj.manually_searched = result.manually_searched

            sql_l.append(curEpObj.get_sql())

        if curEpObj.status not in Quality.DOWNLOADED:
            notify_message = curEpObj.formatted_filename(
                u'%SN - %Sx%0E - %EN - %QN')
            if all([
                    app.SEEDERS_LEECHERS_IN_NOTIFY,
                    result.seeders not in (-1, None),
                    result.leechers not in (-1, None)
            ]):
                notifiers.notify_snatch(
                    u'{0} with {1} seeders and {2} leechers from {3}'.format(
                        notify_message, result.seeders, result.leechers,
                        result.provider.name), is_proper)
            else:
                notifiers.notify_snatch(
                    u'{0} from {1}'.format(notify_message,
                                           result.provider.name), is_proper)

            if app.USE_TRAKT and app.TRAKT_SYNC_WATCHLIST:
                trakt_data.append((curEpObj.season, curEpObj.episode))
                log.info(
                    u'Adding {0} {1} to Trakt watchlist',
                    result.show.name,
                    episode_num(curEpObj.season, curEpObj.episode),
                )

    if trakt_data:
        data_episode = notifiers.trakt_notifier.trakt_episode_data_generate(
            trakt_data)
        if data_episode:
            notifiers.trakt_notifier.update_watchlist(
                result.show, data_episode=data_episode, update=u'add')

    if sql_l:
        main_db_con = db.DBConnection()
        main_db_con.mass_action(sql_l)

    return True
Example #24
    def get_quality(self, item, anime=False):
        """Get quality of the result from its name."""
        (title, _) = self._get_title_and_url(item)
        quality = Quality.quality_from_name(title, anime)

        return quality
Example #25
def sort_results(results):
    """Sort results based on show specific preferences."""
    wanted_results = []
    if not results:
        log.debug(u'No results to sort.')
        return wanted_results

    sorted_results = sorted(results,
                            key=operator.attrgetter('quality'),
                            reverse=True)
    log.debug(u'Sorting the following results: {0}',
              [x.name for x in sorted_results])

    preferred_words = []
    if app.PREFERRED_WORDS:
        preferred_words = [word.lower() for word in app.PREFERRED_WORDS]
    undesired_words = []
    if app.UNDESIRED_WORDS:
        undesired_words = [word.lower() for word in app.UNDESIRED_WORDS]

    def percentage(percent, whole):
        return (percent * whole) / 100.0

    initial_score = 100.0
    for result in sorted_results:
        score = initial_score

        if wanted_results:
            allowed_qualities, preferred_qualities = result.series.current_qualities
            if Quality.is_higher_quality(wanted_results[0][0].quality,
                                         result.quality, allowed_qualities,
                                         preferred_qualities):
                log.debug(u'Rewarding release {0} (higher quality)',
                          result.name)
                score += percentage(10, score)
                initial_score = score

        if result.proper_tags and (not wanted_results
                                   or wanted_results[0][0].quality
                                   == result.quality):
            log.debug(u'Rewarding release {0} (repack/proper/real/rerip)',
                      result.name)
            # Stop at max. 4 proper tags
            for tag in result.proper_tags[:4]:
                score += percentage(2, score)

        if any(word in result.name.lower() for word in undesired_words):
            log.debug(u'Penalizing release {0} (contains undesired word(s))',
                      result.name)
            score -= percentage(20, score)

        if any(word in result.name.lower() for word in preferred_words):
            log.debug(u'Rewarding release {0} (contains preferred word(s))',
                      result.name)
            score += percentage(20, score)

        wanted_results.append((result, score))
        wanted_results.sort(key=operator.itemgetter(1), reverse=True)

    header = '{0:<6} {1}'.format('Score', 'Release')
    log.debug(
        u'Computed result scores:'
        u'\n{header}'
        u'\n{results}', {
            'header': header,
            'results': '\n'.join(
                '{score:<6.2f} {name}'.format(score=item[1], name=item[0].name)
                for item in wanted_results)
        })

    return [result[0] for result in wanted_results]
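The scoring above is plain percentage arithmetic; a small self-contained illustration (no Medusa imports needed) of how one reward and one penalty move the score:

def percentage(percent, whole):
    return (percent * whole) / 100.0

score = 100.0
score += percentage(10, score)  # higher-quality reward: 100.0 -> 110.0
score -= percentage(20, score)  # undesired-word penalty: 110.0 -> 88.0
print(score)  # 88.0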
Example #26
    def find_search_results(self,
                            series,
                            episodes,
                            search_mode,
                            forced_search=False,
                            download_current_quality=False,
                            manual_search=False,
                            manual_search_type='episode'):
        """
        Search episodes based on param.

        Search the provider using http queries.
        :param series: Series object
        :param episodes: List of Episode objects
        :param search_mode: 'eponly' or 'sponly'
        :param forced_search: Flag if the search was triggered by a forced search
        :param download_current_quality: Flag if we want to include an already downloaded quality in the new search
        :param manual_search: Flag if the search was triggered by a manual search
        :param manual_search_type: How the manual search was started: For example an 'episode' or 'season'

        :return: A dict of search results, ordered by episode number.
        """
        self._check_auth()
        self.series = series

        season_search = (len(episodes) > 1 or manual_search_type
                         == 'season') and search_mode == 'sponly'
        results = []

        for episode in episodes:
            search_strings = []
            if season_search:
                search_strings = self._get_season_search_strings(episode)
            elif search_mode == 'eponly':
                search_strings = self._get_episode_search_strings(episode)

            for search_string in search_strings:
                # Find results from the provider
                items = self.search(search_string,
                                    ep_obj=episode,
                                    manual_search=manual_search)
                for item in items:
                    result = self.get_result(series=series, item=item)
                    if result not in results:
                        result.quality = Quality.quality_from_name(
                            result.name, series.is_anime)
                        results.append(result)

            # In season search, we can't loop in episodes lists as we
            # only need one episode to get the season string
            if search_mode == 'sponly':
                break

        log.debug('Found {0} unique search results', len(results))

        # sort qualities in descending order
        results.sort(key=operator.attrgetter('quality'), reverse=True)

        # Move through each item and parse with NameParser()
        for search_result in results:

            if forced_search:
                search_result.search_type = FORCED_SEARCH
            search_result.download_current_quality = download_current_quality
            search_result.result_wanted = True

            try:
                search_result.parsed_result = NameParser(
                    parse_method=('normal', 'anime')[series.is_anime]).parse(
                        search_result.name)
            except (InvalidNameException, InvalidShowException) as error:
                log.debug(
                    'Error during parsing of release name: {release_name}, with error: {error}',
                    {
                        'release_name': search_result.name,
                        'error': error
                    })
                search_result.add_cache_entry = False
                search_result.result_wanted = False
                continue

            # I don't know why I'm doing this. Maybe remove it altogether later, now that I've added the parsed_result
            # to the search_result.
            search_result.series = search_result.parsed_result.series
            search_result.quality = search_result.parsed_result.quality
            search_result.release_group = search_result.parsed_result.release_group
            search_result.version = search_result.parsed_result.version
            search_result.actual_season = search_result.parsed_result.season_number
            search_result.actual_episodes = search_result.parsed_result.episode_numbers

            if not manual_search:
                if not (search_result.series.air_by_date
                        or search_result.series.sports):
                    if search_mode == 'sponly':
                        if search_result.parsed_result.episode_numbers:
                            log.debug(
                                'This is supposed to be a season pack search but the result {0} is not a valid '
                                'season pack, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue
                        elif not [
                                ep for ep in episodes
                                if search_result.parsed_result.season_number ==
                            (ep.season, ep.scene_season)[ep.series.is_scene]
                        ]:
                            log.debug(
                                'This season result {0} is for a season we are not searching for, '
                                'skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue
                    else:
                        # I'm going to split these up for better readability
                        # Check if at least got a season parsed.
                        if search_result.parsed_result.season_number is None:
                            log.debug(
                                "The result {0} doesn't seem to have a valid season that we are currently trying to "
                                'snatch, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue

                        # Check if we at least got some episode numbers parsed.
                        if not search_result.parsed_result.episode_numbers:
                            log.debug(
                                "The result {0} doesn't seem to match an episode that we are currently trying to "
                                'snatch, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue

                        # Compare the episodes and season from the result with what was searched.
                        wanted_ep = False
                        for searched_ep in episodes:
                            if searched_ep.series.is_scene and searched_ep.scene_episode:
                                season = searched_ep.scene_season
                                episode = searched_ep.scene_episode
                            else:
                                season = searched_ep.season
                                episode = searched_ep.episode

                            if (season == search_result.parsed_result.season_number
                                    and episode in search_result.parsed_result.episode_numbers):
                                wanted_ep = True
                                break

                        if not wanted_ep:
                            log.debug(
                                "The result {0} doesn't seem to match an episode that we are currently trying to "
                                'snatch, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue

                    # We've performed some checks to decide if we want to continue with this result.
                    # If we've hit this, that means this is not an air_by_date and not a sports show. And it seems to be
                    # a valid result. Let's store the parsed season and episode number and continue.
                    search_result.actual_season = search_result.parsed_result.season_number
                    search_result.actual_episodes = search_result.parsed_result.episode_numbers
                else:
                    # air_by_date or sportshow.
                    search_result.same_day_special = False

                    if not search_result.parsed_result.is_air_by_date:
                        log.debug(
                            "This is supposed to be a date search but the result {0} didn't parse as one, "
                            'skipping it', search_result.name)
                        search_result.result_wanted = False
                        continue
                    else:
                        # Use a query against the tv_episodes table, to match the parsed air_date against.
                        air_date = search_result.parsed_result.air_date.toordinal()
                        db = DBConnection()
                        sql_results = db.select(
                            'SELECT season, episode FROM tv_episodes WHERE indexer = ? AND showid = ? AND airdate = ?',
                            [
                                search_result.series.indexer,
                                search_result.series.series_id, air_date
                            ])

                        if len(sql_results) == 2:
                            if int(sql_results[0]['season']) == 0 and int(
                                    sql_results[1]['season']) != 0:
                                search_result.actual_season = int(
                                    sql_results[1]['season'])
                                search_result.actual_episodes = [
                                    int(sql_results[1]['episode'])
                                ]
                                search_result.same_day_special = True
                            elif int(sql_results[1]['season']) == 0 and int(
                                    sql_results[0]['season']) != 0:
                                search_result.actual_season = int(
                                    sql_results[0]['season'])
                                search_result.actual_episodes = [
                                    int(sql_results[0]['episode'])
                                ]
                                search_result.same_day_special = True
                        elif len(sql_results) != 1:
                            log.warning(
                                "Tried to look up the date for the episode {0} but the database didn't return proper "
                                'results, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue

                        # @TODO: Need to verify and test this.
                        if search_result.result_wanted and not search_result.same_day_special:
                            search_result.actual_season = int(
                                sql_results[0]['season'])
                            search_result.actual_episodes = [
                                int(sql_results[0]['episode'])
                            ]

        final_results = {}
        cl = []
        # Iterate again over the search results, and see if there is anything we want.
        for search_result in results:

            # Try to cache the item if we want to.
            cache_result = search_result.add_result_to_cache(self.cache)
            if cache_result is not None:
                cl.append(cache_result)

            if not search_result.result_wanted:
                log.debug(
                    "We aren't interested in this result: {0} with url: {1}",
                    search_result.name, search_result.url)
                continue

            log.debug('Found result {0} at {1}', search_result.name,
                      search_result.url)

            search_result.update_search_result()

            if search_result.episode_number == SEASON_RESULT:
                log.debug('Found season pack result {0} at {1}',
                          search_result.name, search_result.url)
            elif search_result.episode_number == MULTI_EP_RESULT:
                log.debug(
                    'Found multi-episode ({0}) result {1} at {2}', ', '.join(
                        map(str, search_result.parsed_result.episode_numbers)),
                    search_result.name, search_result.url)
            else:
                log.debug('Found single episode result {0} at {1}',
                          search_result.name, search_result.url)

            if search_result.episode_number not in final_results:
                final_results[search_result.episode_number] = [search_result]
            else:
                final_results[search_result.episode_number].append(
                    search_result)

        if cl:
            # Access to a protected member of a client class
            db = self.cache._get_db()
            db.mass_action(cl)

        return final_results
Example #27
def tvepisode(tvshow, create_tvepisode):
    return create_tvepisode(series=tvshow, season=3, episode=4, indexer=34, file_size=1122334455,
                            name='Episode Title', status=Quality.composite_status(DOWNLOADED, Quality.FULLHDBLURAY),
                            release_group='SuperGroup')
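A brief hedged sketch of the composite status this fixture builds: Example #19 splits such a value back apart and reads its .status attribute (the matching .quality attribute is assumed by analogy, not shown on this page). Imports follow Example #22.

from medusa.common import DOWNLOADED, Quality

composite = Quality.composite_status(DOWNLOADED, Quality.FULLHDBLURAY)
split = Quality.split_composite_status(composite)
print(split.status == DOWNLOADED)  # expected True; split.quality is assumed to hold Quality.FULLHDBLURAY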
Example #28
    def addShowByID(self, showslug=None, show_name=None, which_series=None,
                    indexer_lang=None, root_dir=None, default_status=None,
                    quality_preset=None, any_qualities=None, best_qualities=None,
                    season_folders=None, subtitles=None, full_show_path=None,
                    other_shows=None, skip_show=None, provided_indexer=None,
                    anime=None, scene=None, blacklist=None, whitelist=None,
                    default_status_after=None, configure_show_options=False):
        """
        Add a new show with the provided show options by indexer_id.
        Currently only TVDB and IMDB ids are supported.
        """
        identifier = SeriesIdentifier.from_slug(showslug)
        series_id = identifier.id
        indexername = identifier.indexer.slug

        if identifier.indexer.slug != 'tvdb':
            series_id = helpers.get_tvdb_from_id(identifier.id, indexername.upper())
            if not series_id:
                log.info('Unable to find tvdb ID to add {name}', {'name': show_name})
                ui.notifications.error(
                    'Unable to add {0}'.format(show_name),
                    'Could not add {0}. We were unable to locate the tvdb id at this time.'.format(show_name)
                )
                return json_response(
                    result=False,
                    message='Unable to find tvdb ID to add {show}'.format(show=show_name)
                )

        if Series.find_by_identifier(identifier):
            return json_response(
                result=False,
                message='Show already exists'
            )

        # Sanitize the parameter allowed_qualities and preferred_qualities. As these would normally be passed as lists
        if any_qualities:
            any_qualities = any_qualities.split(',')
        else:
            any_qualities = []

        if best_qualities:
            best_qualities = best_qualities.split(',')
        else:
            best_qualities = []

        # If configure_show_options is enabled let's use the provided settings
        configure_show_options = config.checkbox_to_value(configure_show_options)

        if configure_show_options:
            # prepare the inputs for passing along
            scene = config.checkbox_to_value(scene)
            anime = config.checkbox_to_value(anime)
            season_folders = config.checkbox_to_value(season_folders)
            subtitles = config.checkbox_to_value(subtitles)

            if whitelist:
                whitelist = short_group_names(whitelist)
            if blacklist:
                blacklist = short_group_names(blacklist)

            if not any_qualities:
                any_qualities = []

            if not best_qualities or try_int(quality_preset, None):
                best_qualities = []

            if not isinstance(any_qualities, list):
                any_qualities = [any_qualities]

            if not isinstance(best_qualities, list):
                best_qualities = [best_qualities]

            quality = {'allowed': any_qualities, 'preferred': best_qualities}

            location = root_dir

        else:
            default_status = app.STATUS_DEFAULT
            allowed, preferred = Quality.split_quality(int(app.QUALITY_DEFAULT))
            quality = {'allowed': allowed, 'preferred': preferred}
            season_folders = app.SEASON_FOLDERS_DEFAULT
            subtitles = app.SUBTITLES_DEFAULT
            anime = app.ANIME_DEFAULT
            scene = app.SCENE_DEFAULT
            default_status_after = app.STATUS_DEFAULT_AFTER

            if app.ROOT_DIRS:
                root_dirs = app.ROOT_DIRS
                location = root_dirs[int(root_dirs[0]) + 1]
            else:
                location = None

        if not location:
            log.warning('There was an error creating the show, no root directory setting found')
            return json_response(
                result=False,
                message='No root directories set up, please go back and add one.'
            )

        show_name = get_showname_from_indexer(INDEXER_TVDBV2, series_id)
        show_dir = None

        # add the show
        app.show_queue_scheduler.action.addShow(INDEXER_TVDBV2, int(series_id), show_dir, default_status=int(default_status), quality=quality,
                                                season_folders=season_folders, lang=indexer_lang, subtitles=subtitles, anime=anime, scene=scene,
                                                paused=None, blacklist=blacklist, whitelist=whitelist,
                                                default_status_after=int(default_status_after), root_dir=location)

        ui.notifications.message('Show added', 'Adding the specified show {0}'.format(show_name))

        # done adding show
        return json_response(
            message='Adding the specified show {0}'.format(show_name),
            redirect='home'
        )
Example #29
    # Then
    assert actual is True


@pytest.mark.parametrize('p', [
    {  # p0 - Invalid combined quality
        'quality': -4,
        'expected': False
    },
    {  # p1 - Valid 'allowed' quality
        'quality': Quality.HDTV,
        'expected': True
    },
    {  # p2 - Valid 'allowed' quality + valid 'preferred' quality
        'quality': Quality.combine_qualities([Quality.HDTV], [Quality.HDWEBDL]),
        'expected': True
    },
    {  # p3 - Valid 'allowed' quality + **invalid** 'preferred' quality
        'quality': Quality.combine_qualities([Quality.HDTV], [-4]),
        'expected': False
    },
])
def test_is_valid_combined_quality(p):
    # Given
    quality = p['quality']
    expected = p['expected']

    # When
    actual = Quality.is_valid_combined_quality(quality)

    # Then
    assert expected == actual
Example #30
def search_for_needed_episodes(scheduler_start_time, force=False):
    """Search providers for needed episodes.

    :param force: run the search even if no episodes are needed
    :param scheduler_start_time: timestamp of the start of the search scheduler
    :return: list of found episodes
    """
    show_list = app.showList
    from_date = datetime.date.fromordinal(1)
    episodes = []

    for cur_show in show_list:
        if cur_show.paused:
            log.debug(
                u'Not checking for needed episodes of {0} because the show is paused',
                cur_show.name,
            )
            continue
        episodes.extend(wanted_episodes(cur_show, from_date))

    if not episodes and not force:
        # nothing wanted so early out, ie: avoid whatever arbitrarily
        # complex thing a provider cache update entails, for example,
        # reading rss feeds
        return []

    providers = enabled_providers(u'daily')
    if not providers:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for daily searches.'
            u' Please check your settings'
        )
        return []

    original_thread_name = threading.currentThread().name
    log.info(u'Using daily search providers')

    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name
        )
        cur_provider.cache.update_cache(scheduler_start_time)

    single_results = {}
    multi_results = []
    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name
        )
        try:
            found_results = cur_provider.cache.find_needed_episodes(episodes)
        except AuthException as error:
            log.error(u'Authentication error: {0}', ex(error))
            continue

        # pick a single result for each episode, respecting existing results
        for episode_no, results in iteritems(found_results):
            if results[0].series.paused:
                log.debug(u'Skipping {0} because the show is paused.', results[0].series.name)
                continue

            # if all results were rejected move on to the next episode
            wanted_results = filter_results(results)
            if not wanted_results:
                log.debug(u'All found results for {0} were rejected.', results[0].series.name)
                continue

            best_result = pick_result(wanted_results)
            # Skip the result if search delay is enabled for the provider.
            if delay_search(best_result):
                continue

            if episode_no in (SEASON_RESULT, MULTI_EP_RESULT):
                multi_results.append(best_result)
            else:
                # If it's already in the list (from another provider) and
                # the newly found quality is no better, skip it.
                if episode_no in single_results:
                    allowed_qualities, preferred_qualities = results[0].series.current_qualities
                    if not Quality.is_higher_quality(single_results[episode_no].quality,
                                                     best_result.quality, allowed_qualities,
                                                     preferred_qualities):
                        continue

                single_results[episode_no] = best_result

    threading.currentThread().name = original_thread_name

    return combine_results(multi_results, list(itervalues(single_results)))
Beispiel #31
0
def sort_results(results):
    """Sort results based on show specific preferences."""
    wanted_results = []
    if not results:
        log.debug(u'No results to sort.')
        return wanted_results

    sorted_results = sorted(results, key=operator.attrgetter('quality'), reverse=True)
    log.debug(u'Sorting the following results: {0}', [x.name for x in sorted_results])

    preferred_words = []
    if app.PREFERRED_WORDS:
        preferred_words = [word.lower() for word in app.PREFERRED_WORDS]
    undesired_words = []
    if app.UNDESIRED_WORDS:
        undesired_words = [word.lower() for word in app.UNDESIRED_WORDS]

    def percentage(percent, whole):
        return (percent * whole) / 100.0

    initial_score = 100.0
    for result in sorted_results:
        score = initial_score

        if wanted_results:
            allowed_qualities, preferred_qualities = result.series.current_qualities
            if Quality.is_higher_quality(wanted_results[0][0].quality, result.quality,
                                         allowed_qualities, preferred_qualities):
                log.debug(u'Rewarding release {0} (higher quality)', result.name)
                score += percentage(10, score)
                initial_score = score

        if result.proper_tags and (not wanted_results or
                                   wanted_results[0][0].quality == result.quality):
            log.debug(u'Rewarding release {0} (repack/proper/real/rerip)', result.name)
            # Stop at max. 4 proper tags
            for tag in result.proper_tags[:4]:
                score += percentage(2, score)

        if any(word in result.name.lower() for word in undesired_words):
            log.debug(u'Penalizing release {0} (contains undesired word(s))', result.name)
            score -= percentage(20, score)

        if any(word in result.name.lower() for word in preferred_words):
            log.debug(u'Rewarding release {0} (contains preferred word(s))', result.name)
            score += percentage(20, score)

        wanted_results.append((result, score))
        wanted_results.sort(key=operator.itemgetter(1), reverse=True)

    header = '{0:<6} {1}'.format('Score', 'Release')
    log.debug(
        u'Computed result scores:'
        u'\n{header}'
        u'\n{results}',
        {
            'header': header,
            'results': '\n'.join(
                '{score:<6.2f} {name}'.format(score=item[1], name=item[0].name)
                for item in wanted_results
            )
        }
    )

    return [result[0] for result in wanted_results]
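
To make the compounding of the percentage-based rewards and penalties above concrete, here is a tiny standalone calculation using the same `percentage` helper; the release attributes are hypothetical:

def percentage(percent, whole):
    return (percent * whole) / 100.0


# Starting from the initial score of 100, a release with two proper tags
# and a preferred word would score roughly as follows:
score = 100.0
for _tag in ('proper', 'repack'):   # +2% per proper tag (max. 4 tags)
    score += percentage(2, score)
score += percentage(20, score)      # contains a preferred word
print(round(score, 2))              # -> 124.85
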
Beispiel #32
0
def get_provider_cache_results(indexer,
                               show_all_results=None,
                               perform_search=None,
                               show=None,
                               season=None,
                               episode=None,
                               manual_search_type=None,
                               **search_show):
    """Check all provider cache tables for search results."""
    down_cur_quality = 0
    show_obj = Show.find(app.showList, int(show))
    preferred_words = show_obj.show_words().preferred_words
    undesired_words = show_obj.show_words().undesired_words
    ignored_words = show_obj.show_words().ignored_words
    required_words = show_obj.show_words().required_words

    main_db_con = db.DBConnection('cache.db')

    provider_results = {
        'last_prov_updates': {},
        'error': {},
        'found_items': []
    }
    original_thread_name = threading.currentThread().name

    sql_total = []
    combined_sql_q = []
    combined_sql_params = []

    for cur_provider in enabled_providers('manualsearch'):
        threading.currentThread().name = '{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)

        # Let's check if this provider table already exists
        table_exists = main_db_con.select(
            b"SELECT name "
            b"FROM sqlite_master "
            b"WHERE type='table'"
            b" AND name=?", [cur_provider.get_id()])
        columns = [
            i[1] for i in main_db_con.select("PRAGMA table_info('{0}')".format(
                cur_provider.get_id()))
        ] if table_exists else []
        minseed = int(cur_provider.minseed) if getattr(cur_provider, 'minseed',
                                                       None) else -1
        minleech = int(cur_provider.minleech) if getattr(
            cur_provider, 'minleech', None) else -1

        # TODO: the implicit sqlite rowid is used, should be replaced with an explicit PK column
        # If the table or the columns seeders, leechers, size and proper_tags don't exist yet, a search is needed to create them
        required_columns = ['seeders', 'leechers', 'size', 'proper_tags']
        if table_exists and all(required_column in columns
                                for required_column in required_columns):
            # The default sql that's executed for each provider's cache table
            common_sql = (
                b"SELECT rowid, ? AS 'provider_type', ? AS 'provider_image',"
                b" ? AS 'provider', ? AS 'provider_id', ? 'provider_minseed',"
                b" ? 'provider_minleech', name, season, episodes, indexerid,"
                b" url, time, proper_tags, quality, release_group, version,"
                b" seeders, leechers, size, time, pubdate "
                b"FROM '{provider_id}' "
                b"WHERE indexerid = ? AND quality > 0 ".format(
                    provider_id=cur_provider.get_id()))

            # Let's start by adding the default parameters, which are used to substitute the '?'s.
            add_params = [
                cur_provider.provider_type.title(),
                cur_provider.image_name(), cur_provider.name,
                cur_provider.get_id(), minseed, minleech, show
            ]

            if manual_search_type != 'season':
                # If we're not showing all results, add sql to filter on season + episode
                if not int(show_all_results):
                    # If it's an episode search, pass season and episode.
                    common_sql += " AND season = ? AND episodes LIKE ? "
                    add_params += [season, "%|{0}|%".format(episode)]

            else:
                # If we're not showing all results, add sql to filter on the season and its episodes
                if not int(show_all_results):
                    list_of_episodes = '{0}{1}'.format(
                        ' episodes LIKE ', ' AND episodes LIKE '.join(
                            ['?' for _ in show_obj.get_all_episodes(season)]))

                    common_sql += " AND season = ? AND (episodes LIKE ? OR {list_of_episodes})".format(
                        list_of_episodes=list_of_episodes)
                    add_params += [season,
                                   '||']  # When the episodes field is empty.
                    add_params += [
                        '%|{episode}|%'.format(episode=ep.episode)
                        for ep in show_obj.get_all_episodes(season)
                    ]

            # Add the created sql to the lists that are used below to perform one big UNIONed query
            combined_sql_q.append(common_sql)
            combined_sql_params += add_params

            # Get the last updated cache items timestamp
            last_update = main_db_con.select(
                b"SELECT max(time) AS lastupdate "
                b"FROM '{provider_id}'".format(
                    provider_id=cur_provider.get_id()))
            provider_results['last_prov_updates'][
                cur_provider.get_id()] = last_update[0][
                    'lastupdate'] if last_update[0]['lastupdate'] else 0

    # Check if we have the combined sql strings
    if combined_sql_q:
        sql_prepend = b"SELECT * FROM ("
        sql_append = b") ORDER BY CASE quality WHEN '{quality_unknown}' THEN -1 ELSE CAST(quality AS DECIMAL) END DESC, " \
                     b" proper_tags DESC, seeders DESC".format(quality_unknown=Quality.UNKNOWN)

        # Add all results
        sql_total += main_db_con.select(
            b'{0} {1} {2}'.format(sql_prepend,
                                  ' UNION ALL '.join(combined_sql_q),
                                  sql_append), combined_sql_params)

    # Always start a search when no items found in cache
    if not sql_total or int(perform_search):
        # retrieve the episode object and fail if we can't get one
        ep_obj = get_episode(show, season, episode)
        if isinstance(ep_obj, str):
            provider_results[
                'error'] = 'Something went wrong when starting the manual search for show {0}, \
            and episode: {1}x{2}'.format(show_obj.name, season, episode)

        # make a queue item for it and put it on the queue
        ep_queue_item = ForcedSearchQueueItem(ep_obj.series, [ep_obj],
                                              bool(int(down_cur_quality)),
                                              True, manual_search_type)  # pylint: disable=maybe-no-member

        app.forced_search_queue_scheduler.action.add_item(ep_queue_item)

        # give the CPU a break and some time to start the queue
        time.sleep(cpu_presets[app.CPU_PRESET])
    else:
        cached_results = [dict(row) for row in sql_total]
        for i in cached_results:
            i['quality_name'] = Quality.split_quality(int(i['quality']))
            i['time'] = datetime.fromtimestamp(i['time'])
            i['release_group'] = i['release_group'] or 'None'
            i['provider_img_link'] = 'images/providers/' + (
                i['provider_image'] or 'missing.png')
            i['provider'] = i['provider'] if i[
                'provider_image'] else 'missing provider'
            i['proper_tags'] = i['proper_tags'].replace('|', ', ')
            i['pretty_size'] = pretty_file_size(
                i['size']) if i['size'] > -1 else 'N/A'
            i['seeders'] = i['seeders'] if i['seeders'] >= 0 else '-'
            i['leechers'] = i['leechers'] if i['leechers'] >= 0 else '-'
            i['pubdate'] = sbdatetime.convert_to_setting(
                parser.parse(i['pubdate'])).strftime(
                    app.DATE_PRESET + ' ' +
                    app.TIME_PRESET) if i['pubdate'] else '-'
            release_group = i['release_group']
            if ignored_words and release_group in ignored_words:
                i['rg_highlight'] = 'ignored'
            elif required_words and release_group in required_words:
                i['rg_highlight'] = 'required'
            elif preferred_words and release_group in preferred_words:
                i['rg_highlight'] = 'preferred'
            elif undesired_words and release_group in undesired_words:
                i['rg_highlight'] = 'undesired'
            else:
                i['rg_highlight'] = ''
            if contains_at_least_one_word(i['name'], required_words):
                i['name_highlight'] = 'required'
            elif contains_at_least_one_word(
                    i['name'], ignored_words) or not filter_bad_releases(
                        i['name'], parse=False):
                i['name_highlight'] = 'ignored'
            elif contains_at_least_one_word(i['name'], undesired_words):
                i['name_highlight'] = 'undesired'
            elif contains_at_least_one_word(i['name'], preferred_words):
                i['name_highlight'] = 'preferred'
            else:
                i['name_highlight'] = ''
            i['seed_highlight'] = 'ignored' if i.get(
                'provider_minseed') > i.get('seeders', -1) >= 0 else ''
            i['leech_highlight'] = 'ignored' if i.get(
                'provider_minleech') > i.get('leechers', -1) >= 0 else ''
        provider_results['found_items'] = cached_results

    # Remove provider from thread name before returning results
    threading.currentThread().name = original_thread_name

    # Sanitize the last_prov_updates key
    provider_results['last_prov_updates'] = json.dumps(
        provider_results['last_prov_updates'])
    return provider_results
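
The function above builds one SELECT per provider cache table and stitches them together with UNION ALL. A simplified, standalone sketch of the resulting query shape for two hypothetical provider tables ('provider_a' and 'provider_b', columns trimmed for brevity):

provider_ids = ['provider_a', 'provider_b']  # hypothetical table names
per_provider_sql = [
    "SELECT rowid, '{0}' AS provider_id, name, season, episodes, quality "
    "FROM '{0}' WHERE indexerid = ? AND quality > 0".format(provider_id)
    for provider_id in provider_ids
]
combined_sql = 'SELECT * FROM ({0}) ORDER BY quality DESC'.format(
    ' UNION ALL '.join(per_provider_sql))
print(combined_sql)
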
Beispiel #33
0
    def addShowByID(self,
                    indexername=None,
                    seriesid=None,
                    show_name=None,
                    which_series=None,
                    indexer_lang=None,
                    root_dir=None,
                    default_status=None,
                    quality_preset=None,
                    any_qualities=None,
                    best_qualities=None,
                    season_folders=None,
                    subtitles=None,
                    full_show_path=None,
                    other_shows=None,
                    skip_show=None,
                    provided_indexer=None,
                    anime=None,
                    scene=None,
                    blacklist=None,
                    whitelist=None,
                    default_status_after=None,
                    configure_show_options=False):
        """
        Add a new show with the provided show options by indexer_id.
        Currently only TVDB and IMDB ids are supported.
        """
        series_id = seriesid
        if indexername != 'tvdb':
            series_id = helpers.get_tvdb_from_id(seriesid, indexername.upper())
            if not series_id:
                logger.log(u'Unable to find tvdb ID to add %s' % show_name)
                ui.notifications.error(
                    'Unable to add %s' % show_name,
                    'Could not add %s. We were unable to locate the tvdb id at this time.'
                    % show_name)
                return

        if Show.find_by_id(app.showList, INDEXER_TVDBV2, series_id):
            return

        # Sanitize the parameters allowed_qualities and preferred_qualities, as these would normally be passed as lists.
        if any_qualities:
            any_qualities = any_qualities.split(',')
        else:
            any_qualities = []

        if best_qualities:
            best_qualities = best_qualities.split(',')
        else:
            best_qualities = []

        # If configure_show_options is enabled let's use the provided settings
        configure_show_options = config.checkbox_to_value(
            configure_show_options)

        if configure_show_options:
            # prepare the inputs for passing along
            scene = config.checkbox_to_value(scene)
            anime = config.checkbox_to_value(anime)
            season_folders = config.checkbox_to_value(season_folders)
            subtitles = config.checkbox_to_value(subtitles)

            if whitelist:
                whitelist = short_group_names(whitelist)
            if blacklist:
                blacklist = short_group_names(blacklist)

            if not any_qualities:
                any_qualities = []

            if not best_qualities or try_int(quality_preset, None):
                best_qualities = []

            if not isinstance(any_qualities, list):
                any_qualities = [any_qualities]

            if not isinstance(best_qualities, list):
                best_qualities = [best_qualities]

            quality = Quality.combine_qualities(
                [int(q) for q in any_qualities],
                [int(q) for q in best_qualities])

            location = root_dir

        else:
            default_status = app.STATUS_DEFAULT
            quality = app.QUALITY_DEFAULT
            season_folders = app.SEASON_FOLDERS_DEFAULT
            subtitles = app.SUBTITLES_DEFAULT
            anime = app.ANIME_DEFAULT
            scene = app.SCENE_DEFAULT
            default_status_after = app.STATUS_DEFAULT_AFTER

            if app.ROOT_DIRS:
                root_dirs = app.ROOT_DIRS
                location = root_dirs[int(root_dirs[0]) + 1]
            else:
                location = None

        if not location:
            logger.log(
                u'There was an error creating the show, '
                u'no root directory setting found', logger.WARNING)
            return 'No root directories set up, please go back and add one.'

        show_name = get_showname_from_indexer(INDEXER_TVDBV2, series_id)
        show_dir = None

        # add the show
        app.show_queue_scheduler.action.addShow(INDEXER_TVDBV2,
                                                int(series_id),
                                                show_dir,
                                                int(default_status),
                                                quality,
                                                season_folders,
                                                indexer_lang,
                                                subtitles,
                                                anime,
                                                scene,
                                                None,
                                                blacklist,
                                                whitelist,
                                                int(default_status_after),
                                                root_dir=location)

        ui.notifications.message(
            'Show added', 'Adding the specified show {0}'.format(show_name))

        # done adding show
        return self.redirect('/home/')
Beispiel #34
0
def search_for_needed_episodes(scheduler_start_time, force=False):
    """Search providers for needed episodes.

    :param force: run the search even if no episodes are needed
    :param scheduler_start_time: timestamp of the start of the search scheduler
    :return: list of found episodes
    """
    show_list = app.showList
    from_date = datetime.date.fromordinal(1)
    episodes = []

    for cur_show in show_list:
        if cur_show.paused:
            log.debug(
                u'Not checking for needed episodes of {0} because the show is paused',
                cur_show.name,
            )
            continue
        episodes.extend(wanted_episodes(cur_show, from_date))

    if not episodes and not force:
        # Nothing is wanted, so return early and avoid whatever arbitrarily
        # complex work a provider cache update entails (for example,
        # reading RSS feeds).
        return []

    providers = enabled_providers(u'daily')
    if not providers:
        log.warning(
            u'No NZB/Torrent providers found or enabled in the application config for daily searches.'
            u' Please check your settings')
        return []

    original_thread_name = threading.currentThread().name
    log.info(u'Using daily search providers')

    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)
        cur_provider.cache.update_cache(scheduler_start_time)

    single_results = {}
    multi_results = []
    for cur_provider in providers:
        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)
        try:
            found_results = cur_provider.cache.find_needed_episodes(episodes)
        except AuthException as error:
            log.error(u'Authentication error: {0}', ex(error))
            continue

        # pick a single result for each episode, respecting existing results
        for episode_no, results in iteritems(found_results):
            if results[0].series.paused:
                log.debug(u'Skipping {0} because the show is paused.',
                          results[0].series.name)
                continue

            # if all results were rejected move on to the next episode
            wanted_results = filter_results(results)
            if not wanted_results:
                log.debug(u'All found results for {0} were rejected.',
                          results[0].series.name)
                continue

            best_result = pick_result(wanted_results)
            # Skip the result if search delay is enabled for the provider.
            if delay_search(best_result):
                continue

            if episode_no in (SEASON_RESULT, MULTI_EP_RESULT):
                multi_results.append(best_result)
            else:
                # If it's already in the list (from another provider) and
                # the newly found quality is no better, skip it.
                if episode_no in single_results:
                    allowed_qualities, preferred_qualities = results[
                        0].series.current_qualities
                    if not Quality.is_higher_quality(
                            single_results[episode_no].quality,
                            best_result.quality, allowed_qualities,
                            preferred_qualities):
                        continue

                single_results[episode_no] = best_result

    threading.currentThread().name = original_thread_name

    return combine_results(multi_results, list(itervalues(single_results)))
    """A test `Series` object that does not need DB access."""
    def __init__(self, indexer, indexer_id, lang, quality):
        """Initialize the object."""
        super(TestTVShow, self).__init__(indexer, indexer_id, lang, quality)

    def _load_from_db(self):
        """Override Series._load_from_db to avoid DB access during testing."""
        pass


@pytest.mark.parametrize(
    'p',
    [
        {  # p0: Downloaded a quality not in quality system : yes
            'status':
            Quality.composite_status(DOWNLOADED, Quality.SDTV),
            'show_obj':
            TestTVShow(
                indexer=1,
                indexer_id=1,
                lang='',
                quality=Quality.combine_qualities(
                    [Quality.HDTV],  # Allowed Qualities
                    [Quality.HDWEBDL])),  # Preferred Qualities
            'manually_searched':
            False,
            'expected':
            True
        },
        {  # p1: Current status is SKIPPED: no
            'status':
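
The `TestTVShow` fixture above follows a common test pattern: subclass the production class and stub out the persistence hook so tests never touch the database. A generic, self-contained illustration of the same idea (class names here are made up):

class ProductionThing(object):
    def __init__(self, name):
        self.name = name
        self._load_from_db()

    def _load_from_db(self):
        raise RuntimeError('no database available in tests!')


class StubbedThing(ProductionThing):
    def _load_from_db(self):
        """Override to avoid DB access during testing."""
        pass


assert StubbedThing('example').name == 'example'
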
Beispiel #36
0
                'quality': Quality.FULLHDTV
            },
            {  # 2
                'name': 'Show.Name.S03E04.iNTERNAL.1080p.WEB-DL.x264-RlsGrp',
                'quality': Quality.FULLHDWEBDL
            },
        ]
    },
    {  # p4 - preferred lower quality
        'config': {
            'PREFERRED_WORDS': [],
            'UNDESIRED_WORDS': [],
        },
        'series': {
            'quality': Quality.combine_qualities(
                [Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY],
                [Quality.HDTV]
            ),
        },
        'expected': 1,  # Index of the expected result
        'results': [
            {  # 0
                'name': 'Show.Name.S03E04.1080p.WEB-DL.x264-RlsGrp',
                'quality': Quality.FULLHDWEBDL
            },
            {  # 1
                'name': 'Show.Name.S03E04.720p.HDTV.x264-RlsGrp',
                'quality': Quality.HDTV
            },
            {  # 2
                'name': 'Show.Name.S03E04.1080p.HDTV.x264-RlsGrp',
                'quality': Quality.FULLHDTV
Beispiel #37
0
    def get_quality(self, item, anime=False):
        """Get scene quality of the result."""
        (title, _) = self._get_title_and_url(item)
        quality = Quality.scene_quality(title, anime)

        return quality
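
`Quality.scene_quality` derives a quality value from the release title alone. A toy stand-in (not Medusa's parser) that keys off a couple of common tokens, just to show the shape of the mapping:

def toy_scene_quality(title):
    """Very rough approximation of title-based quality detection (illustration only)."""
    title = title.lower()
    if '1080p' in title and 'web' in title:
        return 'FULLHDWEBDL'
    if '1080p' in title:
        return 'FULLHDTV'
    if '720p' in title:
        return 'HDTV'
    return 'UNKNOWN'


print(toy_scene_quality('Show.Name.S03E04.1080p.WEB-DL.x264-RlsGrp'))  # -> FULLHDWEBDL
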
Beispiel #38
0
    def _get_proper_results(self):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
        """Retrieve a list of recently aired episodes, and search for these episodes in the different providers."""
        propers = {}

        # For each provider get the list of propers
        original_thread_name = threading.currentThread().name
        providers = enabled_providers('backlog')

        search_date = datetime.datetime.today() - datetime.timedelta(
            days=app.PROPERS_SEARCH_DAYS)
        main_db_con = db.DBConnection()
        if not app.POSTPONE_IF_NO_SUBS:
            # Get recently aired episodes (last PROPERS_SEARCH_DAYS days) from the DB
            search_q_params = ','.join('?' for _ in Quality.DOWNLOADED)
            recently_aired = main_db_con.select(
                b'SELECT showid, season, episode, status, airdate'
                b' FROM tv_episodes'
                b' WHERE airdate >= ?'
                b' AND status IN ({0})'.format(search_q_params),
                [search_date.toordinal()] + Quality.DOWNLOADED)
        else:
            # Get recently subtitled episodes (last PROPERS_SEARCH_DAYS days) from the DB
            # Episode status becomes DOWNLOADED only after subtitles are found
            last_subtitled = search_date.strftime(History.date_format)
            recently_aired = main_db_con.select(
                b'SELECT showid, season, episode FROM history '
                b"WHERE date >= ? AND action LIKE '%10'", [last_subtitled])

        if not recently_aired:
            log.info('No recently aired new episodes, nothing to search for')
            return []

        # Loop through the providers, and search for releases
        for cur_provider in providers:
            threading.currentThread().name = '{thread} :: [{provider}]'.format(
                thread=original_thread_name, provider=cur_provider.name)

            log.info('Searching for any new PROPER releases from {provider}',
                     {'provider': cur_provider.name})

            try:
                cur_propers = cur_provider.find_propers(recently_aired)
            except AuthException as e:
                log.debug('Authentication error: {error}', {'error': ex(e)})
                continue

            # If it hasn't already been added by a different provider, add the proper to the list
            for proper in cur_propers:
                name = self._sanitize_name(proper.name)
                if name not in propers:
                    log.debug('Found new possible proper result: {name}',
                              {'name': proper.name})
                    propers[name] = proper

        threading.currentThread().name = original_thread_name

        # Take the list of unique propers and sort it by date
        sorted_propers = sorted(propers.values(),
                                key=operator.attrgetter('date'),
                                reverse=True)
        final_propers = []

        # Keep only items from the last PROPERS_SEARCH_DAYS days in processed propers:
        latest_proper = datetime.datetime.now() - datetime.timedelta(
            days=app.PROPERS_SEARCH_DAYS)
        self.processed_propers = [
            p for p in self.processed_propers if p.get('date') >= latest_proper
        ]

        # Get proper names from processed propers
        processed_propers_names = [
            proper.get('name') for proper in self.processed_propers
            if proper.get('name')
        ]

        for cur_proper in sorted_propers:

            if not self.ignore_processed_propers and cur_proper.name in processed_propers_names:
                log.debug(u'Proper already processed. Skipping: {proper_name}',
                          {'proper_name': cur_proper.name})
                continue

            try:
                cur_proper.parse_result = NameParser().parse(cur_proper.name)
            except (InvalidNameException, InvalidShowException) as error:
                log.debug('{error}', {'error': error})
                continue

            if not cur_proper.parse_result.proper_tags:
                log.info('Skipping non-proper: {name}',
                         {'name': cur_proper.name})
                continue

            log.debug(
                'Proper tags for {proper}: {tags}', {
                    'proper': cur_proper.name,
                    'tags': cur_proper.parse_result.proper_tags
                })

            if not cur_proper.parse_result.series_name:
                log.debug('Ignoring invalid show: {name}',
                          {'name': cur_proper.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({
                        'name': cur_proper.name,
                        'date': cur_proper.date
                    })
                continue

            if not cur_proper.parse_result.episode_numbers:
                log.debug('Ignoring full season instead of episode: {name}',
                          {'name': cur_proper.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({
                        'name': cur_proper.name,
                        'date': cur_proper.date
                    })
                continue

            log.debug(
                'Successful match! Matched {original_name} to show {new_name}',
                {
                    'original_name': cur_proper.parse_result.original_name,
                    'new_name': cur_proper.parse_result.show.name
                })

            # Map the indexerid in the db to the show's indexerid
            cur_proper.indexerid = cur_proper.parse_result.show.indexerid

            # Map the indexer in the db to the show's indexer
            cur_proper.indexer = cur_proper.parse_result.show.indexer

            # Map our Proper instance
            cur_proper.show = cur_proper.parse_result.show
            cur_proper.actual_season = cur_proper.parse_result.season_number \
                if cur_proper.parse_result.season_number is not None else 1
            cur_proper.actual_episodes = cur_proper.parse_result.episode_numbers
            cur_proper.release_group = cur_proper.parse_result.release_group
            cur_proper.version = cur_proper.parse_result.version
            cur_proper.quality = cur_proper.parse_result.quality
            cur_proper.content = None
            cur_proper.proper_tags = cur_proper.parse_result.proper_tags

            # Filter the release; in this case it's just a quality gate, as we only send one result.
            best_result = pick_best_result(cur_proper)

            if not best_result:
                log.info('Rejected proper: {name}', {'name': cur_proper.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({
                        'name': cur_proper.name,
                        'date': cur_proper.date
                    })
                continue

            # only get anime proper if it has release group and version
            if best_result.show.is_anime:
                if not best_result.release_group and best_result.version == -1:
                    log.info(
                        'Ignoring proper without release group and version: {name}',
                        {'name': best_result.name})
                    if cur_proper.name not in processed_propers_names:
                        self.processed_propers.append({
                            'name': cur_proper.name,
                            'date': cur_proper.date
                        })
                    continue

            # check if we have the episode as DOWNLOADED
            main_db_con = db.DBConnection()
            sql_results = main_db_con.select(
                b"SELECT status, release_name FROM tv_episodes WHERE "
                b"showid = ? AND season = ? AND episode = ? AND status LIKE '%04'",
                [
                    best_result.indexerid, best_result.actual_season,
                    best_result.actual_episodes[0]
                ])
            if not sql_results:
                log.info(
                    "Ignoring proper because this episode doesn't have 'DOWNLOADED' status: {name}",
                    {'name': best_result.name})
                continue

            # only keep the proper if we have already downloaded an episode with the same quality
            _, old_quality = Quality.split_composite_status(
                int(sql_results[0][b'status']))
            if old_quality != best_result.quality:
                log.info(
                    'Ignoring proper because quality is different: {name}',
                    {'name': best_result.name})
                if cur_proper.name not in processed_propers_names:
                    self.processed_propers.append({
                        'name': cur_proper.name,
                        'date': cur_proper.date
                    })
                continue

            # only keep the proper if we have already downloaded an episode with the same codec
            release_name = sql_results[0][b'release_name']
            if release_name:
                current_codec = NameParser()._parse_string(
                    release_name).video_codec
                # Ignore proper if codec differs from downloaded release codec
                if all([
                        current_codec, best_result.parse_result.video_codec,
                        best_result.parse_result.video_codec != current_codec
                ]):
                    log.info(
                        'Ignoring proper because codec is different: {name}',
                        {'name': best_result.name})
                    if best_result.name not in processed_propers_names:
                        self.processed_propers.append({
                            'name': best_result.name,
                            'date': best_result.date
                        })
                    continue
            else:
                log.debug(
                    "Coudn't find a release name in database. Skipping codec comparison for: {name}",
                    {'name': best_result.name})

            # check if we actually want this proper (if it's the right release group and a higher version)
            if best_result.show.is_anime:
                main_db_con = db.DBConnection()
                sql_results = main_db_con.select(
                    b'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                    [
                        best_result.indexerid, best_result.actual_season,
                        best_result.actual_episodes[0]
                    ])

                old_version = int(sql_results[0][b'version'])
                old_release_group = (sql_results[0][b'release_group'])

                if -1 < old_version < best_result.version:
                    log.info(
                        'Found new anime version {new} to replace existing version {old}: {name}',
                        {
                            'old': old_version,
                            'new': best_result.version,
                            'name': best_result.name
                        })
                else:
                    log.info(
                        'Ignoring proper with the same or lower version: {name}',
                        {'name': best_result.name})
                    if cur_proper.name not in processed_propers_names:
                        self.processed_propers.append({
                            'name': best_result.name,
                            'date': best_result.date
                        })
                    continue

                if old_release_group != best_result.release_group:
                    log.info(
                        'Ignoring proper from release group {new} instead of current group {old}',
                        {
                            'new': best_result.release_group,
                            'old': old_release_group
                        })
                    if best_result.name not in processed_propers_names:
                        self.processed_propers.append({
                            'name': best_result.name,
                            'date': best_result.date
                        })
                    continue

            # if the show is in our list and there hasn't been a proper already added for that particular episode
            # then add it to our list of propers
            if best_result.indexerid != -1 and (
                    best_result.indexerid, best_result.actual_season,
                    best_result.actual_episodes[0]) not in map(
                        operator.attrgetter('indexerid', 'actual_season',
                                            'actual_episode'), final_propers):
                log.info('Found a desired proper: {name}',
                         {'name': best_result.name})
                final_propers.append(best_result)

            if best_result.name not in processed_propers_names:
                self.processed_propers.append({
                    'name': best_result.name,
                    'date': best_result.date
                })

        return final_propers
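
A standalone sketch of the `processed_propers` bookkeeping used above: entries older than the PROPERS_SEARCH_DAYS window are dropped, and any name already inside the window is skipped. The sample names and the two-day window below are hypothetical:

import datetime

PROPERS_SEARCH_DAYS = 2
processed_propers = [
    {'name': 'Show.S01E01.PROPER.720p-GRP', 'date': datetime.datetime.now()},
    {'name': 'Old.Show.S02E03.REPACK-GRP',
     'date': datetime.datetime.now() - datetime.timedelta(days=10)},
]

cutoff = datetime.datetime.now() - datetime.timedelta(days=PROPERS_SEARCH_DAYS)
processed_propers = [p for p in processed_propers if p['date'] >= cutoff]
processed_names = [p['name'] for p in processed_propers]

candidate = 'Show.S01E01.PROPER.720p-GRP'
if candidate in processed_names:
    print('Skipping already processed proper: {0}'.format(candidate))
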
Beispiel #39
0
    def mass_edit_show(
        self, show_obj, location=None, allowed_qualities=None, preferred_qualities=None,
        season_folders=None, paused=None, air_by_date=None, sports=None, dvd_order=None, subtitles=None,
        anime=None, scene=None, default_ep_status=None
    ):
        """A variation of the original `editShow`, where `directCall` is always true."""
        allowed_qualities = allowed_qualities or []
        preferred_qualities = preferred_qualities or []

        errors = 0

        do_update_scene_numbering = not (scene == show_obj.scene and anime == show_obj.anime)

        if not isinstance(allowed_qualities, list):
            allowed_qualities = [allowed_qualities]

        if not isinstance(preferred_qualities, list):
            preferred_qualities = [preferred_qualities]

        with show_obj.lock:
            new_quality = Quality.combine_qualities([int(q) for q in allowed_qualities],
                                                    [int(q) for q in preferred_qualities])
            show_obj.quality = new_quality

            # reversed for now
            if bool(show_obj.season_folders) != bool(season_folders):
                show_obj.season_folders = season_folders
                try:
                    app.show_queue_scheduler.action.refreshShow(show_obj)
                except CantRefreshShowException as error:
                    errors += 1
                    log.warning("Unable to refresh show '{show}': {error}", {
                        'show': show_obj.name, 'error': error
                    })

            # Check if we should erase parsed cached results for that show
            do_erase_parsed_cache = False
            for item in [('scene', scene), ('anime', anime), ('sports', sports),
                         ('air_by_date', air_by_date), ('dvd_order', dvd_order)]:
                if getattr(show_obj, item[0]) != item[1]:
                    do_erase_parsed_cache = True
                    # Break if at least one setting was changed
                    break

            show_obj.paused = paused
            show_obj.scene = scene
            show_obj.anime = anime
            show_obj.sports = sports
            show_obj.subtitles = subtitles
            show_obj.air_by_date = air_by_date
            show_obj.default_ep_status = int(default_ep_status)
            show_obj.dvd_order = dvd_order

            # if we change location clear the db of episodes, change it, write to db, and rescan
            old_location = path.normpath(show_obj._location)
            new_location = path.normpath(location)
            if old_location != new_location:
                changed_location = True
                log.info('Changing show location to: {new}', {'new': new_location})
                if not path.isdir(new_location):
                    if app.CREATE_MISSING_SHOW_DIRS:
                        log.info("Show directory doesn't exist, creating it")
                        try:
                            mkdir(new_location)
                        except OSError as error:
                            errors += 1
                            changed_location = False
                            log.warning("Unable to create the show directory '{location}'. Error: {msg}", {
                                        'location': new_location, 'msg': error})
                        else:
                            log.info('New show directory created')
                            helpers.chmod_as_parent(new_location)
                    else:
                        changed_location = False
                        log.warning("New location '{location}' does not exist. "
                                    "Enable setting 'Create missing show dirs'", {'location': location})

                # Save new location to DB only if we changed it
                if changed_location:
                    show_obj.location = new_location

                if changed_location and path.isdir(new_location):
                    try:
                        app.show_queue_scheduler.action.refreshShow(show_obj)
                    except CantRefreshShowException as error:
                        errors += 1
                        log.warning("Unable to refresh show '{show}'. Error: {error}", {
                                    'show': show_obj.name, 'error': error})

            # Save all settings changed while in show_obj.lock
            show_obj.save_to_db()

        if do_update_scene_numbering or do_erase_parsed_cache:
            try:
                xem_refresh(show_obj)
            except CantUpdateShowException as error:
                errors += 1
                log.warning("Unable to update scene numbering for show '{show}': {error}",
                            {'show': show_obj.name, 'error': error})

            # Must erase cached DB results when toggling scene numbering
            show_obj.erase_provider_cache()

            # Erase parsed cached names as we are changing scene numbering
            show_obj.flush_episodes()
            show_obj.erase_cached_parse()

            # Need to refresh show as we updated scene numbering or changed show format
            try:
                app.show_queue_scheduler.action.refreshShow(show_obj)
            except CantRefreshShowException as error:
                errors += 1
                log.warning(
                    "Unable to refresh show '{show}'. Please manually trigger a full show refresh. "
                    'Error: {error!r}'.format(show=show_obj.name, error=error),
                    {'show': show_obj.name, 'error': error}
                )

        return errors
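
The cache-erase decision above only needs to know whether any of the parsing-related flags changed. A standalone sketch of that loop with a plain object standing in for a real show (attribute values are hypothetical):

class FakeShow(object):
    scene = False
    anime = True
    sports = False
    air_by_date = False
    dvd_order = False


show_obj = FakeShow()
new_values = [('scene', True), ('anime', True), ('sports', False),
              ('air_by_date', False), ('dvd_order', False)]

do_erase_parsed_cache = False
for name, new_value in new_values:
    if getattr(show_obj, name) != new_value:
        do_erase_parsed_cache = True
        break  # one changed setting is enough

print(do_erase_parsed_cache)  # -> True, because 'scene' changed
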
Beispiel #40
0
    def addShowByID(self, indexername=None, seriesid=None, show_name=None, which_series=None,
                    indexer_lang=None, root_dir=None, default_status=None,
                    quality_preset=None, any_qualities=None, best_qualities=None,
                    season_folders=None, subtitles=None, full_show_path=None,
                    other_shows=None, skip_show=None, provided_indexer=None,
                    anime=None, scene=None, blacklist=None, whitelist=None,
                    default_status_after=None, configure_show_options=False):
        """
        Add a new show with the provided show options by indexer_id.
        Currently only TVDB and IMDB ids are supported.
        """
        series_id = seriesid
        if indexername != 'tvdb':
            series_id = helpers.get_tvdb_from_id(seriesid, indexername.upper())
            if not series_id:
                log.info('Unable to find tvdb ID to add {name}', {'name': show_name})
                ui.notifications.error(
                    'Unable to add {0}'.format(show_name),
                    'Could not add {0}. We were unable to locate the tvdb id at this time.'.format(show_name)
                )
                return json_response(
                    result=False,
                    message='Unable to find tvdb ID to add {show}'.format(show=show_name)
                )

        if Show.find_by_id(app.showList, INDEXER_TVDBV2, series_id):
            return json_response(
                result=False,
                message='Show already exists'
            )

        # Sanitize the parameters allowed_qualities and preferred_qualities, as these would normally be passed as lists.
        if any_qualities:
            any_qualities = any_qualities.split(',')
        else:
            any_qualities = []

        if best_qualities:
            best_qualities = best_qualities.split(',')
        else:
            best_qualities = []

        # If configure_show_options is enabled let's use the provided settings
        configure_show_options = config.checkbox_to_value(configure_show_options)

        if configure_show_options:
            # prepare the inputs for passing along
            scene = config.checkbox_to_value(scene)
            anime = config.checkbox_to_value(anime)
            season_folders = config.checkbox_to_value(season_folders)
            subtitles = config.checkbox_to_value(subtitles)

            if whitelist:
                whitelist = short_group_names(whitelist)
            if blacklist:
                blacklist = short_group_names(blacklist)

            if not any_qualities:
                any_qualities = []

            if not best_qualities or try_int(quality_preset, None):
                best_qualities = []

            if not isinstance(any_qualities, list):
                any_qualities = [any_qualities]

            if not isinstance(best_qualities, list):
                best_qualities = [best_qualities]

            quality = Quality.combine_qualities([int(q) for q in any_qualities], [int(q) for q in best_qualities])

            location = root_dir

        else:
            default_status = app.STATUS_DEFAULT
            quality = app.QUALITY_DEFAULT
            season_folders = app.SEASON_FOLDERS_DEFAULT
            subtitles = app.SUBTITLES_DEFAULT
            anime = app.ANIME_DEFAULT
            scene = app.SCENE_DEFAULT
            default_status_after = app.STATUS_DEFAULT_AFTER

            if app.ROOT_DIRS:
                root_dirs = app.ROOT_DIRS
                location = root_dirs[int(root_dirs[0]) + 1]
            else:
                location = None

        if not location:
            log.warning('There was an error creating the show, no root directory setting found')
            return json_response(
                result=False,
                message='No root directories set up, please go back and add one.'
            )

        show_name = get_showname_from_indexer(INDEXER_TVDBV2, series_id)
        show_dir = None

        # add the show
        app.show_queue_scheduler.action.addShow(INDEXER_TVDBV2, int(series_id), show_dir, int(default_status), quality,
                                                season_folders, indexer_lang, subtitles, anime, scene, None, blacklist,
                                                whitelist, int(default_status_after), root_dir=location)

        ui.notifications.message('Show added', 'Adding the specified show {0}'.format(show_name))

        # done adding show
        return json_response(
            message='Adding the specified show {0}'.format(show_name),
            redirect='home'
        )
Beispiel #41
0
    def post(self):
        """Perform a mass update action."""
        required_options = (
            'paused', 'defaultEpisodeStatus', 'anime', 'sports', 'scene',
            'airByDate', 'seasonFolders', 'subtitles', 'qualities'
        )
        data = json_decode(self.request.body)
        shows = data.get('shows', [])
        options = data.get('options')
        errors = 0

        if not options:
            return self._bad_request('Options missing')

        missing_options = []
        for req_option in required_options:
            if req_option not in options:
                missing_options.append(req_option)

        if missing_options:
            return self._bad_request(f"Missing options: {', '.join(missing_options)}")

        paused = options.get('paused')
        default_ep_status = options.get('defaultEpisodeStatus')
        if isinstance(default_ep_status, str):
            default_ep_status = {v: k for k, v in statusStrings.items()}.get(default_ep_status)
        anime = options.get('anime')
        sports = options.get('sports')
        scene = options.get('scene')
        air_by_date = options.get('airByDate')
        dvd_order = options.get('dvdOrder')
        season_folders = options.get('seasonFolders')
        subtitles = options.get('subtitles')
        qualities = options.get('qualities')

        for show_slug in shows:
            identifier = SeriesIdentifier.from_slug(show_slug)
            show_obj = Series.find_by_identifier(identifier)

            if not show_obj:
                continue

            cur_root_dir = path.dirname(show_obj._location)
            cur_show_dir = path.basename(show_obj._location)
            for root_dir in options.get('rootDirs'):
                if cur_root_dir != root_dir['old']:
                    continue

                if root_dir['old'] != root_dir['new']:
                    new_show_dir = path.join(root_dir['new'], cur_show_dir)
                    log.info('For show {show_name} changing dir from {old_location} to {new_location}', {
                             'show_name': show_obj.name, 'old_location': show_obj._location, 'new_location': new_show_dir})
                else:
                    new_show_dir = show_obj._location

            new_paused = show_obj.paused if paused is None else paused
            new_default_ep_status = show_obj.default_ep_status if default_ep_status is None else default_ep_status
            new_anime = show_obj.anime if anime is None else anime
            new_sports = show_obj.sports if sports is None else sports
            new_scene = show_obj.scene if scene is None else scene
            new_air_by_date = show_obj.air_by_date if air_by_date is None else air_by_date
            new_dvd_order = show_obj.dvd_order if dvd_order is None else dvd_order
            new_season_folders = show_obj.season_folders if season_folders is None else season_folders
            new_subtitles = show_obj.subtitles if subtitles is None else subtitles

            # If both are empty (two empty arrays), use the show's current values.
            if not qualities['allowed'] and not qualities['preferred']:
                new_quality_allowed, new_quality_preferred = show_obj.current_qualities
            else:
                new_quality_allowed, new_quality_preferred = qualities['allowed'], qualities['preferred']

            # If the user set a quality preset, remove all preferred qualities
            if Quality.combine_qualities(new_quality_allowed, new_quality_preferred) in qualityPresets:
                new_quality_preferred = []

            errors += self.mass_edit_show(
                show_obj, location=new_show_dir,
                allowed_qualities=new_quality_allowed, preferred_qualities=new_quality_preferred,
                season_folders=new_season_folders, paused=new_paused, air_by_date=new_air_by_date, sports=new_sports,
                dvd_order=new_dvd_order, subtitles=new_subtitles, anime=new_anime, scene=new_scene,
                default_ep_status=new_default_ep_status,
            )

        return self._created(data={'errors': errors})
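
For reference, a hypothetical request body that the mass-update handler above would accept; the show slugs and values are illustrative only, and `None` for an option means "keep the show's current value":

payload = {
    'shows': ['tvdb301824', 'tvdb81189'],   # hypothetical show slugs
    'options': {
        'paused': False,
        'defaultEpisodeStatus': 'Wanted',
        'anime': None,
        'sports': None,
        'scene': None,
        'airByDate': None,
        'dvdOrder': None,
        'seasonFolders': True,
        'subtitles': True,
        # Two empty lists mean "keep each show's current qualities".
        'qualities': {'allowed': [], 'preferred': []},
        'rootDirs': [{'old': '/tv', 'new': '/mnt/tv'}],
    },
}
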
Beispiel #42
0
@pytest.mark.parametrize(
    'p',
    [
        {  # p0: Downloaded a quality not in quality system : yes
            'status':
            DOWNLOADED,
            'quality':
            Quality.SDTV,
            'show_obj':
            TestTVShow(
                indexer=1,
                indexer_id=1,
                lang='',
                quality=Quality.combine_qualities(
                    [Quality.HDTV],  # Allowed Qualities
                    [Quality.HDWEBDL])),  # Preferred Qualities
            'manually_searched':
            False,
            'expected':
            True
        },
        {  # p1: Current status is SKIPPED: no
            'status':
            SKIPPED,
            'quality':
            Quality.NA,
            'show_obj':
            TestTVShow(
                indexer=1,
                indexer_id=1,
Beispiel #43
0
    def get_quality(self, item, anime=False):
        """Get quality of the result from its name."""
        (title, _) = self._get_title_and_url(item)
        quality = Quality.quality_from_name(title, anime)

        return quality
Beispiel #44
0
def pick_best_result(results):  # pylint: disable=too-many-branches
    """
    Find the best result out of a list of search results for a show.

    :param results: list of result objects
    :return: best result object
    """
    results = results if isinstance(results, list) else [results]

    log.debug(u'Picking the best result out of {0}', [x.name for x in results])

    best_result = None

    # find the best result for the current episode
    for cur_result in results:
        assert cur_result.series, 'Every SearchResult object should have a series object available at this point.'

        # Every SearchResult object should have a show attribute available at this point.
        series_obj = cur_result.series

        # build the black and white list
        if series_obj.is_anime:
            if not series_obj.release_groups.is_valid(cur_result):
                continue

        log.info(u'Quality of {0} is {1}', cur_result.name,
                 Quality.qualityStrings[cur_result.quality])

        allowed_qualities, preferred_qualities = series_obj.current_qualities

        if cur_result.quality not in allowed_qualities + preferred_qualities:
            log.debug(u'{0} is an unwanted quality, rejecting it',
                      cur_result.name)
            continue

        wanted_ep = True

        if cur_result.actual_episodes:
            wanted_ep = False
            for episode in cur_result.actual_episodes:
                if series_obj.want_episode(cur_result.actual_season,
                                           episode,
                                           cur_result.quality,
                                           cur_result.forced_search,
                                           cur_result.download_current_quality,
                                           search_type=cur_result.search_type):
                    wanted_ep = True

        if not wanted_ep:
            continue

        # If it doesn't meet the provider's minimum seeders or leechers, discard it
        if cur_result.seeders not in (-1, None) and cur_result.leechers not in (-1, None) \
            and hasattr(cur_result.provider, u'minseed') and hasattr(cur_result.provider, u'minleech') \
            and (int(cur_result.seeders) < int(cur_result.provider.minseed) or
                 int(cur_result.leechers) < int(cur_result.provider.minleech)):
            log.info(
                u'Discarding torrent because it does not meet the minimum provider setting '
                u'S:{0} L:{1}. Result has S:{2} L:{3}',
                cur_result.provider.minseed,
                cur_result.provider.minleech,
                cur_result.seeders,
                cur_result.leechers,
            )
            continue

        ignored_words = series_obj.show_words().ignored_words
        required_words = series_obj.show_words().required_words
        found_ignored_word = naming.contains_at_least_one_word(
            cur_result.name, ignored_words)
        found_required_word = naming.contains_at_least_one_word(
            cur_result.name, required_words)

        if ignored_words and found_ignored_word:
            log.info(u'Ignoring {0} based on ignored words filter: {1}',
                     cur_result.name, found_ignored_word)
            continue

        if required_words and not found_required_word:
            log.info(u'Ignoring {0} based on required words filter: {1}',
                     cur_result.name, required_words)
            continue

        if not naming.filter_bad_releases(cur_result.name, parse=False):
            continue

        if hasattr(cur_result, u'size'):
            if app.USE_FAILED_DOWNLOADS and failed_history.has_failed(
                    cur_result.name, cur_result.size,
                    cur_result.provider.name):
                log.info(u'{0} has previously failed, rejecting it',
                         cur_result.name)
                continue

        preferred_words = []
        if app.PREFERRED_WORDS:
            preferred_words = [_.lower() for _ in app.PREFERRED_WORDS]
        undesired_words = []
        if app.UNDESIRED_WORDS:
            undesired_words = [_.lower() for _ in app.UNDESIRED_WORDS]

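        # Tie-breaking when the new result is not strictly higher quality: on equal quality,
        # prefer results with preferred words or proper/repack tags, and replace a best pick
        # that contains undesired words with one that does not.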
        if not best_result:
            best_result = cur_result
        if Quality.is_higher_quality(best_result.quality, cur_result.quality,
                                     allowed_qualities, preferred_qualities):
            best_result = cur_result
        elif best_result.quality == cur_result.quality:
            if any(ext in cur_result.name.lower() for ext in preferred_words):
                log.info(u'Preferring {0} (preferred words)', cur_result.name)
                best_result = cur_result
            if cur_result.proper_tags:
                log.info(
                    u'Preferring {0} (repack/proper/real/rerip over nuked)',
                    cur_result.name)
                best_result = cur_result
            if any(ext in best_result.name.lower()
                   for ext in undesired_words) and not any(
                       ext in cur_result.name.lower()
                       for ext in undesired_words):
                log.info(u'Unwanted release {0} (contains undesired word(s))',
                         best_result.name)
                best_result = cur_result

    if best_result:
        log.debug(u'Picked {0} as the best', best_result.name)
    else:
        log.debug(u'No result picked.')

    return best_result
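
The ranking step above keys on Quality.is_higher_quality, comparing the current best pick against each candidate within the show's allowed/preferred lists. A minimal sketch of that comparison, assuming the Quality helpers from medusa.common behave as they are used in pick_best_result (the concrete outcome depends on the project's quality ordering, where a preferred quality is expected to outrank a merely allowed one):

from medusa.common import Quality

allowed_qualities = [Quality.HDTV]
preferred_qualities = [Quality.HDWEBDL]

best = Quality.HDTV          # quality of the result picked so far
candidate = Quality.HDWEBDL  # quality of the result being considered

# If the candidate ranks higher within the allowed/preferred lists, it becomes the new best pick.
if Quality.is_higher_quality(best, candidate, allowed_qualities, preferred_qualities):
    best = candidate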
Beispiel #45
0
    def __init__(self, indexer, indexer_id, lang, quality):
        """Initialize the object."""
        super(TestTVShow, self).__init__(indexer, indexer_id, lang, quality)

    def _load_from_db(self):
        """Override Series._load_from_db to avoid DB access during testing."""
        pass


@pytest.mark.parametrize('p', [
    {  # p0: Downloaded a quality not in quality system : yes
        'status': DOWNLOADED,
        'quality': Quality.SDTV,
        'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                               quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                 [Quality.HDWEBDL])),  # Preferred Qualities
        'manually_searched': False,
        'expected': True
    },
    {  # p1: Current status is SKIPPED: no
        'status': SKIPPED,
        'quality': Quality.NA,
        'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                               quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                 [Quality.HDWEBDL])),  # Preferred Qualities
        'manually_searched': False,
        'expected': False
    },
    {  # p2: Current status is IGNORED: no
        'status': IGNORED,
        'quality': Quality.NA,
Beispiel #46
0
    def addNewShow(self,
                   whichSeries=None,
                   indexer_lang=None,
                   rootDir=None,
                   defaultStatus=None,
                   quality_preset=None,
                   allowed_qualities=None,
                   preferred_qualities=None,
                   season_folders=None,
                   subtitles=None,
                   fullShowPath=None,
                   other_shows=None,
                   skipShow=None,
                   providedIndexer=None,
                   anime=None,
                   scene=None,
                   blacklist=None,
                   whitelist=None,
                   defaultStatusAfter=None):
        """
        Receive an indexer id, directory, and other options and create a show from them.
        If extra show dirs are provided, it forwards back to newShow; otherwise it goes to /home.
        """
        provided_indexer = providedIndexer

        indexer_lang = app.INDEXER_DEFAULT_LANGUAGE if not indexer_lang else indexer_lang

        # grab our list of other dirs if given
        if not other_shows:
            other_shows = []
        elif not isinstance(other_shows, list):
            other_shows = [other_shows]

        def finishAddShow():
            # if there are no extra shows then go home
            if not other_shows:
                return json_redirect('/home/')

            # go to add the next show
            return json_redirect(
                '/addShows/newShow/',
                [('show_to_add' if not i else 'other_shows', cur_dir)
                 for i, cur_dir in enumerate(other_shows)])

        # if we're skipping then behave accordingly
        if skipShow:
            return finishAddShow()

        # sanity check on our inputs
        if (not rootDir and not fullShowPath) or not whichSeries:
            return 'Missing params, no Indexer ID or folder:{series!r} and {root!r}/{path!r}'.format(
                series=whichSeries, root=rootDir, path=fullShowPath)

        # figure out what show we're adding and where
        series_pieces = whichSeries.split('|')
        if (whichSeries and rootDir) or (whichSeries and fullShowPath
                                         and len(series_pieces) > 1):
            if len(series_pieces) < 6:
                logger.log(
                    u'Unable to add show due to show selection. Not enough arguments: %s'
                    % (repr(series_pieces)), logger.ERROR)
                ui.notifications.error(
                    'Unknown error. Unable to add show due to problem with show selection.'
                )
                return json_redirect('/addShows/existingShows/')

            indexer = int(series_pieces[1])
            indexer_id = int(series_pieces[3])
            show_name = series_pieces[4]
        else:
            # if no indexer was provided use the default indexer set in General settings
            if not provided_indexer:
                provided_indexer = app.INDEXER_DEFAULT

            indexer = int(provided_indexer)
            indexer_id = int(whichSeries)
            show_name = os.path.basename(os.path.normpath(fullShowPath))

        # use the whole path if it's given, or else append the show name to the root dir to get the full show path
        if fullShowPath:
            show_dir = os.path.normpath(fullShowPath)
        else:
            show_dir = os.path.join(rootDir, sanitize_filename(show_name))

        # blanket policy - if the dir exists you should have used 'add existing show' numbnuts
        if os.path.isdir(show_dir) and not fullShowPath:
            ui.notifications.error(
                'Unable to add show',
                'Folder {path} exists already'.format(path=show_dir))
            return json_redirect('/addShows/existingShows/')

        # don't create show dir if config says not to
        if app.ADD_SHOWS_WO_DIR:
            logger.log(
                u'Skipping initial creation of {path} due to config.ini setting'
                .format(path=show_dir))
        else:
            dir_exists = helpers.make_dir(show_dir)
            if not dir_exists:
                logger.log(
                    u'Unable to create the folder {path}, can\'t add the show'.
                    format(path=show_dir), logger.ERROR)
                ui.notifications.error(
                    'Unable to add show',
                    'Unable to create the folder {path}, can\'t add the show'.
                    format(path=show_dir))
                # Don't redirect to default page because user wants to see the new show
                return json_redirect('/home/')
            else:
                helpers.chmod_as_parent(show_dir)

        # prepare the inputs for passing along
        scene = config.checkbox_to_value(scene)
        anime = config.checkbox_to_value(anime)
        season_folders = config.checkbox_to_value(season_folders)
        subtitles = config.checkbox_to_value(subtitles)

        if whitelist:
            whitelist = short_group_names(whitelist)
        if blacklist:
            blacklist = short_group_names(blacklist)

        if not allowed_qualities:
            allowed_qualities = []
        if not preferred_qualities or try_int(quality_preset, None):
            preferred_qualities = []
        if not isinstance(allowed_qualities, list):
            allowed_qualities = [allowed_qualities]
        if not isinstance(preferred_qualities, list):
            preferred_qualities = [preferred_qualities]
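        # Pack the allowed and preferred quality lists into a single combined quality value.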
        new_quality = Quality.combine_qualities(
            [int(q) for q in allowed_qualities],
            [int(q) for q in preferred_qualities])

        # add the show
        app.show_queue_scheduler.action.addShow(indexer, indexer_id, show_dir,
                                                int(defaultStatus),
                                                new_quality, season_folders,
                                                indexer_lang, subtitles, anime,
                                                scene, None, blacklist,
                                                whitelist,
                                                int(defaultStatusAfter))
        ui.notifications.message(
            'Show added',
            'Adding the specified show into {path}'.format(path=show_dir))

        return finishAddShow()
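
The quality handling in addNewShow packs the two lists into one integer before queueing the show. A hedged round-trip sketch, assuming Quality.combine_qualities and Quality.split_quality (used in the cache example below) are inverses of each other, as their usage in these examples suggests:

from medusa.common import Quality

allowed = [Quality.HDTV]
preferred = [Quality.HDWEBDL]

# Pack the allowed and preferred qualities into one combined integer, as addNewShow does
# before handing the value to the show queue.
combined = Quality.combine_qualities([int(q) for q in allowed], [int(q) for q in preferred])

# split_quality is expected to recover the two lists from the combined value.
allowed_again, preferred_again = Quality.split_quality(combined)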
Beispiel #47
0
def get_provider_cache_results(series_obj, show_all_results=None, perform_search=None,
                               season=None, episode=None, manual_search_type=None, **search_show):
    """Check all provider cache tables for search results."""
    down_cur_quality = 0
    preferred_words = series_obj.show_words().preferred_words
    undesired_words = series_obj.show_words().undesired_words
    ignored_words = series_obj.show_words().ignored_words
    required_words = series_obj.show_words().required_words

    main_db_con = db.DBConnection('cache.db')

    provider_results = {'last_prov_updates': {}, 'error': {}, 'found_items': []}
    original_thread_name = threading.currentThread().name

    cached_results_total = []
    combined_sql_q = []
    combined_sql_params = []

    for cur_provider in enabled_providers('manualsearch'):
        threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name, provider=cur_provider.name)

        # Let's check if this provider table already exists
        table_exists = main_db_con.select(
            'SELECT name '
            'FROM sqlite_master '
            "WHERE type='table'"
            ' AND name=?',
            [cur_provider.get_id()]
        )

        columns = []
        if table_exists:
            table_columns = main_db_con.select("PRAGMA table_info('{0}')".format(cur_provider.get_id()))
            columns = [table_column['name'] for table_column in table_columns]

        minseed = int(cur_provider.minseed) if getattr(cur_provider, 'minseed', None) else -1
        minleech = int(cur_provider.minleech) if getattr(cur_provider, 'minleech', None) else -1

        # TODO: the implicit sqlite rowid is used, should be replaced with an explicit PK column
        # If the table doesn't exist, a search is needed first to create it along with the newer seeders, leechers and size columns
        required_columns = ['indexer', 'indexerid', 'seeders', 'leechers', 'size', 'proper_tags', 'date_added']
        if table_exists and all(required_column in columns for required_column in required_columns):
            # The default sql that's executed for each provider's cache table
            common_sql = (
                "SELECT rowid, ? AS 'provider_type', ? AS 'provider_image',"
                " ? AS 'provider', ? AS 'provider_id', ? 'provider_minseed',"
                " ? 'provider_minleech', name, season, episodes, indexer, indexerid,"
                ' url, proper_tags, quality, release_group, version,'
                ' seeders, leechers, size, time, pubdate, date_added '
                "FROM '{provider_id}' "
                'WHERE indexer = ? AND indexerid = ? AND quality > 0 '.format(
                    provider_id=cur_provider.get_id()
                )
            )

            # Let's start by adding the default parameters, which are used to substitute the '?'s.
            add_params = [cur_provider.provider_type.title(), cur_provider.image_name(),
                          cur_provider.name, cur_provider.get_id(), minseed, minleech,
                          series_obj.indexer, series_obj.series_id]

            if manual_search_type != 'season':
                # Unless all results are requested, filter the query on the specific season and episode.
                if not int(show_all_results):
                    # If it's an episode search, pass season and episode.
                    common_sql += ' AND season = ? AND episodes LIKE ? '
                    add_params += [season, '%|{0}|%'.format(episode)]

            else:
                # Unless all results are requested, filter on the season and on any of its episodes (season search).
                if not int(show_all_results):
                    list_of_episodes = '{0}{1}'.format(' episodes LIKE ', ' AND episodes LIKE '.join(
                        ['?' for _ in series_obj.get_all_episodes(season)]
                    ))

                    common_sql += ' AND season = ? AND (episodes LIKE ? OR {list_of_episodes})'.format(
                        list_of_episodes=list_of_episodes
                    )
                    add_params += [season, '||']  # When the episodes field is empty.
                    add_params += ['%|{episode}|%'.format(episode=ep.episode) for ep in series_obj.get_all_episodes(season)]

            # Add the generated sql and parameters to the lists that are used below to build one big UNION ALL query
            combined_sql_q.append(common_sql)
            combined_sql_params += add_params

            # Get the last updated cache items timestamp
            last_update = main_db_con.select('SELECT max(time) AS lastupdate '
                                             "FROM '{provider_id}'".format(provider_id=cur_provider.get_id()))
            provider_results['last_prov_updates'][cur_provider.get_id()] = last_update[0]['lastupdate'] if last_update[0]['lastupdate'] else 0

    # Check if we have the combined sql strings
    if combined_sql_q:
        sql_prepend = 'SELECT * FROM ('
        sql_append = ') ORDER BY quality DESC, proper_tags DESC, seeders DESC'

        # Add all results
        cached_results_total += main_db_con.select('{0} {1} {2}'.
                                                   format(sql_prepend, ' UNION ALL '.join(combined_sql_q), sql_append),
                                                   combined_sql_params)

    # Start a search when nothing was found in the cache, or when a search was explicitly requested
    if not cached_results_total or int(perform_search):
        # retrieve the episode object and fail if we can't get one
        ep_obj = series_obj.get_episode(season, episode)
        if isinstance(ep_obj, str):
            provider_results['error'] = ('Something went wrong when starting the manual search for show {0}, '
                                         'and episode: {1}x{2}'.format(series_obj.name, season, episode))

        # make a queue item for it and put it on the queue
        ep_queue_item = ForcedSearchQueueItem(ep_obj.series, [ep_obj], bool(int(down_cur_quality)), True, manual_search_type)  # pylint: disable=maybe-no-member

        app.forced_search_queue_scheduler.action.add_item(ep_queue_item)

        # give the CPU a break and some time to start the queue
        time.sleep(cpu_presets[app.CPU_PRESET])
    else:
        for i in cached_results_total:
            threading.currentThread().name = '{thread} :: [{provider}]'.format(
                thread=original_thread_name, provider=i['provider'])

            i['quality_name'] = Quality.split_quality(int(i['quality']))
            i['time'] = datetime.fromtimestamp(i['time'])
            i['release_group'] = i['release_group'] or 'None'
            i['provider_img_link'] = 'images/providers/' + (i['provider_image'] or 'missing.png')
            i['provider'] = i['provider'] if i['provider_image'] else 'missing provider'
            i['proper_tags'] = i['proper_tags'].replace('|', ', ')
            i['pretty_size'] = pretty_file_size(i['size']) if i['size'] > -1 else 'N/A'
            i['seeders'] = i['seeders'] if i['seeders'] >= 0 else '-'
            i['leechers'] = i['leechers'] if i['leechers'] >= 0 else '-'
            i['pubdate'] = parser.parse(i['pubdate']).astimezone(app_timezone) if i['pubdate'] else ''
            i['date_added'] = datetime.fromtimestamp(float(i['date_added']), tz=app_timezone) if i['date_added'] else ''

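            # Classify the release group and the release name against the show's word filters
            # so the UI can colour-code each cached result.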
            release_group = i['release_group']
            if ignored_words and release_group in ignored_words:
                i['rg_highlight'] = 'ignored'
            elif required_words and release_group in required_words:
                i['rg_highlight'] = 'required'
            elif preferred_words and release_group in preferred_words:
                i['rg_highlight'] = 'preferred'
            elif undesired_words and release_group in undesired_words:
                i['rg_highlight'] = 'undesired'
            else:
                i['rg_highlight'] = ''
            if contains_at_least_one_word(i['name'], required_words):
                i['name_highlight'] = 'required'
            elif contains_at_least_one_word(i['name'], ignored_words) or not filter_bad_releases(i['name'], parse=False):
                i['name_highlight'] = 'ignored'
            elif contains_at_least_one_word(i['name'], undesired_words):
                i['name_highlight'] = 'undesired'
            elif contains_at_least_one_word(i['name'], preferred_words):
                i['name_highlight'] = 'preferred'
            else:
                i['name_highlight'] = ''

            i['seed_highlight'] = 'ignored'
            if i['seeders'] == '-' or i['provider_minseed'] <= i['seeders']:
                i['seed_highlight'] = ''

            i['leech_highlight'] = 'ignored'
            if i['leechers'] == '-' or i['provider_minleech'] <= i['leechers']:
                i['leech_highlight'] = ''

        provider_results['found_items'] = cached_results_total

    # Remove the provider from the thread name before returning the results
    threading.currentThread().name = original_thread_name

    # Serialize the last_prov_updates mapping to JSON before returning it
    provider_results['last_prov_updates'] = json.dumps(provider_results['last_prov_updates'])
    return provider_results
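
To make the UNION ALL assembly above concrete, here is a small self-contained sketch using only the standard sqlite3 module. The two in-memory tables, their columns, and the sample rows are invented stand-ins for provider cache tables; only the query-building pattern mirrors get_provider_cache_results.

import sqlite3

con = sqlite3.connect(':memory:')
con.executescript("""
    CREATE TABLE prov_a (name TEXT, quality INTEGER, seeders INTEGER);
    CREATE TABLE prov_b (name TEXT, quality INTEGER, seeders INTEGER);
    INSERT INTO prov_a VALUES ('Show.S01E01.720p.HDTV-GRP', 4, 10);
    INSERT INTO prov_b VALUES ('Show.S01E01.1080p.WEB-DL-GRP', 8, 50);
""")

combined_sql_q = []
combined_sql_params = []
for table in ('prov_a', 'prov_b'):
    # One SELECT per provider table, tagged with the provider name, mirroring common_sql above.
    combined_sql_q.append('SELECT ? AS provider, name, quality, seeders FROM {0}'.format(table))
    combined_sql_params.append(table)

# Wrap the per-provider queries in one UNION ALL and order the merged rows once.
query = 'SELECT * FROM ({0}) ORDER BY quality DESC, seeders DESC'.format(' UNION ALL '.join(combined_sql_q))
rows = con.execute(query, combined_sql_params).fetchall()
print(rows)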