Example #1
    def find_search_results(self,
                            show,
                            episodes,
                            search_mode,
                            forced_search=False,
                            download_current_quality=False,
                            manual_search=False,
                            manual_search_type='episode'):
        """Search episodes based on param."""
        self._check_auth()
        self.show = show

        results = {}
        items_list = []

        for episode in episodes:
            if not manual_search:
                cache_result = self.cache.search_cache(
                    episode,
                    forced_search=forced_search,
                    down_cur_quality=download_current_quality)
                if cache_result:
                    if episode.episode not in results:
                        results[episode.episode] = cache_result
                    else:
                        results[episode.episode].extend(cache_result)

                    continue

            search_strings = []
            season_search = ((len(episodes) > 1 or manual_search_type == 'season')
                             and search_mode == 'sponly')
            if season_search:
                search_strings = self._get_season_search_strings(episode)
            elif search_mode == 'eponly':
                search_strings = self._get_episode_search_strings(episode)

            for search_string in search_strings:
                # Find results from the provider
                items_list += self.search(search_string,
                                          ep_obj=episode,
                                          manual_search=manual_search)

            # In a season search we don't loop over every episode; one episode is enough to build the season search string
            if search_mode == 'sponly':
                break

        if len(results) == len(episodes):
            return results

        if items_list:
            # categorize the items into lists by quality
            items = defaultdict(list)
            for item in items_list:
                items[self.get_quality(item, anime=show.is_anime)].append(item)

            # temporarily remove the list of items with unknown quality
            unknown_items = items.pop(Quality.UNKNOWN, [])

            # make a generator to sort the remaining items by descending quality
            items_list = (items[quality]
                          for quality in sorted(items, reverse=True))

            # unpack all of the quality lists into a single sorted list
            items_list = list(chain(*items_list))

            # extend the list with the unknown qualities, now sorted at the bottom of the list
            items_list.extend(unknown_items)

        cl = []

        # Move through each item and parse it into a quality
        search_results = []
        for item in items_list:

            # Make sure we start with a TorrentSearchResult, NZBDataSearchResult or NZBSearchResult search result obj.
            search_result = self.get_result()
            search_results.append(search_result)
            search_result.item = item
            search_result.download_current_quality = download_current_quality
            # FIXME: Should be changed to search_result.search_type
            search_result.forced_search = forced_search

            (search_result.name,
             search_result.url) = self._get_title_and_url(item)
            (search_result.seeders,
             search_result.leechers) = self._get_result_info(item)

            search_result.size = self._get_size(item)
            search_result.pubdate = self._get_pubdate(item)

            search_result.result_wanted = True

            try:
                search_result.parsed_result = NameParser(
                    parse_method=('normal', 'anime')[show.is_anime]).parse(
                        search_result.name)
            except (InvalidNameException, InvalidShowException) as error:
                log.debug(
                    'Error during parsing of release name: {release_name}, with error: {error}',
                    {
                        'release_name': search_result.name,
                        'error': error
                    })
                search_result.add_cache_entry = False
                search_result.result_wanted = False
                continue

            # I don't know why I'm doing this. Maybe remove it altogether later on, now that I've added the parsed_result
            # to the search_result.
            search_result.show = search_result.parsed_result.show
            search_result.quality = search_result.parsed_result.quality
            search_result.release_group = search_result.parsed_result.release_group
            search_result.version = search_result.parsed_result.version
            search_result.actual_season = search_result.parsed_result.season_number
            search_result.actual_episodes = search_result.parsed_result.episode_numbers

            if not manual_search:
                if not (search_result.show.air_by_date
                        or search_result.show.sports):
                    if search_mode == 'sponly':
                        if search_result.parsed_result.episode_numbers:
                            log.debug(
                                'This is supposed to be a season pack search but the result {0} is not a valid '
                                'season pack, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue
                        elif not [ep for ep in episodes
                                  if search_result.parsed_result.season_number ==
                                  (ep.season, ep.scene_season)[ep.series.is_scene]]:
                            log.debug(
                                'This season result {0} is for a season we are not searching for, '
                                'skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue
                    else:
                        # I'm going to split these up for better readability
                        # Check if we at least got a season parsed.
                        if search_result.parsed_result.season_number is None:
                            log.debug(
                                "The result {0} doesn't seem to have a valid season that we are currently trying to "
                                "snatch, skipping it", search_result.name)
                            search_result.result_wanted = False
                            continue

                        # Check if we at least got some episode numbers parsed.
                        if not search_result.parsed_result.episode_numbers:
                            log.debug(
                                "The result {0} doesn't seem to match an episode that we are currently trying to "
                                "snatch, skipping it", search_result.name)
                            search_result.result_wanted = False
                            continue

                        # Compare the episodes and season from the result with what was searched.
                        if not [searched_episode for searched_episode in episodes
                                if searched_episode.season == search_result.parsed_result.season_number
                                and (searched_episode.episode,
                                     searched_episode.scene_episode)[searched_episode.series.is_scene]
                                in search_result.parsed_result.episode_numbers]:
                            log.debug(
                                "The result {0} doesn't seem to match an episode that we are currently trying to "
                                "snatch, skipping it", search_result.name)
                            search_result.result_wanted = False
                            continue

                    # We've performed some checks to decide if we want to continue with this result.
                    # If we've hit this, that means this is not an air_by_date and not a sports show, and it seems to be
                    # a valid result. Let's store the parsed season and episode number and continue.
                    search_result.actual_season = search_result.parsed_result.season_number
                    search_result.actual_episodes = search_result.parsed_result.episode_numbers
                else:
                    # air_by_date or sports show.
                    search_result.same_day_special = False

                    if not search_result.parsed_result.is_air_by_date:
                        log.debug(
                            "This is supposed to be a date search but the result {0} didn't parse as one, "
                            "skipping it", search_result.name)
                        search_result.result_wanted = False
                        continue
                    else:
                        # Use a query against the tv_episodes table, to match the parsed air_date against.
                        air_date = search_result.parsed_result.air_date.toordinal()
                        db = DBConnection()
                        sql_results = db.select(
                            'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                            [search_result.show.indexerid, air_date])

                        if len(sql_results) == 2:
                            if int(sql_results[0][b'season']) == 0 and int(
                                    sql_results[1][b'season']) != 0:
                                search_result.actual_season = int(
                                    sql_results[1][b'season'])
                                search_result.actual_episodes = [
                                    int(sql_results[1][b'episode'])
                                ]
                                search_result.same_day_special = True
                            elif int(sql_results[1][b'season']) == 0 and int(
                                    sql_results[0][b'season']) != 0:
                                search_result.actual_season = int(
                                    sql_results[0][b'season'])
                                search_result.actual_episodes = [
                                    int(sql_results[0][b'episode'])
                                ]
                                search_result.same_day_special = True
                        elif len(sql_results) != 1:
                            log.warning(
                                "Tried to look up the date for the episode {0} but the database didn't return proper "
                                "results, skipping it", search_result.name)
                            search_result.result_wanted = False
                            continue

                        # @TODO: Need to verify and test this.
                        if search_result.result_wanted and not search_result.same_day_special:
                            search_result.actual_season = int(
                                sql_results[0][b'season'])
                            search_result.actual_episodes = [
                                int(sql_results[0][b'episode'])
                            ]

        # Iterate again over the search results, and see if there is anything we want.
        for search_result in search_results:

            # Try to cache the item if we want to.
            cache_result = search_result.add_result_to_cache(self.cache)
            if cache_result is not None:
                cl.append(cache_result)

            if not search_result.result_wanted:
                log.debug(
                    "We aren't interested in this result: {0} with url: {1}",
                    search_result.name, search_result.url)
                continue

            log.debug('Found result {0} at {1}', search_result.name,
                      search_result.url)

            episode_object = search_result.create_episode_object()
            # result = self.get_result(episode_object, search_result)
            search_result.finish_search_result(self)

            if not episode_object:
                episode_number = SEASON_RESULT
                log.debug('Found season pack result {0} at {1}',
                          search_result.name, search_result.url)
            elif len(episode_object) == 1:
                episode_number = episode_object[0].episode
                log.debug('Found single episode result {0} at {1}',
                          search_result.name, search_result.url)
            else:
                episode_number = MULTI_EP_RESULT
                log.debug(
                    'Found multi-episode ({0}) result {1} at {2}', ', '.join(
                        map(str, search_result.parsed_result.episode_numbers)),
                    search_result.name, search_result.url)
            if episode_number not in results:
                results[episode_number] = [search_result]
            else:
                results[episode_number].append(search_result)

        if cl:
            # Access to a protected member of a client class
            db = self.cache._get_db()
            db.mass_action(cl)

        return results
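
The block near the top of this method that groups items_list by quality and then flattens it again is easy to misread out of context. Below is a minimal, standalone sketch of that pattern (defaultdict buckets, a descending-quality chain, unknown-quality items appended last); the integer constant and the get_quality callable are illustrative stand-ins, not Medusa's real Quality API.

from collections import defaultdict
from itertools import chain

UNKNOWN = 0  # stand-in for Quality.UNKNOWN; the real constant lives in Medusa's common module


def order_by_quality(items, get_quality):
    """Group items by quality, then flatten them best-quality-first,
    with unknown-quality items moved to the end."""
    buckets = defaultdict(list)
    for item in items:
        buckets[get_quality(item)].append(item)

    # Pull the unknown-quality bucket out so it can be re-appended last.
    unknown_items = buckets.pop(UNKNOWN, [])

    # Flatten the remaining buckets in descending quality order.
    ordered = list(chain.from_iterable(
        buckets[quality] for quality in sorted(buckets, reverse=True)))
    ordered.extend(unknown_items)
    return ordered


# Items here are (name, quality) tuples; the quality is the second field.
items = [('a', 4), ('b', 0), ('c', 8), ('d', 4)]
print(order_by_quality(items, get_quality=lambda item: item[1]))
# [('c', 8), ('a', 4), ('d', 4), ('b', 0)]
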
Example #2
    def find_search_results(self,
                            series,
                            episodes,
                            search_mode,
                            forced_search=False,
                            download_current_quality=False,
                            manual_search=False,
                            manual_search_type='episode'):
        """
        Search episodes based on param.

        Search the provider using http queries.
        :param series: Series object
        :param episodes: List of Episode objects
        :param search_mode: 'eponly' or 'sponly'
        :param forced_search: Flag if the search was triggered by a forced search
        :param download_current_quality: Flag if we want to include an already downloaded quality in the new search
        :param manual_search: Flag if the search was triggered by a manual search
        :param manual_search_type: How the manual search was started: For example an 'episode' or 'season'

        :return: A dict of search results, ordered by episode number.
        """
        self._check_auth()
        self.series = series

        season_search = ((len(episodes) > 1 or manual_search_type == 'season')
                         and search_mode == 'sponly')
        results = []

        for episode in episodes:
            search_strings = []
            if season_search:
                search_strings = self._get_season_search_strings(episode)
            elif search_mode == 'eponly':
                search_strings = self._get_episode_search_strings(episode)

            for search_string in search_strings:
                # Find results from the provider
                items = self.search(search_string,
                                    ep_obj=episode,
                                    manual_search=manual_search)
                for item in items:
                    result = self.get_result(series=series, item=item)
                    if result not in results:
                        result.quality = Quality.quality_from_name(
                            result.name, series.is_anime)
                        results.append(result)

            # In a season search we don't loop over every episode; one
            # episode is enough to build the season search string
            if search_mode == 'sponly':
                break

        log.debug('Found {0} unique search results', len(results))

        # sort qualities in descending order
        results.sort(key=operator.attrgetter('quality'), reverse=True)

        # Move through each item and parse with NameParser()
        for search_result in results:

            if forced_search:
                search_result.search_type = FORCED_SEARCH
            search_result.download_current_quality = download_current_quality
            search_result.result_wanted = True

            try:
                search_result.parsed_result = NameParser(
                    parse_method=('normal', 'anime')[series.is_anime]).parse(
                        search_result.name)
            except (InvalidNameException, InvalidShowException) as error:
                log.debug(
                    'Error during parsing of release name: {release_name}, with error: {error}',
                    {
                        'release_name': search_result.name,
                        'error': error
                    })
                search_result.add_cache_entry = False
                search_result.result_wanted = False
                continue

            # I don't know why I'm doing this. Maybe remove it altogether later on, now that I've added the parsed_result
            # to the search_result.
            search_result.series = search_result.parsed_result.series
            search_result.quality = search_result.parsed_result.quality
            search_result.release_group = search_result.parsed_result.release_group
            search_result.version = search_result.parsed_result.version
            search_result.actual_season = search_result.parsed_result.season_number
            search_result.actual_episodes = search_result.parsed_result.episode_numbers

            if not manual_search:
                if not (search_result.series.air_by_date
                        or search_result.series.sports):
                    if search_mode == 'sponly':
                        if search_result.parsed_result.episode_numbers:
                            log.debug(
                                'This is supposed to be a season pack search but the result {0} is not a valid '
                                'season pack, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue
                        elif not [ep for ep in episodes
                                  if search_result.parsed_result.season_number ==
                                  (ep.season, ep.scene_season)[ep.series.is_scene]]:
                            log.debug(
                                'This season result {0} is for a season we are not searching for, '
                                'skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue
                    else:
                        # I'm going to split these up for better readability
                        # Check if we at least got a season parsed.
                        if search_result.parsed_result.season_number is None:
                            log.debug(
                                "The result {0} doesn't seem to have a valid season that we are currently trying to "
                                'snatch, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue

                        # Check if we at least got some episode numbers parsed.
                        if not search_result.parsed_result.episode_numbers:
                            log.debug(
                                "The result {0} doesn't seem to match an episode that we are currently trying to "
                                'snatch, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue

                        # Compare the episodes and season from the result with what was searched.
                        wanted_ep = False
                        for searched_ep in episodes:
                            if searched_ep.series.is_scene and searched_ep.scene_episode:
                                season = searched_ep.scene_season
                                episode = searched_ep.scene_episode
                            else:
                                season = searched_ep.season
                                episode = searched_ep.episode

                            if (season == search_result.parsed_result.season_number
                                    and episode in search_result.parsed_result.episode_numbers):
                                wanted_ep = True
                                break

                        if not wanted_ep:
                            log.debug(
                                "The result {0} doesn't seem to match an episode that we are currently trying to "
                                'snatch, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue

                    # We've performed some checks to decide if we want to continue with this result.
                    # If we've hit this, that means this is not an air_by_date and not a sports show, and it seems to be
                    # a valid result. Let's store the parsed season and episode number and continue.
                    search_result.actual_season = search_result.parsed_result.season_number
                    search_result.actual_episodes = search_result.parsed_result.episode_numbers
                else:
                    # air_by_date or sports show.
                    search_result.same_day_special = False

                    if not search_result.parsed_result.is_air_by_date:
                        log.debug(
                            "This is supposed to be a date search but the result {0} didn't parse as one, "
                            'skipping it', search_result.name)
                        search_result.result_wanted = False
                        continue
                    else:
                        # Use a query against the tv_episodes table, to match the parsed air_date against.
                        air_date = search_result.parsed_result.air_date.toordinal()
                        db = DBConnection()
                        sql_results = db.select(
                            'SELECT season, episode FROM tv_episodes WHERE indexer = ? AND showid = ? AND airdate = ?',
                            [
                                search_result.series.indexer,
                                search_result.series.series_id, air_date
                            ])

                        if len(sql_results) == 2:
                            if int(sql_results[0]['season']) == 0 and int(
                                    sql_results[1]['season']) != 0:
                                search_result.actual_season = int(
                                    sql_results[1]['season'])
                                search_result.actual_episodes = [
                                    int(sql_results[1]['episode'])
                                ]
                                search_result.same_day_special = True
                            elif int(sql_results[1]['season']) == 0 and int(
                                    sql_results[0]['season']) != 0:
                                search_result.actual_season = int(
                                    sql_results[0]['season'])
                                search_result.actual_episodes = [
                                    int(sql_results[0]['episode'])
                                ]
                                search_result.same_day_special = True
                        elif len(sql_results) != 1:
                            log.warning(
                                "Tried to look up the date for the episode {0} but the database didn't return proper "
                                'results, skipping it', search_result.name)
                            search_result.result_wanted = False
                            continue

                        # @TODO: Need to verify and test this.
                        if search_result.result_wanted and not search_result.same_day_special:
                            search_result.actual_season = int(
                                sql_results[0]['season'])
                            search_result.actual_episodes = [
                                int(sql_results[0]['episode'])
                            ]

        final_results = {}
        cl = []
        # Iterate again over the search results, and see if there is anything we want.
        for search_result in results:

            # Try to cache the item if we want to.
            cache_result = search_result.add_result_to_cache(self.cache)
            if cache_result is not None:
                cl.append(cache_result)

            if not search_result.result_wanted:
                log.debug(
                    "We aren't interested in this result: {0} with url: {1}",
                    search_result.name, search_result.url)
                continue

            log.debug('Found result {0} at {1}', search_result.name,
                      search_result.url)

            search_result.update_search_result()

            if search_result.episode_number == SEASON_RESULT:
                log.debug('Found season pack result {0} at {1}',
                          search_result.name, search_result.url)
            elif search_result.episode_number == MULTI_EP_RESULT:
                log.debug(
                    'Found multi-episode ({0}) result {1} at {2}', ', '.join(
                        map(str, search_result.parsed_result.episode_numbers)),
                    search_result.name, search_result.url)
            else:
                log.debug('Found single episode result {0} at {1}',
                          search_result.name, search_result.url)

            if search_result.episode_number not in final_results:
                final_results[search_result.episode_number] = [search_result]
            else:
                final_results[search_result.episode_number].append(
                    search_result)

        if cl:
            # Access to a protected member of a client class
            db = self.cache._get_db()
            db.mass_action(cl)

        return final_results
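
The wanted_ep loop in this version decides whether a parsed result matches one of the episodes being searched, preferring scene numbering when the series uses it. Here is a rough, self-contained sketch of that check, with simple namedtuples standing in for Medusa's Series and Episode objects (the field names are assumptions for illustration only).

from collections import namedtuple

# Simplified stand-ins for Medusa's objects; the field names are illustrative.
Series = namedtuple('Series', ['is_scene'])
Episode = namedtuple('Episode', ['series', 'season', 'episode', 'scene_season', 'scene_episode'])


def result_matches(parsed_season, parsed_episodes, searched_episodes):
    """Return True if the parsed season/episodes cover any searched episode,
    using scene numbering for scene-numbered series."""
    for ep in searched_episodes:
        if ep.series.is_scene and ep.scene_episode:
            season, episode = ep.scene_season, ep.scene_episode
        else:
            season, episode = ep.season, ep.episode
        if season == parsed_season and episode in parsed_episodes:
            return True
    return False


scene_show = Series(is_scene=True)
searched = [Episode(scene_show, season=3, episode=5, scene_season=2, scene_episode=17)]
print(result_matches(2, [17], searched))  # True: matched via scene numbering
print(result_matches(3, [5], searched))   # False: indexer numbering is ignored for scene-numbered shows
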
Example #3
File: daily.py  Project: pymedusa/SickRage
    def run(self, force=False):
        """
        Run the daily searcher, queuing selected episodes for search.

        :param force: Force search
        """
        if self.amActive:
            log.debug('Daily search is still running, not starting it again')
            return
        elif app.forced_search_queue_scheduler.action.is_forced_search_in_progress() and not force:
            log.warning('Manual search is running. Unable to start Daily search')
            return

        self.amActive = True
        # Let's keep track of the exact time the scheduler kicked in,
        # as we need to compare to this time for each provider.
        scheduler_start_time = int(time())

        if not network_dict:
            update_network_dict()

        # The tvshows airdate_offset field is used to configure a search offset for specific shows.
        # This way we can search/accept results early or late, depending on the value.
        main_db_con = DBConnection()
        min_offset_show = main_db_con.select(
            'SELECT COUNT(*) as offsets, MIN(airdate_offset) AS min_offset '
            'FROM tv_shows '
            'WHERE paused = 0 AND airdate_offset < 0'
        )
        additional_search_offset = 0
        if min_offset_show and min_offset_show[0]['offsets'] > 0:
            additional_search_offset = int(ceil(abs(min_offset_show[0]['min_offset']) / 24.0))
            log.debug('Using an airdate offset of {min_offset_show} as we found show(s) with an airdate'
                      ' offset configured.', {'min_offset_show': min_offset_show[0]['min_offset']})

        cur_time = datetime.now(app_timezone)

        cur_date = (
            date.today() + timedelta(days=1 if network_dict else 2) + timedelta(days=additional_search_offset)
        ).toordinal()

        episodes_from_db = main_db_con.select(
            'SELECT indexer, showid, airdate, season, episode '
            'FROM tv_episodes '
            'WHERE status = ? AND (airdate <= ? and airdate > 1)',
            [common.UNAIRED, cur_date]
        )

        new_releases = []
        series_obj = None

        for db_episode in episodes_from_db:
            indexer_id = db_episode['indexer']
            series_id = db_episode['showid']
            try:
                if not series_obj or series_id != series_obj.indexerid:
                    series_obj = Show.find_by_id(app.showList, indexer_id, series_id)

                # for when there is an orphaned series in the database that is not loaded into our show list
                if not series_obj or series_obj.paused:
                    continue

            except MultipleShowObjectsException:
                log.info('ERROR: expected to find a single show matching {id}',
                         {'id': series_id})
                continue

            cur_ep = series_obj.get_episode(db_episode['season'], db_episode['episode'])

            if series_obj.airs and series_obj.network:
                # This is how you ensure it is always converted to local time
                show_air_time = parse_date_time(db_episode['airdate'], series_obj.airs, series_obj.network)
                end_time = show_air_time.astimezone(app_timezone) + timedelta(minutes=try_int(series_obj.runtime, 60))

                if series_obj.airdate_offset != 0:
                    log.debug(
                        '{show}: Applying an airdate offset for the episode: {episode} of {offset} hours',
                        {'show': series_obj.name, 'episode': cur_ep.pretty_name(), 'offset': series_obj.airdate_offset})

                # filter out any episodes that haven't finished airing yet
                if end_time + timedelta(hours=series_obj.airdate_offset) > cur_time:
                    continue

            with cur_ep.lock:
                cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED
                log.info(
                    'Setting status ({status}) for show airing today: {name} {special}', {
                        'name': cur_ep.pretty_name(),
                        'status': common.statusStrings[cur_ep.status],
                        'special': '(specials are not supported)' if not cur_ep.season else '',
                    }
                )
                new_releases.append(cur_ep.get_sql())

        if new_releases:
            main_db_con = DBConnection()
            main_db_con.mass_action(new_releases)

        # queue a daily search
        app.search_queue_scheduler.action.add_item(
            DailySearchQueueItem(scheduler_start_time, force=force)
        )

        self.amActive = False
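
The offset handling in this version converts the most negative per-show airdate_offset (stored in hours) into whole extra days so that the UNAIRED query window is widened enough to include those shows. A small worked sketch of just that arithmetic, with made-up values:

from datetime import date, timedelta
from math import ceil

min_offset_hours = -30  # pretend this is the most negative airdate_offset found in tv_shows
network_dict = True     # pretend the network timezone map is already loaded

# Hours -> whole days, rounded up, so a -30 hour offset widens the window by 2 days.
additional_search_offset = int(ceil(abs(min_offset_hours) / 24.0))

cur_date = (date.today()
            + timedelta(days=1 if network_dict else 2)
            + timedelta(days=additional_search_offset)).toordinal()
print(additional_search_offset, cur_date)  # 2, plus an ordinal date three days from today
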
Example #4
File: daily.py  Project: trentmsteel/Medusa
    def run(self, force=False):  # pylint:disable=too-many-branches
        """
        Run the daily searcher, queuing selected episodes for search.

        :param force: Force search
        """
        if self.amActive:
            log.debug('Daily search is still running, not starting it again')
            return
        elif (app.forced_search_queue_scheduler.action.is_forced_search_in_progress()
              and not force):
            log.warning(
                'Manual search is running. Unable to start Daily search')
            return

        self.amActive = True

        if not network_dict:
            update_network_dict()

        cur_time = datetime.now(app_timezone)
        cur_date = (date.today() +
                    timedelta(days=1 if network_dict else 2)).toordinal()

        main_db_con = DBConnection()
        episodes_from_db = main_db_con.select(
            b'SELECT indexer, showid, airdate, season, episode '
            b'FROM tv_episodes '
            b'WHERE status = ? AND (airdate <= ? and airdate > 1)',
            [common.UNAIRED, cur_date])

        new_releases = []
        series_obj = None

        for db_episode in episodes_from_db:
            indexer_id = db_episode[b'indexer']
            series_id = db_episode[b'showid']
            try:
                if not series_obj or series_id != series_obj.indexerid:
                    series_obj = Show.find_by_id(app.showList, indexer_id,
                                                 series_id)

                # for when there is an orphaned series in the database that is not loaded into our show list
                if not series_obj or series_obj.paused:
                    continue

            except MultipleShowObjectsException:
                log.info('ERROR: expected to find a single show matching {id}',
                         {'id': series_id})
                continue

            if series_obj.airs and series_obj.network:
                # This is how you ensure it is always converted to local time
                show_air_time = parse_date_time(db_episode[b'airdate'],
                                                series_obj.airs,
                                                series_obj.network)
                end_time = show_air_time.astimezone(app_timezone) + timedelta(
                    minutes=try_int(series_obj.runtime, 60))

                # filter out any episodes that haven't finished airing yet,
                if end_time > cur_time:
                    continue

            cur_ep = series_obj.get_episode(db_episode[b'season'],
                                            db_episode[b'episode'])
            with cur_ep.lock:
                cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED
                log.info(
                    'Setting status ({status}) for show airing today: {name} {special}',
                    {
                        'name': cur_ep.pretty_name(),
                        'status': common.statusStrings[cur_ep.status],
                        'special': '(specials are not supported)' if not cur_ep.season else '',
                    })
                new_releases.append(cur_ep.get_sql())

        if new_releases:
            main_db_con = DBConnection()
            main_db_con.mass_action(new_releases)

        # queue episode for daily search
        app.search_queue_scheduler.action.add_item(
            DailySearchQueueItem(force=force))

        self.amActive = False
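
This older variant applies no airdate offset, but the "has it finished airing" filter is the same: the localized air time plus the runtime must already be in the past before the episode is queued. A rough sketch of that comparison with plain timezone-aware datetimes (the 20:00 air time and 60 minute runtime are made up; in Medusa the air time comes from parse_date_time and the show's airs/network fields):

from datetime import datetime, timedelta, timezone

# Stand-in for the localized air time returned by parse_date_time().
show_air_time = datetime.now(timezone.utc).replace(hour=20, minute=0, second=0, microsecond=0)
runtime_minutes = 60

end_time = show_air_time + timedelta(minutes=runtime_minutes)
cur_time = datetime.now(timezone.utc)

# Same filter as in run(): only episodes that have finished airing fall through.
if end_time > cur_time:
    print('still airing (or not aired yet): skip this episode for now')
else:
    print('finished airing: eligible for the daily search status update')
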
Example #5
File: daily.py  Project: 5l1v3r1/Medusa-2
    def run(self, force=False):
        """
        Run the daily searcher, queuing selected episodes for search.

        :param force: Force search
        """
        if self.amActive:
            log.debug('Daily search is still running, not starting it again')
            return
        elif (app.forced_search_queue_scheduler.action.is_forced_search_in_progress()
              and not force):
            log.warning(
                'Manual search is running. Unable to start Daily search')
            return

        self.amActive = True
        # Let's keep track of the exact time the scheduler kicked in,
        # as we need to compare to this time for each provider.
        scheduler_start_time = int(time())

        if not network_dict:
            update_network_dict()

        # The tvshows airdate_offset field is used to configure a search offset for specific shows.
        # This way we can search/accept results early or late, depending on the value.
        main_db_con = DBConnection()
        min_offset_show = main_db_con.select(
            'SELECT COUNT(*) as offsets, MIN(airdate_offset) AS min_offset '
            'FROM tv_shows '
            'WHERE paused = 0 AND airdate_offset < 0')
        additional_search_offset = 0
        if min_offset_show and min_offset_show[0]['offsets'] > 0:
            additional_search_offset = int(
                ceil(abs(min_offset_show[0]['min_offset']) / 24.0))
            log.debug(
                'Using an airdate offset of {min_offset_show} as we found show(s) with an airdate'
                ' offset configured.',
                {'min_offset_show': min_offset_show[0]['min_offset']})

        cur_time = datetime.now(app_timezone)

        cur_date = (date.today() + timedelta(days=1 if network_dict else 2) +
                    timedelta(days=additional_search_offset)).toordinal()

        episodes_from_db = main_db_con.select(
            'SELECT indexer, showid, airdate, season, episode '
            'FROM tv_episodes '
            'WHERE status = ? AND (airdate <= ? and airdate > 1)',
            [common.UNAIRED, cur_date])

        new_releases = []
        series_obj = None

        for db_episode in episodes_from_db:
            indexer_id = db_episode['indexer']
            series_id = db_episode['showid']
            try:
                if not series_obj or series_id != series_obj.indexerid:
                    series_obj = Show.find_by_id(app.showList, indexer_id,
                                                 series_id)

                # for when there is an orphaned series in the database that is not loaded into our show list
                if not series_obj or series_obj.paused:
                    continue

            except MultipleShowObjectsException:
                log.info('ERROR: expected to find a single show matching {id}',
                         {'id': series_id})
                continue

            cur_ep = series_obj.get_episode(db_episode['season'],
                                            db_episode['episode'])

            if series_obj.airs and series_obj.network:
                # This is how you ensure it is always converted to local time
                show_air_time = parse_date_time(db_episode['airdate'],
                                                series_obj.airs,
                                                series_obj.network)
                end_time = show_air_time.astimezone(app_timezone) + timedelta(
                    minutes=try_int(series_obj.runtime, 60))

                if series_obj.airdate_offset != 0:
                    log.debug(
                        '{show}: Applying an airdate offset for the episode: {episode} of {offset} hours',
                        {
                            'show': series_obj.name,
                            'episode': cur_ep.pretty_name(),
                            'offset': series_obj.airdate_offset
                        })

                # filter out any episodes that haven't finished airing yet
                if end_time + timedelta(
                        hours=series_obj.airdate_offset) > cur_time:
                    continue

            with cur_ep.lock:
                cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED
                log.info(
                    'Setting status ({status}) for show airing today: {name} {special}',
                    {
                        'name': cur_ep.pretty_name(),
                        'status': common.statusStrings[cur_ep.status],
                        'special': '(specials are not supported)' if not cur_ep.season else '',
                    })
                new_releases.append(cur_ep.get_sql())

        if new_releases:
            main_db_con = DBConnection()
            main_db_con.mass_action(new_releases)

        # queue a daily search
        app.search_queue_scheduler.action.add_item(
            DailySearchQueueItem(scheduler_start_time, force=force))

        self.amActive = False
Example #6
    def find_search_results(self, series, episodes, search_mode, forced_search=False, download_current_quality=False,
                            manual_search=False, manual_search_type='episode'):
        """
        Search episodes based on param.

        Search the provider using http queries.
        :param series: Series object
        :param episodes: List of Episode objects
        :param search_mode: 'eponly' or 'sponly'
        :param forced_search: Flag if the search was triggered by a forced search
        :param download_current_quality: Flag if we want to include an already downloaded quality in the new search
        :param manual_search: Flag if the search was triggered by a manual search
        :param manual_search_type: How the manual search was started: For example an 'episode' or 'season'

        :return: A dict of search results, ordered by episode number.
        """
        self._check_auth()
        self.series = series

        results = {}
        items_list = []
        season_search = (len(episodes) > 1 or manual_search_type == 'season') and search_mode == 'sponly'

        for episode in episodes:
            search_strings = []
            if season_search:
                search_strings = self._get_season_search_strings(episode)
            elif search_mode == 'eponly':
                search_strings = self._get_episode_search_strings(episode)

            for search_string in search_strings:
                # Find results from the provider
                items_list += self.search(
                    search_string, ep_obj=episode, manual_search=manual_search
                )

            # In a season search we don't loop over every episode; one
            # episode is enough to build the season search string
            if search_mode == 'sponly':
                break

        # Remove duplicate items
        unique_items = self.remove_duplicate_mappings(items_list)
        log.debug('Found {0} unique items', len(unique_items))

        # categorize the items into lists by quality
        categorized_items = defaultdict(list)
        for item in unique_items:
            quality = self.get_quality(item, anime=series.is_anime)
            categorized_items[quality].append(item)

        # sort qualities in descending order
        sorted_qualities = sorted(categorized_items, reverse=True)
        log.debug('Found qualities: {0}', sorted_qualities)

        # chain items sorted by quality
        sorted_items = chain.from_iterable(
            categorized_items[quality]
            for quality in sorted_qualities
        )

        # unpack all of the quality lists into a single sorted list
        items_list = list(sorted_items)

        # Move through each item and parse it into a quality
        search_results = []
        for item in items_list:

            # Make sure we start with a TorrentSearchResult, NZBDataSearchResult or NZBSearchResult search result obj.
            search_result = self.get_result()
            search_results.append(search_result)
            search_result.item = item
            search_result.download_current_quality = download_current_quality
            # FIXME: Should be changed to search_result.search_type
            search_result.forced_search = forced_search

            (search_result.name, search_result.url) = self._get_title_and_url(item)
            (search_result.seeders, search_result.leechers) = self._get_result_info(item)

            search_result.size = self._get_size(item)
            search_result.pubdate = self._get_pubdate(item)

            search_result.result_wanted = True

            try:
                search_result.parsed_result = NameParser(
                    parse_method=('normal', 'anime')[series.is_anime]).parse(
                        search_result.name)
            except (InvalidNameException, InvalidShowException) as error:
                log.debug('Error during parsing of release name: {release_name}, with error: {error}',
                          {'release_name': search_result.name, 'error': error})
                search_result.add_cache_entry = False
                search_result.result_wanted = False
                continue

            # I don't know why I'm doing this. Maybe remove it altogether later on, now that I've added the parsed_result
            # to the search_result.
            search_result.series = search_result.parsed_result.series
            search_result.quality = search_result.parsed_result.quality
            search_result.release_group = search_result.parsed_result.release_group
            search_result.version = search_result.parsed_result.version
            search_result.actual_season = search_result.parsed_result.season_number
            search_result.actual_episodes = search_result.parsed_result.episode_numbers

            if not manual_search:
                if not (search_result.series.air_by_date or search_result.series.sports):
                    if search_mode == 'sponly':
                        if search_result.parsed_result.episode_numbers:
                            log.debug(
                                'This is supposed to be a season pack search but the result {0} is not a valid '
                                'season pack, skipping it', search_result.name
                            )
                            search_result.result_wanted = False
                            continue
                        elif not [ep for ep in episodes if
                                  search_result.parsed_result.season_number == (ep.season, ep.scene_season)
                                  [ep.series.is_scene]]:
                            log.debug(
                                'This season result {0} is for a season we are not searching for, '
                                'skipping it', search_result.name
                            )
                            search_result.result_wanted = False
                            continue
                    else:
                        # I'm going to split these up for better readability
                        # Check if we at least got a season parsed.
                        if search_result.parsed_result.season_number is None:
                            log.debug(
                                "The result {0} doesn't seem to have a valid season that we are currently trying to "
                                'snatch, skipping it', search_result.name
                            )
                            search_result.result_wanted = False
                            continue

                        # Check if we at least got some episode numbers parsed.
                        if not search_result.parsed_result.episode_numbers:
                            log.debug(
                                "The result {0} doesn't seem to match an episode that we are currently trying to "
                                'snatch, skipping it', search_result.name
                            )
                            search_result.result_wanted = False
                            continue

                        # Compare the episodes and season from the result with what was searched.
                        if not [searched_episode for searched_episode in episodes
                                if searched_episode.season == search_result.parsed_result.season_number and
                                (searched_episode.episode, searched_episode.scene_episode)
                                [searched_episode.series.is_scene] in
                                search_result.parsed_result.episode_numbers]:
                            log.debug(
                                "The result {0} doesn't seem to match an episode that we are currently trying to "
                                'snatch, skipping it', search_result.name
                            )
                            search_result.result_wanted = False
                            continue

                    # We've performed some checks to decide if we want to continue with this result.
                    # If we've hit this, that means this is not an air_by_date and not a sports show, and it seems to be
                    # a valid result. Let's store the parsed season and episode number and continue.
                    search_result.actual_season = search_result.parsed_result.season_number
                    search_result.actual_episodes = search_result.parsed_result.episode_numbers
                else:
                    # air_by_date or sports show.
                    search_result.same_day_special = False

                    if not search_result.parsed_result.is_air_by_date:
                        log.debug(
                            "This is supposed to be a date search but the result {0} didn't parse as one, "
                            'skipping it', search_result.name
                        )
                        search_result.result_wanted = False
                        continue
                    else:
                        # Use a query against the tv_episodes table, to match the parsed air_date against.
                        air_date = search_result.parsed_result.air_date.toordinal()
                        db = DBConnection()
                        sql_results = db.select(
                            'SELECT season, episode FROM tv_episodes WHERE indexer = ? AND showid = ? AND airdate = ?',
                            [search_result.series.indexer, search_result.series.series_id, air_date]
                        )

                        if len(sql_results) == 2:
                            if int(sql_results[0]['season']) == 0 and int(sql_results[1]['season']) != 0:
                                search_result.actual_season = int(sql_results[1]['season'])
                                search_result.actual_episodes = [int(sql_results[1]['episode'])]
                                search_result.same_day_special = True
                            elif int(sql_results[1]['season']) == 0 and int(sql_results[0]['season']) != 0:
                                search_result.actual_season = int(sql_results[0]['season'])
                                search_result.actual_episodes = [int(sql_results[0]['episode'])]
                                search_result.same_day_special = True
                        elif len(sql_results) != 1:
                            log.warning(
                                "Tried to look up the date for the episode {0} but the database didn't return proper "
                                'results, skipping it', search_result.name
                            )
                            search_result.result_wanted = False
                            continue

                        # @TODO: Need to verify and test this.
                        if search_result.result_wanted and not search_result.same_day_special:
                            search_result.actual_season = int(sql_results[0]['season'])
                            search_result.actual_episodes = [int(sql_results[0]['episode'])]

        cl = []
        # Iterate again over the search results, and see if there is anything we want.
        for search_result in search_results:

            # Try to cache the item if we want to.
            cache_result = search_result.add_result_to_cache(self.cache)
            if cache_result is not None:
                cl.append(cache_result)

            if not search_result.result_wanted:
                log.debug("We aren't interested in this result: {0} with url: {1}",
                          search_result.name, search_result.url)
                continue

            log.debug('Found result {0} at {1}', search_result.name, search_result.url)

            search_result.create_episode_object()
            # result = self.get_result(episode_object, search_result)
            search_result.finish_search_result(self)

            if not search_result.actual_episodes:
                episode_number = SEASON_RESULT
                log.debug('Found season pack result {0} at {1}', search_result.name, search_result.url)
            elif len(search_result.actual_episodes) == 1:
                episode_number = search_result.actual_episode
                log.debug('Found single episode result {0} at {1}', search_result.name, search_result.url)
            else:
                episode_number = MULTI_EP_RESULT
                log.debug('Found multi-episode ({0}) result {1} at {2}',
                          ', '.join(map(str, search_result.parsed_result.episode_numbers)),
                          search_result.name,
                          search_result.url)

            if episode_number not in results:
                results[episode_number] = [search_result]
            else:
                results[episode_number].append(search_result)

        if cl:
            # Access to a protected member of a client class
            db = self.cache._get_db()
            db.mass_action(cl)

        return results
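
All of the find_search_results variants above ultimately return a dict keyed by an episode number, with sentinel keys for season packs and multi-episode releases. The following compact sketch shows that bucketing on its own; the sentinel values here are placeholders, not Medusa's actual SEASON_RESULT / MULTI_EP_RESULT constants.

# Placeholder sentinels; Medusa defines its own SEASON_RESULT / MULTI_EP_RESULT constants.
SEASON_RESULT = -1
MULTI_EP_RESULT = -2


def bucket_result(results, actual_episodes, search_result):
    """File a search result under the right key: season pack, multi-episode or single episode."""
    if not actual_episodes:
        episode_number = SEASON_RESULT
    elif len(actual_episodes) == 1:
        episode_number = actual_episodes[0]
    else:
        episode_number = MULTI_EP_RESULT
    results.setdefault(episode_number, []).append(search_result)
    return results


results = {}
bucket_result(results, [], 'Show.S02.1080p.WEB-DL')            # season pack
bucket_result(results, [3], 'Show.S02E03.720p.HDTV')           # single episode
bucket_result(results, [3, 4], 'Show.S02E03E04.720p.HDTV')     # multi-episode
print(results)
# {-1: ['Show.S02.1080p.WEB-DL'], 3: ['Show.S02E03.720p.HDTV'], -2: ['Show.S02E03E04.720p.HDTV']}
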