def find_propers(self, search_date=None):
        results = []
        db = DBConnection()
        placeholder = ','.join([
            str(x) for x in Quality.DOWNLOADED + Quality.SNATCHED +
            Quality.SNATCHED_BEST
        ])
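        # NOTE: this method assumes the caller passes a real date; the default
        # search_date=None would raise on .toordinal() below.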
        sql_results = db.select(
            'SELECT s.show_name, s.lang, e.showid, e.season, e.episode, e.status, e.airdate'
            ' FROM tv_episodes AS e'
            ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)'
            ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
            ' AND e.status IN (' + placeholder + ')')

        for result in sql_results or []:
            show = Show.find(sickbeard.showList, int(result['showid']))

            if show:
                episode = show.getEpisode(result['season'], result['episode'])

                for term in self.proper_strings:
                    search_strings = self._get_episode_search_strings(
                        episode, add_string=term)

                    for item in self.search(search_strings[0]):
                        title, url = self._get_title_and_url(item)

                        results.append(
                            Proper(title, url, datetime.today(), show))

        return results
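
A hedged usage sketch for the method above: a proper-finder loop would call it with a recent cut-off date. The ``provider`` instance and the two-day look-back window are assumptions for illustration, not taken from the example itself.

    from datetime import date, timedelta

    # Hypothetical driver; 'provider' is assumed to be an instance of the
    # class that defines find_propers above, and Proper is assumed to expose
    # .name and .url as in SickRage's classes module.
    search_date = date.today() - timedelta(days=2)
    for proper in provider.find_propers(search_date):
        print('%s  %s' % (proper.name, proper.url))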
Example #2
    def overall_stats():
        db = DBConnection()
        shows = sickbeard.showList
        today = str(date.today().toordinal())

        downloaded_status = Quality.DOWNLOADED + Quality.ARCHIVED + Quality.WATCHED
        snatched_status = Quality.SNATCHED + Quality.SNATCHED_PROPER
        total_status = [SKIPPED, WANTED]

        results = db.select(
            "SELECT airdate, status " "FROM tv_episodes " "WHERE season > 0 " "AND episode > 0 " "AND airdate > 1"
        )

        stats = {
            "episodes": {"downloaded": 0, "snatched": 0, "total": 0},
            "shows": {
                "active": len([show for show in shows if show.paused == 0 and show.status == "Continuing"]),
                "total": len(shows),
            },
        }

        for result in results:
            if result["status"] in downloaded_status:
                stats["episodes"]["downloaded"] += 1
                stats["episodes"]["total"] += 1
            elif result["status"] in snatched_status:
                stats["episodes"]["snatched"] += 1
                stats["episodes"]["total"] += 1
            elif result["airdate"] <= today and result["status"] in total_status:
                stats["episodes"]["total"] += 1

        return stats
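
Since ``overall_stats()`` returns only plain dicts and ints, the result serializes directly, e.g. for a JSON status endpoint; a minimal caller sketch (the enclosing ``Statistics`` class name is an assumption, not shown in the example):

    import json

    # Hypothetical caller; assumes the method above is a @staticmethod on a
    # Statistics class.
    stats = Statistics.overall_stats()
    print(json.dumps(stats, indent=2))
    print('%d of %d shows active' % (stats['shows']['active'], stats['shows']['total']))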
Example #3
    def find_propers(self, search_date=None):
        results = []
        db = DBConnection()
        placeholder = ','.join([str(x) for x in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST])
        sql_results = db.select(
            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate'
            ' FROM tv_episodes AS e'
            ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)'
            ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
            ' AND e.status IN (' + placeholder + ')'
        )

        for result in sql_results or []:
            show = Show.find(sickbeard.showList, int(result['showid']))

            if show:
                episode = show.getEpisode(result['season'], result['episode'])

                for term in self.proper_strings:
                    search_strings = self._get_episode_search_strings(episode, add_string=term)

                    for item in self.search(search_strings[0]):
                        title, url = self._get_title_and_url(item)

                        results.append(Proper(title, url, datetime.today(), show))

        return results
Example #4
    def overall_stats():
        db = DBConnection()
        shows = sickbeard.showList
        today = str(date.today().toordinal())

        downloaded_status = Quality.DOWNLOADED + Quality.ARCHIVED
        snatched_status = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST
        total_status = [SKIPPED, WANTED]

        results = db.select('SELECT airdate, status '
                            'FROM tv_episodes '
                            'WHERE season > 0 '
                            'AND episode > 0 '
                            'AND airdate > 1')

        stats = {
            'episodes': {
                'downloaded': 0,
                'snatched': 0,
                'total': 0,
            },
            'shows': {
                'active': len([show for show in shows
                               if show.paused == 0 and show.status == 'Continuing']),
                'total': len(shows),
            },
        }

        for result in results:
            if result[b'status'] in downloaded_status:
                stats['episodes']['downloaded'] += 1
                stats['episodes']['total'] += 1
            elif result[b'status'] in snatched_status:
                stats['episodes']['snatched'] += 1
                stats['episodes']['total'] += 1
            elif result[b'airdate'] <= today and result[b'status'] in total_status:
                stats['episodes']['total'] += 1

        return stats
Example #5
    def overall_stats():
        db = DBConnection()
        shows = sickbeard.showList
        today = str(date.today().toordinal())

        downloaded_status = Quality.DOWNLOADED + Quality.ARCHIVED
        snatched_status = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST
        total_status = [SKIPPED, WANTED]

        results = db.select(
            'SELECT airdate, status '
            'FROM tv_episodes '
            'WHERE season > 0 '
            'AND episode > 0 '
            'AND airdate > 1'
        )

        stats = {
            'episodes': {
                'downloaded': 0,
                'snatched': 0,
                'total': 0,
            },
            'shows': {
                'active': len([show for show in shows if show.paused == 0 and show.status == 'Continuing']),
                'total': len(shows),
            },
        }

        for result in results:
            if result[b'status'] in downloaded_status:
                stats['episodes']['downloaded'] += 1
                stats['episodes']['total'] += 1
            elif result[b'status'] in snatched_status:
                stats['episodes']['snatched'] += 1
                stats['episodes']['total'] += 1
            elif result[b'airdate'] <= today and result[b'status'] in total_status:
                stats['episodes']['total'] += 1

        return stats
Example #6
    def __init__(self):
        self.db = DBConnection()
Example #7
class History:
    date_format = '%Y%m%d%H%M%S'

    def __init__(self):
        self.db = DBConnection()

    def clear(self):
        """
        Clear all the history
        """
        self.db.action(
            'DELETE '
            'FROM history '
            'WHERE 1 = 1'
        )

    def get(self, limit=100, action=None):
        """
        :param limit: The maximum number of elements to return
        :param action: The type of action to filter in the history. Either 'downloaded' or 'snatched'. Anything else or
                        no value will return everything (up to ``limit``)
        :return: The last ``limit`` elements of type ``action`` in the history
        """

        action = action.lower() if isinstance(action, str) else ''
        limit = int(limit)

        if action == 'downloaded':
            actions = Quality.DOWNLOADED
        elif action == 'snatched':
            actions = Quality.SNATCHED
        else:
            actions = []

        common_sql = 'SELECT action, date, episode, provider, h.quality, resource, season, show_name, showid ' \
                     'FROM history h, tv_shows s ' \
                     'WHERE h.showid = s.indexer_id '
        filter_sql = 'AND action in (' + ','.join(['?'] * len(actions)) + ') '
        order_sql = 'ORDER BY date DESC '

        if limit == 0:
            if len(actions) > 0:
                results = self.db.select(common_sql + filter_sql + order_sql,
                                         actions)
            else:
                results = self.db.select(common_sql + order_sql)
        else:
            if len(actions) > 0:
                results = self.db.select(
                    common_sql + filter_sql + order_sql + 'LIMIT ?',
                    actions + [limit])
            else:
                results = self.db.select(common_sql + order_sql + 'LIMIT ?',
                                         [limit])

        data = []
        for result in results:
            data.append({
                'action': result['action'],
                'date': result['date'],
                'episode': result['episode'],
                'provider': result['provider'],
                'quality': result['quality'],
                'resource': result['resource'],
                'season': result['season'],
                'show_id': result['showid'],
                'show_name': result['show_name']
            })

        return data

    def trim(self):
        """
        Remove all elements older than 30 days from the history
        """

        self.db.action('DELETE '
                       'FROM history '
                       'WHERE date < ?',
                       [(datetime.today() - timedelta(days=30)).strftime(
                           History.date_format)])
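
A short usage sketch for this class; the method names come from the example, while the surrounding setup is assumed:

    history = History()

    # Last ten snatches, newest first (get() orders by date DESC).
    for entry in history.get(limit=10, action='snatched'):
        print('%s  %s  %s' % (entry['date'], entry['show_name'], entry['resource']))

    history.trim()  # drop rows older than 30 days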
Example #8
    def find_search_results(self, show, episodes, search_mode,  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
                            manual_search=False, download_current_quality=False):
        self._check_auth()
        self.show = show

        results = {}
        items_list = []
        searched_scene_season = None

        for episode in episodes:
            cache_result = self.cache.searchCache(episode, manualSearch=manual_search,
                                                  downCurQuality=download_current_quality)
            if cache_result:
                if episode.episode not in results:
                    results[episode.episode] = cache_result
                else:
                    results[episode.episode].extend(cache_result)

                continue

            if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == episode.scene_season:
                continue

            search_strings = []
            searched_scene_season = episode.scene_season

            if len(episodes) > 1 and search_mode == 'sponly':
                search_strings = self._get_season_search_strings(episode)
            elif search_mode == 'eponly':
                search_strings = self._get_episode_search_strings(episode)

            for search_string in search_strings:
                items_list += self.search(search_string, ep_obj=episode)

        if len(results) == len(episodes):
            return results

        if items_list:
            items = {}
            unknown_items = []

            for item in items_list:
                quality = self.get_quality(item, anime=show.is_anime)

                if quality == Quality.UNKNOWN:
                    unknown_items.append(item)
                else:
                    if quality not in items:
                        items[quality] = []
                    items[quality].append(item)

            items_list = list(chain(*[v for (k_, v) in sorted(items.iteritems(), reverse=True)]))
            items_list += unknown_items

        cl = []

        for item in items_list:
            (title, url) = self._get_title_and_url(item)

            try:
                parse_result = NameParser(parse_method=('normal', 'anime')[show.is_anime]).parse(title)
            except (InvalidNameException, InvalidShowException) as error:
                logger.log(u"{0}".format(error), logger.DEBUG)
                continue

            show_object = parse_result.show
            quality = parse_result.quality
            release_group = parse_result.release_group
            version = parse_result.version
            add_cache_entry = False

            if not (show_object.air_by_date or show_object.sports):
                if search_mode == 'sponly':
                    if parse_result.episode_numbers:
                        logger.log(
                            u'This is supposed to be a season pack search but the result {0} is not a valid season pack, skipping it'.format(title),
                            logger.DEBUG
                        )
                        add_cache_entry = True
                    elif not [ep for ep in episodes if parse_result.season_number == (ep.season, ep.scene_season)[ep.show.is_scene]]:
                        logger.log(
                            u'This season result {0} is for a season we are not searching for, skipping it'.format(title),
                            logger.DEBUG
                        )
                        add_cache_entry = True

                else:
                    if not all([
                        # pylint: disable=bad-continuation
                        parse_result.season_number is not None,
                        parse_result.episode_numbers,
                        [ep for ep in episodes if (ep.season, ep.scene_season)[ep.show.is_scene] ==
                        (parse_result.season_number, parse_result.scene_season)[ep.show.is_scene] and
                        (ep.episode, ep.scene_episode)[ep.show.is_scene] in parse_result.episode_numbers]
                    ]):

                        logger.log(
                            u'The result {0} doesn\'t seem to match an episode that we are currently trying to snatch, skipping it'.format(title),
                            logger.DEBUG)
                        add_cache_entry = True

                if not add_cache_entry:
                    actual_season = parse_result.season_number
                    actual_episodes = parse_result.episode_numbers
            else:
                same_day_special = False

                if not parse_result.is_air_by_date:
                    logger.log(
                        u'This is supposed to be a date search but the result {0} didn\'t parse as one, skipping it'.format(title),
                        logger.DEBUG)
                    add_cache_entry = True
                else:
                    air_date = parse_result.air_date.toordinal()
                    db = DBConnection()
                    sql_results = db.select(
                        'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                        [show_object.indexerid, air_date]
                    )

                    if len(sql_results) == 2:
                        if int(sql_results[0]['season']) == 0 and int(sql_results[1]['season']) != 0:
                            actual_season = int(sql_results[1]['season'])
                            actual_episodes = [int(sql_results[1]['episode'])]
                            same_day_special = True
                        elif int(sql_results[1]['season']) == 0 and int(sql_results[0]['season']) != 0:
                            actual_season = int(sql_results[0]['season'])
                            actual_episodes = [int(sql_results[0]['episode'])]
                            same_day_special = True
                    elif len(sql_results) != 1:
                        logger.log(
                            u'Tried to look up the date for the episode {0} but the database didn\'t give proper results, skipping it'.format(title),
                            logger.WARNING)
                        add_cache_entry = True

                if not add_cache_entry and not same_day_special:
                    actual_season = int(sql_results[0]['season'])
                    actual_episodes = [int(sql_results[0]['episode'])]

            if add_cache_entry:
                logger.log(u'Adding item from search to cache: {0}'.format(title), logger.DEBUG)
                # pylint: disable=protected-access
                # Access to a protected member of a client class
                ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)

                if ci is not None:
                    cl.append(ci)

                continue

            episode_wanted = True

            for episode_number in actual_episodes:
                if not show_object.wantEpisode(actual_season, episode_number, quality, manual_search,
                                               download_current_quality):
                    episode_wanted = False
                    break

            if not episode_wanted:
                logger.log(u'Ignoring result {0}.'.format(title), logger.DEBUG)
                continue

            logger.log(u'Found result {0} at {1}'.format(title, url), logger.DEBUG)

            episode_object = []
            for current_episode in actual_episodes:
                episode_object.append(show_object.getEpisode(actual_season, current_episode))

            result = self.get_result(episode_object)
            result.show = show_object
            result.url = url
            result.name = title
            result.quality = quality
            result.release_group = release_group
            result.version = version
            result.content = None
            result.size = self._get_size(item)

            if len(episode_object) == 1:
                episode_number = episode_object[0].episode
                logger.log(u'Single episode result.', logger.DEBUG)
            elif len(episode_object) > 1:
                episode_number = MULTI_EP_RESULT
                logger.log(u'Separating multi-episode result to check for later - result contains episodes: {0}'.format(
                    parse_result.episode_numbers), logger.DEBUG)
            elif len(episode_object) == 0:
                episode_number = SEASON_RESULT
                logger.log(u'Separating full season result to check for later', logger.DEBUG)

            if episode_number not in results:
                results[episode_number] = [result]
            else:
                results[episode_number].append(result)

        if cl:
            # pylint: disable=protected-access
            # Access to a protected member of a client class
            cache_db = self.cache._getDB()
            cache_db.mass_action(cl)

        return results
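
The quality-bucketing step in the method above (group results by quality, sort the buckets descending, re-flatten, unknowns last) can be illustrated standalone; the quality values and item names here are made up:

    from collections import defaultdict
    from itertools import chain

    UNKNOWN = -1  # illustrative stand-in for Quality.UNKNOWN

    items_by_quality = defaultdict(list)
    for name, quality in [('a', 720), ('b', UNKNOWN), ('c', 1080), ('d', 720)]:
        items_by_quality[quality].append(name)

    # Pull the unknown-quality items aside, flatten the rest best-first,
    # then append the unknowns at the bottom.
    unknown_items = items_by_quality.pop(UNKNOWN, [])
    ordered = list(chain(*(items_by_quality[q] for q in sorted(items_by_quality, reverse=True))))
    ordered += unknown_items
    print(ordered)  # ['c', 'a', 'd', 'b']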
Example #9
    def get_coming_episodes(categories,
                            sort,
                            group,
                            paused=sickbeard.COMING_EPS_DISPLAY_PAUSED):
        """
        :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
        :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
        :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
        :param paused: ``True`` to include paused shows, ``False`` otherwise
        :return: The list of coming episodes
        """

        categories = ComingEpisodes._get_categories(categories)
        sort = ComingEpisodes._get_sort(sort)

        today = date.today().toordinal()
        recently = (
            date.today() -
            timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal()
        next_week = (date.today() + timedelta(days=7)).toordinal()

        db = DBConnection(row_type='dict')
        fields_to_select = ', '.join([
            'airdate', 'airs', 'e.description as description', 'episode',
            'imdb_id', 'e.indexer', 'indexer_id', 'e.location', 'name',
            'network', 'paused', 'quality', 'runtime', 'season', 'show_name',
            'showid', 'e.status as epstatus', 's.status'
        ])

        status_list = [WANTED, UNAIRED] + SNATCHED

        sql_l = []
        for show_obj in sickbeard.showList:
            next_air_date = show_obj.nextEpisode()
            sql_l.append([
                'SELECT DISTINCT {0} '.format(fields_to_select) +
                'FROM tv_episodes e, tv_shows s '
                'WHERE showid = ? '
                'AND airdate <= ? '
                'AND airdate >= ? '
                'AND s.indexer_id = e.showid '
                'AND e.status IN (' + ','.join(['?'] * len(status_list)) + ')',
                [show_obj.indexerid, next_air_date or today, recently] +
                status_list
            ])

        results = []
        for sql_i in sql_l:
            if results:
                results += db.select(*sql_i)
            else:
                results = db.select(*sql_i)

        for index, item in enumerate(results):
            results[index][b'localtime'] = sbdatetime.convert_to_setting(
                parse_date_time(item[b'airdate'], item[b'airs'],
                                item[b'network']))
            results[index][b'snatchedsort'] = int(
                not results[index][b'epstatus'] in SNATCHED)

        results.sort(key=ComingEpisodes.sorts[sort])

        if not group:
            return results

        grouped_results = ComingEpisodes._get_categories_map(categories)

        for result in results:
            if result[b'paused'] and not paused:
                continue

            result[b'airs'] = str(result[b'airs']).replace(
                'am', ' AM').replace('pm', ' PM').replace('  ', ' ')
            result[b'airdate'] = result[b'localtime'].toordinal()

            if result[b'epstatus'] in SNATCHED:
                if result[b'location']:
                    continue
                else:
                    category = 'snatched'
            elif result[b'airdate'] < today:
                category = 'missed'
            elif result[b'airdate'] >= next_week:
                category = 'later'
            elif result[b'airdate'] == today:
                category = 'today'
            else:
                category = 'soon'

            if len(categories) > 0 and category not in categories:
                continue

            if not result[b'network']:
                result[b'network'] = ''

            result[b'quality'] = get_quality_string(result[b'quality'])
            result[b'airs'] = sbdatetime.sbftime(
                result[b'localtime'],
                t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
            result[b'weekday'] = 1 + result[b'localtime'].weekday()
            result[b'tvdbid'] = result[b'indexer_id']
            result[b'airdate'] = sbdatetime.sbfdate(result[b'localtime'],
                                                    d_preset=dateFormat)
            result[b'localtime'] = result[b'localtime'].toordinal()

            grouped_results[category].append(result)

        return grouped_results
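
A hedged call sketch; the category and sort names are inferred from the branches above ('missed', 'today', 'soon', 'later', 'snatched' and the 'date' sort) and may not cover everything ``ComingEpisodes`` accepts:

    # Hypothetical caller: grouped schedule for the UI, excluding paused shows.
    grouped = ComingEpisodes.get_coming_episodes(
        categories=['missed', 'today', 'soon', 'later'],
        sort='date',
        group=True,
        paused=False,
    )
    for category in ('missed', 'today', 'soon', 'later'):
        print('%s: %d' % (category, len(grouped[category])))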
Example #10
    def run(self, force=False):  # pylint:disable=too-many-branches
        """
        Runs the daily searcher, queuing selected episodes for search

        :param force: Force search
        """
        if self.amActive:
            logger.log('Daily search is still running, not starting it again', logger.DEBUG)
            return
        elif sickbeard.forcedSearchQueueScheduler.action.is_forced_search_in_progress() and not force:
            logger.log('Manual search is running. Can\'t start Daily search', logger.WARNING)
            return

        self.amActive = True

        logger.log('Searching for newly released episodes ...')

        if not network_dict:
            update_network_dict()

        cur_time = datetime.now(sb_timezone)
        cur_date = (
            date.today() + timedelta(days=1 if network_dict else 2)
        ).toordinal()

        main_db_con = DBConnection()
        episodes_from_db = main_db_con.select(
            b'SELECT showid, airdate, season, episode '
            b'FROM tv_episodes '
            b'WHERE status = ? AND (airdate <= ? and airdate > 1)',
            [common.UNAIRED, cur_date]
        )

        new_releases = []
        show = None

        for db_episode in episodes_from_db:
            try:
                show_id = int(db_episode[b'showid'])
                if not show or show_id != show.indexerid:
                    show = Show.find(sickbeard.showList, show_id)

                # for when there is orphaned series in the database but not loaded into our show list
                if not show or show.paused:
                    continue

            except MultipleShowObjectsException:
                logger.log('ERROR: expected to find a single show matching {id}'.format(id=show_id))
                continue

            if show.airs and show.network:
                # This is how you assure it is always converted to local time
                show_air_time = parse_date_time(db_episode[b'airdate'], show.airs, show.network)
                end_time = show_air_time.astimezone(sb_timezone) + timedelta(minutes=try_int(show.runtime, 60))

                # filter out any episodes that haven't finished airing yet,
                if end_time > cur_time:
                    continue

            cur_ep = show.get_episode(db_episode[b'season'], db_episode[b'episode'])
            with cur_ep.lock:
                cur_ep.status = show.default_ep_status if cur_ep.season else common.SKIPPED
                logger.log('Setting status ({status}) for show airing today: {name} {special}'.format(
                    name=cur_ep.pretty_name(),
                    status=common.statusStrings[cur_ep.status],
                    special='(specials are not supported)' if not cur_ep.season else ''
                ))
                new_releases.append(cur_ep.get_sql())

        if new_releases:
            main_db_con = DBConnection()
            main_db_con.mass_action(new_releases)
        else:
            logger.log('No newly released episodes found ...')

        # queue episode for daily search
        sickbeard.searchQueueScheduler.action.add_item(
            DailySearchQueueItem()
        )

        self.amActive = False
Example #11
class History:
    date_format = '%Y%m%d%H%M%S'

    def __init__(self):
        self.db = DBConnection()

    def clear(self):
        """
        Clear all the history
        """
        self.db.action(
            'DELETE '
            'FROM history '
            'WHERE 1 = 1'
        )

    def get(self, limit=100, action=None):
        """
        :param limit: The maximum number of elements to return
        :param action: The type of action to filter in the history. Either 'downloaded' or 'snatched'. Anything else or
                        no value will return everything (up to ``limit``)
        :return: The last ``limit`` elements of type ``action`` in the history
        """

        actions = History._get_actions(action)
        limit = History._get_limit(limit)

        common_sql = 'SELECT action, date, episode, provider, h.quality, resource, season, show_name, showid ' \
                     'FROM history h, tv_shows s ' \
                     'WHERE h.showid = s.indexer_id '
        filter_sql = 'AND action in (' + ','.join(['?'] * len(actions)) + ') '
        order_sql = 'ORDER BY date DESC '

        if limit == 0:
            if len(actions) > 0:
                results = self.db.select(common_sql + filter_sql + order_sql, actions)
            else:
                results = self.db.select(common_sql + order_sql)
        else:
            if len(actions) > 0:
                results = self.db.select(common_sql + filter_sql + order_sql + 'LIMIT ?', actions + [limit])
            else:
                results = self.db.select(common_sql + order_sql + 'LIMIT ?', [limit])

        data = []
        for result in results:
            data.append({
                'action': result['action'],
                'date': result['date'],
                'episode': result['episode'],
                'provider': result['provider'],
                'quality': result['quality'],
                'resource': result['resource'],
                'season': result['season'],
                'show_id': result['showid'],
                'show_name': result['show_name']
            })

        return data

    def trim(self):
        """
        Remove all elements older than 30 days from the history
        """

        self.db.action(
            'DELETE '
            'FROM history '
            'WHERE date < ?',
            [(datetime.today() - timedelta(days=30)).strftime(History.date_format)]
        )

    @staticmethod
    def _get_actions(action):
        action = action.lower() if isinstance(action, (str, unicode)) else ''

        if action == 'downloaded':
            return Quality.DOWNLOADED

        if action == 'snatched':
            return Quality.SNATCHED

        return []

    @staticmethod
    def _get_limit(limit):
        limit = try_int(limit, 0)

        return max(limit, 0)
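
The ``filter_sql`` construction above (one '?' per value, comma-joined) is how a variable-length IN clause stays parameterized; a self-contained illustration of the same pattern against an in-memory SQLite database, with made-up status codes:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE history (action INTEGER, date TEXT)')
    conn.executemany('INSERT INTO history VALUES (?, ?)',
                     [(100, '20200101000000'), (200, '20200102000000')])

    actions = [100, 404]  # illustrative status codes
    filter_sql = 'AND action in (' + ','.join(['?'] * len(actions)) + ') '
    rows = conn.execute('SELECT action, date FROM history WHERE 1 = 1 ' + filter_sql +
                        'ORDER BY date DESC LIMIT ?', actions + [10]).fetchall()
    print(rows)  # [(100, '20200101000000')]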
Example #12
    def get_coming_episodes(categories, sort, group, paused=sickbeard.COMING_EPS_DISPLAY_PAUSED):
        """
        :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
        :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
        :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
        :param paused: ``True`` to include paused shows, ``False`` otherwise
        :return: The list of coming episodes
        """

        if not isinstance(categories, list):
            categories = categories.split("|")

        if sort not in ComingEpisodes.sorts.keys():
            sort = "date"

        today = date.today().toordinal()
        next_week = (date.today() + timedelta(days=7)).toordinal()
        recently = (date.today() - timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal()
        qualities_list = (
            Quality.DOWNLOADED
            + Quality.SNATCHED
            + Quality.SNATCHED_BEST
            + Quality.SNATCHED_PROPER
            + Quality.ARCHIVED
            + [IGNORED]
        )

        db = DBConnection()
        fields_to_select = ", ".join(
            [
                "airdate",
                "airs",
                "description",
                "episode",
                "imdb_id",
                "e.indexer",
                "indexer_id",
                "name",
                "network",
                "paused",
                "quality",
                "runtime",
                "season",
                "show_name",
                "showid",
                "s.status",
            ]
        )
        results = db.select(
            "SELECT %s " % fields_to_select + "FROM tv_episodes e, tv_shows s "
            "WHERE season != 0 "
            "AND airdate >= ? "
            "AND airdate < ? "
            "AND s.indexer_id = e.showid "
            "AND e.status NOT IN (" + ",".join(["?"] * len(qualities_list)) + ")",
            [today, next_week] + qualities_list,
        )

        done_shows_list = [int(result[b"showid"]) for result in results]
        placeholder = ",".join(["?"] * len(done_shows_list))
        placeholder2 = ",".join(
            ["?"] * len(Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER)
        )

        results += db.select(
            "SELECT %s " % fields_to_select + "FROM tv_episodes e, tv_shows s "
            "WHERE season != 0 "
            "AND showid NOT IN (" + placeholder + ") "
            "AND s.indexer_id = e.showid "
            "AND airdate = (SELECT airdate "
            "FROM tv_episodes inner_e "
            "WHERE inner_e.season != 0 "
            "AND inner_e.showid = e.showid "
            "AND inner_e.airdate >= ? "
            "ORDER BY inner_e.airdate ASC LIMIT 1) "
            "AND e.status NOT IN (" + placeholder2 + ")",
            done_shows_list
            + [next_week]
            + Quality.DOWNLOADED
            + Quality.SNATCHED
            + Quality.SNATCHED_BEST
            + Quality.SNATCHED_PROPER,
        )

        results += db.select(
            "SELECT %s " % fields_to_select + "FROM tv_episodes e, tv_shows s "
            "WHERE season != 0 "
            "AND s.indexer_id = e.showid "
            "AND airdate < ? "
            "AND airdate >= ? "
            "AND e.status IN (?,?) "
            "AND e.status NOT IN (" + ",".join(["?"] * len(qualities_list)) + ")",
            [today, recently, WANTED, UNAIRED] + qualities_list,
        )

        results = [dict(result) for result in results]

        for index, item in enumerate(results):
            results[index][b"localtime"] = sbdatetime.convert_to_setting(
                parse_date_time(item[b"airdate"], item[b"airs"], item[b"network"])
            )

        results.sort(ComingEpisodes.sorts[sort])

        if not group:
            return results

        grouped_results = {category: [] for category in categories}

        for result in results:
            if result[b"paused"] and not paused:
                continue

            result[b"airs"] = str(result[b"airs"]).replace("am", " AM").replace("pm", " PM").replace("  ", " ")
            result[b"airdate"] = result[b"localtime"].toordinal()

            if result[b"airdate"] < today:
                category = "missed"
            elif result[b"airdate"] >= next_week:
                category = "later"
            elif result[b"airdate"] == today:
                category = "today"
            else:
                category = "soon"

            if len(categories) > 0 and category not in categories:
                continue

            if not result[b"network"]:
                result[b"network"] = ""

            result[b"quality"] = get_quality_string(result[b"quality"])
            result[b"airs"] = (
                sbdatetime.sbftime(result[b"localtime"], t_preset=timeFormat).lstrip("0").replace(" 0", " ")
            )
            result[b"weekday"] = 1 + date.fromordinal(result[b"airdate"]).weekday()
            result[b"tvdbid"] = result[b"indexer_id"]
            result[b"airdate"] = sbdatetime.sbfdate(result[b"localtime"], d_preset=dateFormat)
            result[b"localtime"] = result[b"localtime"].toordinal()

            grouped_results[category].append(result)

        return grouped_results
Example #13
    def find_search_results(self, show, episodes, search_mode, manual_search=False, download_current_quality=False):  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
        self._check_auth()
        self.show = show

        results = {}
        items_list = []
        searched_scene_season = None

        for episode in episodes:
            cache_result = self.cache.searchCache(episode, manualSearch=manual_search,
                                                  downCurQuality=download_current_quality)

            if cache_result:
                if episode.episode not in results:
                    results[episode.episode] = cache_result
                else:
                    results[episode.episode].extend(cache_result)

                continue

            if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == episode.scene_season:
                continue

            search_strings = []
            searched_scene_season = episode.scene_season

            if len(episodes) > 1 and search_mode == 'sponly':
                search_strings = self._get_season_search_strings(episode)
            elif search_mode == 'eponly':
                search_strings = self._get_episode_search_strings(episode)

            first = search_strings and isinstance(search_strings[0], dict) and 'rid' in search_strings[0]
            if first:
                logger.log(u'First search_string has rid', logger.DEBUG)

            for search_string in search_strings:
                items_list += self.search(search_string, ep_obj=episode)

                if first:
                    first = False

                    if items_list:
                        logger.log(u'First search_string had rid, and returned results, skipping query by string',
                                   logger.DEBUG)
                        break

                    logger.log(u'First search_string had rid, but returned no results, searching with string query',
                               logger.DEBUG)

        if len(results) == len(episodes):
            return results

        if items_list:
            items = {}
            unknown_items = []

            for item in items_list:
                quality = self.get_quality(item, anime=show.is_anime)

                if quality == Quality.UNKNOWN:
                    unknown_items.append(item)
                else:
                    if quality not in items:
                        items[quality] = []
                    items[quality].append(item)

            items_list = list(chain(*[v for (_, v) in sorted(items.iteritems(), reverse=True)]))
            items_list += unknown_items

        cl = []

        for item in items_list:
            (title, url) = self._get_title_and_url(item)

            try:
                parser = NameParser(parse_method=('normal', 'anime')[show.is_anime])
                parse_result = parser.parse(title)
            except InvalidNameException:
                logger.log(u'Unable to parse the filename %s into a valid episode' % title, logger.DEBUG)
                continue
            except InvalidShowException:
                logger.log(u'Unable to parse the filename %s into a valid show' % title, logger.DEBUG)
                continue

            show_object = parse_result.show
            quality = parse_result.quality
            release_group = parse_result.release_group
            version = parse_result.version
            add_cache_entry = False

            if not (show_object.air_by_date or show_object.sports):
                if search_mode == 'sponly':
                    if len(parse_result.episode_numbers):
                        logger.log(
                            u'This is supposed to be a season pack search but the result %s is not a valid season pack, skipping it' % title,
                            logger.DEBUG
                        )
                        add_cache_entry = True

                    if len(parse_result.episode_numbers) and \
                       (parse_result.season_number not in set([ep.season for ep in episodes]) or
                        not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                        logger.log(
                            u'The result %s doesn\'t seem to be a valid episode that we are trying to snatch, ignoring' % title,
                            logger.DEBUG)
                        add_cache_entry = True
                else:
                    if not len(parse_result.episode_numbers) and parse_result.season_number and \
                            not [ep for ep in episodes
                                 if ep.season == parse_result.season_number and
                                 ep.episode in parse_result.episode_numbers]:
                        logger.log(
                            u'The result %s doesn\'t seem to be a valid season that we are trying to snatch, ignoring' % title,
                            logger.DEBUG)
                        add_cache_entry = True
                    elif len(parse_result.episode_numbers) and \
                            not [ep for ep in episodes
                                 if ep.season == parse_result.season_number and
                                 ep.episode in parse_result.episode_numbers]:
                        logger.log(
                            u'The result %s doesn\'t seem to be a valid episode that we are trying to snatch, ignoring' % title,
                            logger.DEBUG)
                        add_cache_entry = True

                if not add_cache_entry:
                    actual_season = parse_result.season_number
                    actual_episodes = parse_result.episode_numbers
            else:
                same_day_special = False

                if not parse_result.is_air_by_date:
                    logger.log(
                        u'This is supposed to be a date search but the result %s didn\'t parse as one, skipping it' % title,
                        logger.DEBUG)
                    add_cache_entry = True
                else:
                    air_date = parse_result.air_date.toordinal()
                    db = DBConnection()
                    sql_results = db.select(
                        'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                        [show_object.indexerid, air_date]
                    )

                    if len(sql_results) == 2:
                        if int(sql_results[0]['season']) == 0 and int(sql_results[1]['season']) != 0:
                            actual_season = int(sql_results[1]['season'])
                            actual_episodes = [int(sql_results[1]['episode'])]
                            same_day_special = True
                        elif int(sql_results[1]['season']) == 0 and int(sql_results[0]['season']) != 0:
                            actual_season = int(sql_results[0]['season'])
                            actual_episodes = [int(sql_results[0]['episode'])]
                            same_day_special = True
                    elif len(sql_results) != 1:
                        logger.log(
                            u'Tried to look up the date for the episode %s but the database didn\'t give proper results, skipping it' % title,
                            logger.WARNING)
                        add_cache_entry = True

                if not add_cache_entry and not same_day_special:
                    actual_season = int(sql_results[0]['season'])
                    actual_episodes = [int(sql_results[0]['episode'])]

            if add_cache_entry:
                logger.log(u'Adding item from search to cache: %s' % title, logger.DEBUG)
                # pylint: disable=protected-access
                # Access to a protected member of a client class
                ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)

                if ci is not None:
                    cl.append(ci)

                continue

            episode_wanted = True

            for episode_number in actual_episodes:
                if not show_object.wantEpisode(actual_season, episode_number, quality, manual_search,
                                               download_current_quality):
                    episode_wanted = False
                    break

            if not episode_wanted:
                logger.log(u'Ignoring result %s because we don\'t want an episode that is %s' % (
                    title, Quality.qualityStrings[quality]), logger.INFO)
                continue

            logger.log(u'Found result %s at %s' % (title, url), logger.DEBUG)

            episode_object = []
            for current_episode in actual_episodes:
                episode_object.append(show_object.getEpisode(actual_season, current_episode))

            result = self.get_result(episode_object)
            result.show = show_object
            result.url = url
            result.name = title
            result.quality = quality
            result.release_group = release_group
            result.version = version
            result.content = None
            result.size = self._get_size(item)

            if len(episode_object) == 1:
                episode_number = episode_object[0].episode
                logger.log(u'Single episode result.', logger.DEBUG)
            elif len(episode_object) > 1:
                episode_number = MULTI_EP_RESULT
                logger.log(u'Separating multi-episode result to check for later - result contains episodes: %s' % str(
                    parse_result.episode_numbers), logger.DEBUG)
            elif len(episode_object) == 0:
                episode_number = SEASON_RESULT
                logger.log(u'Separating full season result to check for later', logger.DEBUG)

            if episode_number not in results:
                results[episode_number] = [result]
            else:
                results[episode_number].append(result)

        if len(cl) > 0:
            # pylint: disable=protected-access
            # Access to a protected member of a client class
            db = self.cache._getDB()
            db.mass_action(cl)

        return results
Example #14
    def get_coming_episodes(categories, sort, group, paused=sickbeard.COMING_EPS_DISPLAY_PAUSED):
        """
        :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
        :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
        :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
        :param paused: ``True`` to include paused shows, ``False`` otherwise
        :return: The list of coming episodes
        """

        categories = ComingEpisodes._get_categories(categories)
        sort = ComingEpisodes._get_sort(sort)

        today = date.today().toordinal()
        recently = (date.today() - timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal()
        next_week = (date.today() + timedelta(days=7)).toordinal()

        db = DBConnection(row_type='dict')
        fields_to_select = ', '.join(
            ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer', 'indexer_id', 'name', 'network',
             'paused', 'quality', 'runtime', 'season', 'show_name', 'showid', 's.status']
        )

        sql_l = []
        for show_obj in sickbeard.showList:
            next_air_date = show_obj.nextEpisode()
            if next_air_date:
                sql_l.append(
                    [
                        'SELECT DISTINCT {0} '.format(fields_to_select) +
                        'FROM tv_episodes e, tv_shows s '
                        'WHERE showid = ? '
                        'AND airdate <= ? '
                        'AND airdate >= ? '
                        'AND s.indexer_id = e.showid '
                        'AND e.status IN (' + ','.join(['?'] * 2) + ')',
                        [show_obj.indexerid, next_air_date, recently, WANTED, UNAIRED]
                    ]
                )

        results = []
        for sql_i in sql_l:
            if results:
                results += db.select(*sql_i)
            else:
                results = db.select(*sql_i)

        for index, item in enumerate(results):
            results[index][b'localtime'] = sbdatetime.convert_to_setting(
                parse_date_time(item[b'airdate'], item[b'airs'], item[b'network']))

        results.sort(key=itemgetter(b'localtime'))
        results.sort(ComingEpisodes.sorts[sort])

        if not group:
            return results

        grouped_results = ComingEpisodes._get_categories_map(categories)

        for result in results:
            if result[b'paused'] and not paused:
                continue

            result[b'airs'] = str(result[b'airs']).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
            result[b'airdate'] = result[b'localtime'].toordinal()

            if result[b'airdate'] < today:
                category = 'missed'
            elif result[b'airdate'] >= next_week:
                category = 'later'
            elif result[b'airdate'] == today:
                category = 'today'
            else:
                category = 'soon'

            if len(categories) > 0 and category not in categories:
                continue

            if not result[b'network']:
                result[b'network'] = ''

            result[b'quality'] = get_quality_string(result[b'quality'])
            result[b'airs'] = sbdatetime.sbftime(result[b'localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
            result[b'weekday'] = 1 + result[b'localtime'].weekday()
            result[b'tvdbid'] = result[b'indexer_id']
            result[b'airdate'] = sbdatetime.sbfdate(result[b'localtime'], d_preset=dateFormat)
            result[b'localtime'] = result[b'localtime'].toordinal()

            grouped_results[category].append(result)

        return grouped_results
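
Example #14 above sorts ``results`` twice; that works because Python's sort is stable, so the earlier ``localtime`` ordering survives as a tie-breaker inside the later sort. A minimal standalone illustration with made-up rows:

    from operator import itemgetter

    rows = [
        {'show_name': 'B', 'localtime': 2},
        {'show_name': 'A', 'localtime': 3},
        {'show_name': 'B', 'localtime': 1},
    ]
    rows.sort(key=itemgetter('localtime'))   # secondary key first
    rows.sort(key=itemgetter('show_name'))   # primary key second; ties keep localtime order
    print([(r['show_name'], r['localtime']) for r in rows])
    # [('A', 3), ('B', 1), ('B', 2)]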
Example #15
class History(object):
    date_format = '%Y%m%d%H%M%S'

    def __init__(self):
        self.db = DBConnection()

    def remove(self, toRemove):
        """
        Removes the selected history
        :param toRemove: Contains the properties of the log entries to remove
        """
        query = ''

        for item in toRemove:
            query = (query + ' OR ') if query != '' else ''
            query = query + '(date IN ({0}) AND showid = {1} ' \
                            'AND season = {2} AND episode = {3})' \
                            .format(','.join(item['dates']), item['show_id'],
                                    item['season'], item['episode'])

        self.db.action('DELETE FROM history WHERE ' + query)

    def clear(self):
        """
        Clear all the history
        """
        self.db.action(
            'DELETE '
            'FROM history '
            'WHERE 1 = 1'
        )

    def get(self, limit=100, action=None):
        """
        :param limit: The maximum number of elements to return
        :param action: The type of action to filter in the history. Either 'downloaded' or 'snatched'. Anything else or
                        no value will return everything (up to ``limit``)
        :return: The last ``limit`` elements of type ``action`` in the history
        """

        actions = History._get_actions(action)
        limit = History._get_limit(limit)

        common_sql = 'SELECT action, date, episode, provider, h.quality, resource, season, show_name, showid ' \
                     'FROM history h, tv_shows s ' \
                     'WHERE h.showid = s.indexer_id '
        filter_sql = 'AND action in (' + ','.join(['?'] * len(actions)) + ') '
        order_sql = 'ORDER BY date DESC '

        if limit == 0:
            if actions:
                results = self.db.select(common_sql + filter_sql + order_sql,
                                         actions)
            else:
                results = self.db.select(common_sql + order_sql)
        else:
            if actions:
                results = self.db.select(
                    common_sql + filter_sql + order_sql + 'LIMIT ?',
                    actions + [limit])
            else:
                results = self.db.select(common_sql + order_sql + 'LIMIT ?',
                                         [limit])

        data = []
        for result in results:
            data.append({
                'action': result[b'action'],
                'date': result[b'date'],
                'episode': result[b'episode'],
                'provider': result[b'provider'],
                'quality': result[b'quality'],
                'resource': result[b'resource'],
                'season': result[b'season'],
                'show_id': result[b'showid'],
                'show_name': result[b'show_name']
            })

        return data

    def trim(self):
        """
        Remove all elements older than 30 days from the history
        """

        self.db.action('DELETE '
                       'FROM history '
                       'WHERE date < ?',
                       [(datetime.today() - timedelta(days=30)).strftime(
                           History.date_format)])

    @staticmethod
    def _get_actions(action):
        action = action.lower() if isinstance(action, six.string_types) else ''

        if action == 'downloaded':
            return Quality.DOWNLOADED

        if action == 'snatched':
            return Quality.SNATCHED

        return []

    @staticmethod
    def _get_limit(limit):
        limit = try_int(limit, 0)

        return max(limit, 0)
Example #16
    def find_search_results(self, show, episodes, search_mode, forced_search=False,
                            download_current_quality=False, manual_search=False,
                            manual_search_type='episode'):  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
        self._check_auth()
        self.show = show

        results = {}
        items_list = []
        searched_scene_season = None

        for episode in episodes:
            if not manual_search:
                cache_result = self.cache.searchCache(episode, forced_search=forced_search,
                                                      downCurQuality=download_current_quality)
                if cache_result:
                    if episode.episode not in results:
                        results[episode.episode] = cache_result
                    else:
                        results[episode.episode].extend(cache_result)

                    continue

            search_strings = []
            searched_scene_season = episode.scene_season

            if (len(episodes) > 1 or manual_search_type == 'season') and search_mode == 'sponly':
                search_strings = self._get_season_search_strings(episode)
            elif search_mode == 'eponly':
                search_strings = self._get_episode_search_strings(episode)

            for search_string in search_strings:
                # Find results from the provider
                items_list += self.search(search_string, ep_obj=episode)

        if len(results) == len(episodes):
            return results

        if items_list:
            # categorize the items into lists by quality
            items = defaultdict(list)
            for item in items_list:
                items[self.get_quality(item, anime=show.is_anime)].append(item)

            # temporarily remove the list of items with unknown quality
            unknown_items = items.pop(Quality.UNKNOWN, [])

            # make a generator to sort the remaining items by descending quality
            items_list = (items[quality] for quality in sorted(items, reverse=True))

            # unpack all of the quality lists into a single sorted list
            items_list = list(chain(*items_list))

            # extend the list with the unknown qualities, now sorted at the bottom of the list
            items_list.extend(unknown_items)

        cl = []

        for item in items_list:
            (title, url) = self._get_title_and_url(item)
            (seeders, leechers) = self._get_result_info(item)
            size = self._get_size(item)
            pubdate = self._get_pubdate(item)
            torrent_hash = self._get_hash(item)

            try:
                parse_result = NameParser(parse_method=('normal', 'anime')[show.is_anime]).parse(title)
            except (InvalidNameException, InvalidShowException) as error:
                logger.log(u"{error}".format(error=error), logger.DEBUG)
                continue

            show_object = parse_result.show
            quality = parse_result.quality
            release_group = parse_result.release_group
            version = parse_result.version
            add_cache_entry = False

            if not manual_search:
                if not (show_object.air_by_date or show_object.sports):
                    if search_mode == 'sponly':
                        if parse_result.episode_numbers:
                            logger.log(
                                'This is supposed to be a season pack search but the result %s is not a valid season pack, skipping it' % title,
                                logger.DEBUG
                            )
                            add_cache_entry = True
                        elif not [ep for ep in episodes if parse_result.season_number == (ep.season, ep.scene_season)[ep.show.is_scene]]:
                            logger.log(
                                'This season result %s is for a season we are not searching for, skipping it' % title,
                                logger.DEBUG
                            )
                            add_cache_entry = True

                    else:
                        if not all([
                            # pylint: disable=bad-continuation
                            parse_result.season_number is not None,
                            parse_result.episode_numbers,
                            [ep for ep in episodes if (ep.season, ep.scene_season)[ep.show.is_scene] ==
                             parse_result.season_number and
                             (ep.episode, ep.scene_episode)[ep.show.is_scene] in
                             parse_result.episode_numbers]
                        ]):

                            logger.log(
                                'The result %s doesn\'t seem to match an episode that we are currently trying to snatch, skipping it' % title,
                                logger.DEBUG)
                            add_cache_entry = True

                    if not add_cache_entry:
                        actual_season = parse_result.season_number
                        actual_episodes = parse_result.episode_numbers
                else:
                    same_day_special = False

                    if not parse_result.is_air_by_date:
                        logger.log(
                            'This is supposed to be a date search but the result %s didn\'t parse as one, skipping it' % title,
                            logger.DEBUG)
                        add_cache_entry = True
                    else:
                        air_date = parse_result.air_date.toordinal()
                        db = DBConnection()
                        sql_results = db.select(
                            'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                            [show_object.indexerid, air_date]
                        )

                        if len(sql_results) == 2:
                            if int(sql_results[0][b'season']) == 0 and int(sql_results[1][b'season']) != 0:
                                actual_season = int(sql_results[1][b'season'])
                                actual_episodes = [int(sql_results[1][b'episode'])]
                                same_day_special = True
                            elif int(sql_results[1][b'season']) == 0 and int(sql_results[0][b'season']) != 0:
                                actual_season = int(sql_results[0][b'season'])
                                actual_episodes = [int(sql_results[0][b'episode'])]
                                same_day_special = True
                        elif len(sql_results) != 1:
                            logger.log(
                                'Tried to look up the date for the episode %s but the database didn\'t give proper results, skipping it' % title,
                                logger.WARNING)
                            add_cache_entry = True

                    if not add_cache_entry and not same_day_special:
                        actual_season = int(sql_results[0][b'season'])
                        actual_episodes = [int(sql_results[0][b'episode'])]
            else:
                actual_season = parse_result.season_number
                actual_episodes = parse_result.episode_numbers

            if add_cache_entry:
                logger.log('Adding item from search to cache: %s' % title, logger.DEBUG)
                # pylint: disable=protected-access
                # Access to a protected member of a client class
                ci = self.cache._addCacheEntry(title, url, seeders, leechers, size, pubdate, torrent_hash)

                if ci is not None:
                    cl.append(ci)

                continue

            episode_wanted = True

            if not manual_search:
                for episode_number in actual_episodes:
                    if not show_object.wantEpisode(actual_season, episode_number, quality, forced_search,
                                                   download_current_quality):
                        episode_wanted = False
                        break

                if not episode_wanted:
                    logger.log('Ignoring result %s.' % (title), logger.DEBUG)
                    continue

            logger.log('Found result %s at %s' % (title, url), logger.DEBUG)

            episode_object = []
            for current_episode in actual_episodes:
                episode_object.append(show_object.getEpisode(actual_season, current_episode))

            result = self.get_result(episode_object)
            result.show = show_object
            result.url = url
            result.seeders = seeders
            result.leechers = leechers
            result.name = title
            result.quality = quality
            result.release_group = release_group
            result.version = version
            result.content = None
            result.size = self._get_size(item)
            result.pubdate = self._get_pubdate(item)
            result.hash = self._get_hash(item)

            if not episode_object:
                episode_number = SEASON_RESULT
                logger.log('Separating full season result to check for later', logger.DEBUG)
            elif len(episode_object) == 1:
                episode_number = episode_object[0].episode
                logger.log('Single episode result.', logger.DEBUG)
            else:
                episode_number = MULTI_EP_RESULT
                logger.log('Separating multi-episode result to check for later - result contains episodes: %s' % str(
                    parse_result.episode_numbers), logger.DEBUG)

            if episode_number not in results:
                results[episode_number] = [result]
            else:
                results[episode_number].append(result)

        if cl:
            # pylint: disable=protected-access
            # Access to a protected member of a client class
            db = self.cache._get_db()
            db.mass_action(cl)

        return results
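
For reference, the quality-bucketing idiom in the block above can be pulled out into a standalone helper. A minimal sketch, using a plain integer as a stand-in for Quality.UNKNOWN and a caller-supplied lookup in place of get_quality:

from collections import defaultdict
from itertools import chain

UNKNOWN = -1  # stand-in for Quality.UNKNOWN


def sort_by_quality(items, get_quality):
    # bucket the items by quality
    buckets = defaultdict(list)
    for item in items:
        buckets[get_quality(item)].append(item)

    # temporarily set aside the items with unknown quality
    unknown_items = buckets.pop(UNKNOWN, [])

    # unpack the buckets in descending quality order,
    # with the unknown qualities sorted to the bottom
    ranked = (buckets[quality] for quality in sorted(buckets, reverse=True))
    return list(chain(*ranked)) + unknown_items


# sort_by_quality(['a', 'b', 'c'], {'a': 1, 'b': UNKNOWN, 'c': 8}.get)
# -> ['c', 'a', 'b']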
Beispiel #17
    def find_search_results(self, show, episodes, search_mode,  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
                            manual_search=False, download_current_quality=False):
        self._check_auth()
        self.show = show

        results = {}
        items_list = []
        searched_scene_season = None

        for episode in episodes:
            cache_result = self.cache.search_cache(
                episode,
                manual_search=manual_search,
                down_cur_quality=download_current_quality)
            if cache_result:
                if episode.episode not in results:
                    results[episode.episode] = cache_result
                else:
                    results[episode.episode].extend(cache_result)

                continue

            if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == episode.scene_season:
                continue

            search_strings = []
            searched_scene_season = episode.scene_season

            if len(episodes) > 1 and search_mode == 'sponly':
                search_strings = self.get_season_search_strings(episode)
            elif search_mode == 'eponly':
                search_strings = self.get_episode_search_strings(episode)

            for search_string in search_strings:
                items_list += self.search(search_string, ep_obj=episode)

        if len(results) == len(episodes):
            return results

        if items_list:
            items = {}
            unknown_items = []

            for item in items_list:
                quality = self.get_quality(item, anime=show.is_anime)

                if quality == Quality.UNKNOWN:
                    unknown_items.append(item)
                elif quality == Quality.NONE:
                    pass  # skip HEVC releases when HEVC is not allowed by the settings
                else:
                    if quality not in items:
                        items[quality] = []
                    items[quality].append(item)

            # unpack the quality buckets in descending order, unknowns last
            items_list = list(chain(*[v for (k_, v) in sorted(six.iteritems(items), reverse=True)]))
            items_list += unknown_items

        cl = []

        for item in items_list:
            (title, url) = self._get_title_and_url(item)

            try:
                parse_result = NameParser(parse_method=('normal', 'anime')[show.is_anime]).parse(title)
            except (InvalidNameException, InvalidShowException) as error:
                logger.log("{0}".format(error), logger.DEBUG)
                continue

            show_object = parse_result.show
            quality = parse_result.quality
            release_group = parse_result.release_group
            version = parse_result.version
            add_cache_entry = False

            if not (show_object.air_by_date or show_object.sports):
                if search_mode == 'sponly':
                    if parse_result.episode_numbers:
                        logger.log(
                            'This is supposed to be a season pack search but the result {0} is not a valid season pack, skipping it'
                            .format(title), logger.DEBUG)
                        add_cache_entry = True
                    elif not [ep for ep in episodes if parse_result.season_number ==
                              (ep.season, ep.scene_season)[ep.show.is_scene]]:
                        logger.log(
                            'This season result {0} is for a season we are not searching for, skipping it'
                            .format(title), logger.DEBUG)
                        add_cache_entry = True

                else:
                    if not all([
                        # pylint: disable=bad-continuation
                        parse_result.season_number is not None,
                        parse_result.episode_numbers,
                        [ep for ep in episodes if
                         (ep.season, ep.scene_season)[ep.show.is_scene] ==
                         (parse_result.season_number, parse_result.scene_season)[ep.show.is_scene] and
                         (ep.episode, ep.scene_episode)[ep.show.is_scene] in parse_result.episode_numbers]
                    ]) and not all([
                        # fallback for anime on absolute numbering
                        parse_result.is_anime,
                        parse_result.ab_episode_numbers is not None,
                        [ep for ep in episodes if ep.show.is_anime and
                         ep.absolute_number in parse_result.ab_episode_numbers]
                    ]):

                        logger.log(
                            'The result {0} doesn\'t seem to match an episode that we are currently trying to snatch, skipping it'
                            .format(title), logger.DEBUG)
                        add_cache_entry = True

                if not add_cache_entry:
                    actual_season = parse_result.season_number
                    actual_episodes = parse_result.episode_numbers
            else:
                same_day_special = False

                if not parse_result.is_air_by_date:
                    logger.log(
                        'This is supposed to be a date search but the result {0} didn\'t parse as one, skipping it'
                        .format(title), logger.DEBUG)
                    add_cache_entry = True
                else:
                    air_date = parse_result.air_date.toordinal()
                    db = DBConnection()
                    sql_results = db.select(
                        'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                        [show_object.indexerid, air_date])

                    if len(sql_results) == 2:
                        if int(sql_results[0][b'season']) == 0 and int(sql_results[1][b'season']) != 0:
                            actual_season = int(sql_results[1][b'season'])
                            actual_episodes = [int(sql_results[1][b'episode'])]
                            same_day_special = True
                        elif int(sql_results[1][b'season']) == 0 and int(sql_results[0][b'season']) != 0:
                            actual_season = int(sql_results[0][b'season'])
                            actual_episodes = [int(sql_results[0][b'episode'])]
                            same_day_special = True
                    elif len(sql_results) != 1:
                        logger.log(
                            'Tried to look up the date for the episode {0} but the database didn\'t give proper results, skipping it'
                            .format(title), logger.WARNING)
                        add_cache_entry = True

                if not add_cache_entry and not same_day_special:
                    actual_season = int(sql_results[0][b'season'])
                    actual_episodes = [int(sql_results[0][b'episode'])]

            if add_cache_entry:
                logger.log(
                    'Adding item from search to cache: {0}'.format(title),
                    logger.DEBUG)
                # pylint: disable=protected-access
                # Access to a protected member of a client class
                ci = self.cache._add_cache_entry(title,
                                                 url,
                                                 parse_result=parse_result)

                if ci is not None:
                    cl.append(ci)

                continue

            episode_wanted = True

            for episode_number in actual_episodes:
                if not show_object.wantEpisode(actual_season, episode_number,
                                               quality, manual_search,
                                               download_current_quality):
                    episode_wanted = False
                    break

            if not episode_wanted:
                logger.log('Ignoring result {0}.'.format(title), logger.DEBUG)
                continue

            logger.log('Found result {0} at {1}'.format(title, url),
                       logger.DEBUG)

            episode_object = []
            for current_episode in actual_episodes:
                episode_object.append(
                    show_object.getEpisode(actual_season, current_episode))

            result = self.get_result(episode_object)
            result.show = show_object
            result.url = url
            result.name = title
            result.quality = quality
            result.release_group = release_group
            result.version = version
            result.content = None
            result.size = self._get_size(item)

            if len(episode_object) == 1:
                episode_number = episode_object[0].episode
                logger.log('Single episode result.', logger.DEBUG)
            elif len(episode_object) > 1:
                episode_number = MULTI_EP_RESULT
                logger.log(
                    'Separating multi-episode result to check for later - result contains episodes: {0}'
                    .format(parse_result.episode_numbers), logger.DEBUG)
            elif len(episode_object) == 0:
                episode_number = SEASON_RESULT
                logger.log('Separating full season result to check for later',
                           logger.DEBUG)

            if episode_number not in results:
                results[episode_number] = [result]
            else:
                results[episode_number].append(result)

        if cl:
            # pylint: disable=protected-access
            # Access to a protected member of a client class
            cache_db = self.cache._get_db()
            cache_db.mass_action(cl)

        return results
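
Both versions of find_search_results key their results dict by episode number, with SEASON_RESULT and MULTI_EP_RESULT acting as sentinel keys for releases that span more than one episode. A sketch of that bucketing; the sentinel values below are illustrative stand-ins, not SickRage's actual constants:

SEASON_RESULT = -1    # illustrative stand-in
MULTI_EP_RESULT = -2  # illustrative stand-in


def bucket_result(results, episode_numbers, result):
    # file a search result under its episode number, or under a
    # sentinel key when it covers a whole season or several episodes
    if not episode_numbers:
        key = SEASON_RESULT
    elif len(episode_numbers) == 1:
        key = episode_numbers[0]
    else:
        key = MULTI_EP_RESULT
    results.setdefault(key, []).append(result)
    return results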
Beispiel #18
class History(object):
    date_format = '%Y%m%d%H%M%S'

    def __init__(self):
        self.db = DBConnection()

    def clear(self):
        """
        Clear all the history
        """
        self.db.action(
            'DELETE '
            'FROM history '
            'WHERE 1 = 1'
        )

    def get(self, limit=100, action=None):
        """
        :param limit: The maximum number of elements to return
        :param action: The type of action to filter in the history. Either 'downloaded' or 'snatched'. Anything else or
                        no value will return everything (up to ``limit``)
        :return: The last ``limit`` elements of type ``action`` in the history
        """

        # TODO: Make this a generator instead
        # TODO: Split compact and detailed into separate methods
        # TODO: Add a date limit as well
        # TODO: Clean up history.mako

        actions = History._get_actions(action)
        limit = max(try_int(limit), 0)

        common_sql = 'SELECT show_name, showid, season, episode, h.quality, ' \
                     'action, provider, resource, date ' \
                     'FROM history h, tv_shows s ' \
                     'WHERE h.showid = s.indexer_id '
        filter_sql = 'AND action in (' + ','.join(['?'] * len(actions)) + ') '
        order_sql = 'ORDER BY date DESC '

        if actions:
            sql_results = self.db.select(common_sql + filter_sql + order_sql,
                                         actions)
        else:
            sql_results = self.db.select(common_sql + order_sql)

        detailed = []
        compact = dict()

        # TODO: Convert to a defaultdict and compact items as needed
        # TODO: Convert to using operators to combine items
        for row in sql_results:
            row = History.Item(*row)
            if row.index in compact:
                compact[row.index].actions.append(row.cur_action)
            elif not limit or len(compact) < limit:
                detailed.append(row)
                compact[row.index] = row.compacted()

        results = namedtuple('results', ['detailed', 'compact'])
        return results(detailed, compact.values())

    def trim(self, days=30):
        """
        Remove expired elements from history

        :param days: number of days to keep
        """
        date = datetime.today() - timedelta(days)
        self.db.action(
            'DELETE '
            'FROM history '
            'WHERE date < ?',
            [date.strftime(History.date_format)]
        )

    @staticmethod
    def _get_actions(action):
        action = action.lower() if isinstance(action, (str, unicode)) else ''

        result = None
        if action == 'downloaded':
            result = Quality.DOWNLOADED
        elif action == 'snatched':
            result = Quality.SNATCHED

        return result or []

    action_fields = ('action', 'provider', 'resource', 'date', )
    # A specific action from history
    Action = namedtuple('Action', action_fields)
    Action.width = len(action_fields)

    index_fields = ('show_id', 'season', 'episode', 'quality', )
    # An index for an item or compact item from history
    Index = namedtuple('Index', index_fields)
    Index.width = len(index_fields)

    compact_fields = ('show_name', 'index', 'actions', )
    # Related items compacted with a list of actions from history
    CompactItem = namedtuple('CompactItem', compact_fields)

    item_fields = tuple(  # make it a tuple so it's immutable
        ['show_name'] + list(index_fields) + list(action_fields)
    )

    class Item(namedtuple('Item', item_fields)):
        # TODO: Allow items to be added to a compact item
        """
        An individual row item from history
        """
        # prevent creation of a __dict__ when subclassing
        # from a class that uses __slots__
        __slots__ = ()

        @property
        def index(self):
            """
            Create a look-up index for the item
            """
            return History.Index(
                self.show_id,
                self.season,
                self.episode,
                self.quality,
            )

        @property
        def cur_action(self):
            """
            Create the current action from action_fields
            """
            return History.Action(
                self.action,
                self.provider,
                self.resource,
                self.date,
            )

        def compacted(self):
            """
            Create a CompactItem

            :returns: the current item in compact form
            """
            result = History.CompactItem(
                self.show_name,
                self.index,
                [self.cur_action],  # actions
            )
            return result

        def __add__(self, other):
            """
            Combines two history items with the same index

            :param other: The other item to add
            :returns: a compact item with elements from both items
            :raises AssertionError: if indexes do not match
            """
            # delegate explicitly: `self.compacted() + other` would fall back
            # to plain tuple concatenation, since CompactItem and Item are both
            # tuples, and __radd__ would never be invoked.
            # Index comparison and validation is done by __radd__
            return other.__radd__(self.compacted())

        def __radd__(self, other):
            """
            Adds a history item to a compact item

            :param other: The compact item to append
            :returns: the updated compact item
            :raises AssertionError: if indexes do not match
            """
            if self.index == other.index:
                other.actions.append(self.cur_action)
                return other
            else:
                raise AssertionError('cannot add items with different indexes')
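
A usage sketch for the operator overloads above; the field values are made up, and only the index fields (show_id, season, episode, quality) have to match for two rows to combine:

# two history rows that share the same index
first = History.Item('Example Show',       # show_name
                     1001, 2, 5, 4,        # index: show_id, season, episode, quality
                     100, 'SomeProvider',  # action fields: action, provider, ...
                     'example.mkv', '20240101120000')
second = first._replace(action=404, date='20240102090000')

compact = first + second  # merged via __radd__ into a CompactItem
assert compact.index == first.index
assert len(compact.actions) == 2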
Beispiel #19
class History:
    date_format = '%Y%m%d%H%M%S'

    def __init__(self):
        self.db = DBConnection()

    def clear(self):
        self.db.action(
            'DELETE '
            'FROM history '
            'WHERE 1 = 1'
        )

    def get(self, limit=100, action=None):
        action = action.lower() if isinstance(action, str) else ''
        limit = int(limit)

        if action == 'downloaded':
            actions = Quality.DOWNLOADED
        elif action == 'snatched':
            actions = Quality.SNATCHED
        else:
            actions = Quality.SNATCHED + Quality.DOWNLOADED

        if limit == 0:
            results = self.db.select(
                'SELECT h.*, show_name '
                'FROM history h, tv_shows s '
                'WHERE h.showid = s.indexer_id '
                'AND action in (' + ','.join(['?'] * len(actions)) + ') '
                'ORDER BY date DESC',
                actions
            )
        else:
            results = self.db.select(
                'SELECT h.*, show_name '
                'FROM history h, tv_shows s '
                'WHERE h.showid = s.indexer_id '
                'AND action in (' + ','.join(['?'] * len(actions)) + ') '
                'ORDER BY date DESC '
                'LIMIT ?',
                actions + [limit]
            )

        data = []
        for result in results:
            data.append({
                'action': result['action'],
                'date': result['date'],
                'episode': result['episode'],
                'provider': result['provider'],
                'quality': result['quality'],
                'resource': result['resource'],
                'season': result['season'],
                'show_id': result['showid'],
                'show_name': result['show_name']
            })

        return data

    def trim(self):
        self.db.action(
            'DELETE '
            'FROM history '
            'WHERE date < ?',
            [(datetime.today() - timedelta(days=30)).strftime(History.date_format)]
        )
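
Both History.get variants build their IN clause by repeating one ? placeholder per value instead of interpolating the values into the SQL string. A self-contained sketch of the idiom against an in-memory SQLite database (the table and values are made up):

import sqlite3


def select_in(conn, actions):
    # one '?' per value keeps the query parameterized while still
    # allowing a variable-length IN clause
    placeholders = ','.join(['?'] * len(actions))
    sql = 'SELECT id FROM history WHERE action IN (%s)' % placeholders
    return conn.execute(sql, actions).fetchall()


conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE history (id INTEGER, action INTEGER)')
conn.executemany('INSERT INTO history VALUES (?, ?)', [(1, 100), (2, 404), (3, 100)])
print(select_in(conn, [100]))  # -> [(1,), (3,)]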
Beispiel #20
class ComingEpisodes(object):
    def __init__(self):
        self.db = DBConnection()

    @staticmethod
    def get_coming_episodes(categories, sort, group, paused=sickbeard.COMING_EPS_DISPLAY_PAUSED):
        """
        :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
        :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
        :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
        :param paused: ``True`` to include paused shows, ``False`` otherwise
        :return: The list of coming episodes
        """

        categories = ComingEpisodes._get_categories(categories)
        sort = ComingEpisodes._get_sort(sort)

        today = date.today().toordinal()
        next_week = (date.today() + timedelta(days=7)).toordinal()
        recently = (date.today() - timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal()
        qualities_list = Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER + Quality.ARCHIVED + [IGNORED]

        db = DBConnection()
        fields_to_select = ', '.join(
            ['airdate', 'airs', 'description', 'episode', 'imdb_id', 'e.indexer', 'indexer_id', 'name', 'network',
             'paused', 'quality', 'runtime', 'season', 'show_name', 'showid', 's.status']
        )
        results = db.select(
            'SELECT %s ' % fields_to_select +
            'FROM tv_episodes e, tv_shows s '
            'WHERE season != 0 '
            'AND airdate >= ? '
            'AND airdate < ? '
            'AND s.indexer_id = e.showid '
            'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
            [today, next_week] + qualities_list
        )

        done_shows_list = [int(result['showid']) for result in results]
        placeholder = ','.join(['?'] * len(done_shows_list))
        placeholder2 = ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER))

        results += db.select(
            'SELECT %s ' % fields_to_select +
            'FROM tv_episodes e, tv_shows s '
            'WHERE season != 0 '
            'AND showid NOT IN (' + placeholder + ') '
            'AND s.indexer_id = e.showid '
            'AND airdate = (SELECT airdate '
            'FROM tv_episodes inner_e '
            'WHERE inner_e.season != 0 '
            'AND inner_e.showid = e.showid '
            'AND inner_e.airdate >= ? '
            'ORDER BY inner_e.airdate ASC LIMIT 1) '
            'AND e.status NOT IN (' + placeholder2 + ')',
            done_shows_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER
        )

        results += db.select(
            'SELECT %s ' % fields_to_select +
            'FROM tv_episodes e, tv_shows s '
            'WHERE season != 0 '
            'AND s.indexer_id = e.showid '
            'AND airdate < ? '
            'AND airdate >= ? '
            'AND e.status IN (?,?) '
            'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
            [today, recently, WANTED, UNAIRED] + qualities_list
        )

        results = [dict(result) for result in results]

        for index, item in enumerate(results):
            results[index]['localtime'] = sbdatetime.convert_to_setting(
                parse_date_time(item['airdate'], item['airs'], item['network']))

        results.sort(ComingEpisodes.sorts[sort])

        if not group:
            return results

        grouped_results = ComingEpisodes._get_categories_map(categories)

        for result in results:
            if result['paused'] and not paused:
                continue

            result['airs'] = str(result['airs']).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
            result['airdate'] = result['localtime'].toordinal()

            if result['airdate'] < today:
                category = 'missed'
            elif result['airdate'] >= next_week:
                category = 'later'
            elif result['airdate'] == today:
                category = 'today'
            else:
                category = 'soon'

            if len(categories) > 0 and category not in categories:
                continue

            if not result['network']:
                result['network'] = ''

            result['quality'] = get_quality_string(result['quality'])
            result['airs'] = sbdatetime.sbftime(result['localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
            result['weekday'] = 1 + date.fromordinal(result['airdate']).weekday()
            result['tvdbid'] = result['indexer_id']
            result['airdate'] = sbdatetime.sbfdate(result['localtime'], d_preset=dateFormat)
            result['localtime'] = result['localtime'].toordinal()

            grouped_results[category].append(result)

        return grouped_results
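
The category assignment above reduces to comparing ordinal day numbers. A standalone sketch of just that branching:

from datetime import date


def categorize(airdate, today=None):
    # mirrors the missed/later/today/soon branching in
    # get_coming_episodes, on ordinal day numbers
    today = today if today is not None else date.today().toordinal()
    next_week = today + 7
    if airdate < today:
        return 'missed'
    if airdate >= next_week:
        return 'later'
    if airdate == today:
        return 'today'
    return 'soon'


today = date.today().toordinal()
assert categorize(today - 1) == 'missed'
assert categorize(today) == 'today'
assert categorize(today + 3) == 'soon'
assert categorize(today + 10) == 'later'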