Example #1
    def map_fields(queue_item):
        """Translate fields with Enums."""
        if queue_item.get('priority') is not None:
            if queue_item['priority'] == QueuePriorities.LOW:
                priority = 'low'
            elif queue_item['priority'] == QueuePriorities.NORMAL:
                priority = 'normal'
            elif queue_item['priority'] == QueuePriorities.HIGH:
                priority = 'high'
            else:
                priority = queue_item['priority']
            queue_item['priority'] = priority

        if queue_item.get('updateTime') is not None:
            dt = datetime.datetime.strptime(queue_item['updateTime'],
                                            '%Y-%m-%d %H:%M:%S.%f')
            queue_item['updateTime'] = sbdatetime.convert_to_setting(
                dt.replace(microsecond=0)).isoformat()

        if queue_item.get('startTime') is not None:
            dt = datetime.datetime.strptime(queue_item['startTime'],
                                            '%Y-%m-%d %H:%M:%S.%f')
            queue_item['startTime'] = sbdatetime.convert_to_setting(
                dt.replace(microsecond=0)).isoformat()

        return queue_item
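
The if/elif priority ladder above can also be written as a single dict lookup. A minimal, self-contained sketch; the QueuePriorities values are hypothetical stand-ins for Medusa's real enum:

class QueuePriorities(object):
    # Hypothetical stand-in values; Medusa defines the real ones elsewhere.
    LOW, NORMAL, HIGH = 10, 20, 30

PRIORITY_NAMES = {
    QueuePriorities.LOW: 'low',
    QueuePriorities.NORMAL: 'normal',
    QueuePriorities.HIGH: 'high',
}

def map_priority(value):
    # Unknown priorities fall through unchanged, mirroring the else branch above.
    return PRIORITY_NAMES.get(value, value)

assert map_priority(QueuePriorities.NORMAL) == 'normal'
assert map_priority(99) == 99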
Example #2
File: utils.py Project: swipswaps/Medusa
def _scheduler_to_json(key, name, scheduler):
    scheduler = getattr(app, scheduler)
    if not scheduler:
        # Not initialized
        return {
            'key': key,
            'name': name,
        }

    return {
        'key': key,
        'name': name,
        'isAlive': scheduler.is_alive(),
        'isEnabled': _is_enabled(key, scheduler),
        'isActive': _is_active(key, scheduler),
        'startTime': scheduler.start_time.isoformat() if scheduler.start_time else None,
        'cycleTime': int(scheduler.cycleTime.total_seconds()) or None,
        'nextRun': int(scheduler.timeLeft().total_seconds()) if scheduler.enable else None,
        'lastRun': sbdatetime.convert_to_setting(scheduler.lastRun.replace(microsecond=0)).isoformat(),
        'isSilent': bool(scheduler.silent),
        'queueLength': _queue_length(key, scheduler),
    }
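
A hypothetical call site for the helper above; the (key, name, app attribute) triples are illustrative assumptions, not the ones Medusa actually registers:

import json

schedulers = [
    ('backlog', 'Backlog', 'backlog_search_scheduler'),          # assumed names
    ('dailysearch', 'Daily Search', 'daily_search_scheduler'),   # assumed names
]
payload = [_scheduler_to_json(key, name, attr) for key, name, attr in schedulers]
print(json.dumps(payload, indent=2))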
Example #3
def _queued_show_to_json(item):
    try:
        show_slug = item.show.slug
    except AttributeError:
        show_slug = None

    show_title = None
    show_dir = None
    try:
        show_title = item.show.name
    except AttributeError:
        if item.action_id == ShowQueueActions.ADD:
            show_dir = item.show_dir

    if item.priority == QueuePriorities.LOW:
        priority = 'low'
    elif item.priority == QueuePriorities.NORMAL:
        priority = 'normal'
    elif item.priority == QueuePriorities.HIGH:
        priority = 'high'
    else:
        priority = item.priority

    return {
        'showSlug': show_slug,
        'showTitle': show_title,
        'showDir': show_dir,
        'inProgress': bool(item.inProgress),
        'priority': priority,
        'added': sbdatetime.convert_to_setting(item.added.replace(microsecond=0)).isoformat(),
        'actionId': item.action_id,
        'queueType': ShowQueueActions.names[item.action_id],
    }
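
Serializing the whole queue is then a comprehension over its items. The queue attribute below is an assumption about where Medusa keeps pending show-queue items:

# Hypothetical: adjust `app.show_queue_scheduler.action.queue` to the actual attribute.
queue_json = [_queued_show_to_json(item)
              for item in app.show_queue_scheduler.action.queue]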
Example #4
    def resource_get_episode_backlog(self):
        """Collect backlog search information for each show."""
        status = self.get_argument('status', '').strip()
        period = self.get_argument('period', '').strip()

        available_backlog_status = {
            'all': [Overview.QUAL, Overview.WANTED],
            'quality': [Overview.QUAL],
            'wanted': [Overview.WANTED]
        }

        available_backlog_periods = {
            'all': None,
            'one_day': datetime.timedelta(days=1),
            'three_days': datetime.timedelta(days=3),
            'one_week': datetime.timedelta(days=7),
            'one_month': datetime.timedelta(days=30),
        }

        if status not in available_backlog_status:
            return self._bad_request(
                "allowed status values are: 'all', 'quality' and 'wanted'")

        if period not in available_backlog_periods:
            return self._bad_request(
                "allowed period values are: 'all', 'one_day', 'three_days', 'one_week' and 'one_month'"
            )

        backlog_status = available_backlog_status.get(status)
        backlog_period = available_backlog_periods.get(period)

        main_db_con = db.DBConnection()
        results = []

        for cur_show in app.showList:
            if cur_show.paused:
                continue

            ep_counts = {
                'wanted': 0,
                'allowed': 0,
            }
            ep_cats = {}

            sql_results = main_db_con.select(
                """
                SELECT e.status, e.quality, e.season,
                e.episode, e.name, e.airdate, e.manually_searched
                FROM tv_episodes as e
                WHERE e.season IS NOT NULL AND
                      e.indexer = ? AND e.showid = ?
                ORDER BY e.season DESC, e.episode DESC
                """, [cur_show.indexer, cur_show.series_id])

            filtered_episodes = []
            for cur_result in sql_results:
                cur_ep_cat = cur_show.get_overview(
                    cur_result['status'],
                    cur_result['quality'],
                    backlog_mode=True,
                    manually_searched=cur_result['manually_searched'])
                if cur_ep_cat:
                    if cur_ep_cat in backlog_status and cur_result['airdate'] != 1:
                        air_date = datetime.datetime.fromordinal(cur_result['airdate'])
                        if air_date.year >= 1970 or cur_show.network:
                            air_date = sbdatetime.convert_to_setting(
                                network_timezones.parse_date_time(
                                    cur_result['airdate'], cur_show.airs, cur_show.network))
                            if backlog_period and air_date < datetime.datetime.now(app_timezone) - backlog_period:
                                continue
                        else:
                            air_date = None
                        episode_string = u'{ep}'.format(
                            ep=(episode_num(cur_result['season'], cur_result['episode'])
                                or episode_num(cur_result['season'], cur_result['episode'],
                                               numbering='absolute')))
                        cur_ep_cat_string = Overview.overviewStrings[cur_ep_cat]
                        ep_cats[episode_string] = cur_ep_cat_string
                        ep_counts[cur_ep_cat_string] += 1
                        cur_result['airdate'] = air_date.isoformat('T')
                        cur_result['manuallySearched'] = cur_result['manually_searched']
                        del cur_result['manually_searched']
                        cur_result['statusString'] = statusStrings[cur_result['status']]
                        cur_result['qualityString'] = Quality.qualityStrings[cur_result['quality']]

                        cur_result['slug'] = episode_string
                        filtered_episodes.append(cur_result)

            if filtered_episodes:
                results.append({
                    'slug': cur_show.identifier.slug,
                    'name': cur_show.name,
                    'quality': cur_show.quality,
                    'episodeCount': ep_counts,
                    'category': ep_cats,
                    'episodes': filtered_episodes
                })

        return self._ok(data=results)
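
The handler treats an airdate of ordinal 1 as "unknown". A minimal, self-contained sketch of that sentinel convention, leaving out the timezone handling done by network_timezones above:

import datetime

def airdate_to_iso(airdate_ordinal):
    # Medusa stores unknown air dates as ordinal 1 (hence the != 1 check above).
    if airdate_ordinal == 1:
        return None
    return datetime.datetime.fromordinal(airdate_ordinal).isoformat('T')

assert airdate_to_iso(1) is None
assert airdate_to_iso(datetime.date(2020, 1, 2).toordinal()) == '2020-01-02T00:00:00'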
Example #5
File: manual.py Project: steflavoie/Medusa
def get_provider_cache_results(indexer,
                               show_all_results=None,
                               perform_search=None,
                               show=None,
                               season=None,
                               episode=None,
                               manual_search_type=None,
                               **search_show):
    """Check all provider cache tables for search results."""
    down_cur_quality = 0
    show_obj = Show.find(app.showList, int(show))
    preferred_words = show_obj.show_words().preferred_words
    undesired_words = show_obj.show_words().undesired_words
    ignored_words = show_obj.show_words().ignored_words
    required_words = show_obj.show_words().required_words

    main_db_con = db.DBConnection('cache.db')

    provider_results = {
        'last_prov_updates': {},
        'error': {},
        'found_items': []
    }
    original_thread_name = threading.currentThread().name

    sql_total = []
    combined_sql_q = []
    combined_sql_params = []

    for cur_provider in enabled_providers('manualsearch'):
        threading.currentThread().name = '{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)

        # Let's check if this provider table already exists
        table_exists = main_db_con.select(
            b"SELECT name "
            b"FROM sqlite_master "
            b"WHERE type='table'"
            b" AND name=?", [cur_provider.get_id()])
        columns = [i[1] for i in main_db_con.select(
            "PRAGMA table_info('{0}')".format(cur_provider.get_id()))] if table_exists else []
        minseed = int(cur_provider.minseed) if getattr(cur_provider, 'minseed', None) else -1
        minleech = int(cur_provider.minleech) if getattr(cur_provider, 'minleech', None) else -1

        # TODO: the implicit sqlite rowid is used, should be replaced with an explicit PK column
        # If table doesn't exist, start a search to create table and new columns seeders, leechers and size
        required_columns = ['seeders', 'leechers', 'size', 'proper_tags']
        if table_exists and all(required_column in columns
                                for required_column in required_columns):
            # The default sql, that's executed for each providers cache table
            common_sql = (
                b"SELECT rowid, ? AS 'provider_type', ? AS 'provider_image',"
                b" ? AS 'provider', ? AS 'provider_id', ? 'provider_minseed',"
                b" ? 'provider_minleech', name, season, episodes, indexerid,"
                b" url, time, proper_tags, quality, release_group, version,"
                b" seeders, leechers, size, time, pubdate "
                b"FROM '{provider_id}' "
                b"WHERE indexerid = ? AND quality > 0 ".format(
                    provider_id=cur_provider.get_id()))

            # Let's start by adding the default parameters, which are used to substitute the '?'s.
            add_params = [
                cur_provider.provider_type.title(),
                cur_provider.image_name(), cur_provider.name,
                cur_provider.get_id(), minseed, minleech, show
            ]

            if manual_search_type != 'season':
                # If we're not looking for all results, filter on season + episode.
                if not int(show_all_results):
                    # It's an episode search: pass season and episode.
                    common_sql += " AND season = ? AND episodes LIKE ? "
                    add_params += [season, "%|{0}|%".format(episode)]

            else:
                # If we're not looking for all results, filter on season + episodes.
                if not int(show_all_results):
                    list_of_episodes = '{0}{1}'.format(
                        ' episodes LIKE ',
                        ' AND episodes LIKE '.join(['?' for _ in show_obj.get_all_episodes(season)]))

                    common_sql += " AND season = ? AND (episodes LIKE ? OR {list_of_episodes})".format(
                        list_of_episodes=list_of_episodes)
                    # '||' matches rows whose episodes field is empty.
                    add_params += [season, '||']
                    add_params += ['%|{episode}|%'.format(episode=ep.episode)
                                   for ep in show_obj.get_all_episodes(season)]

            # Add the created sql, to lists, that are used down below to perform one big UNIONED query
            combined_sql_q.append(common_sql)
            combined_sql_params += add_params

            # Get the last updated cache items timestamp
            last_update = main_db_con.select(
                b"SELECT max(time) AS lastupdate "
                b"FROM '{provider_id}'".format(
                    provider_id=cur_provider.get_id()))
            provider_results['last_prov_updates'][cur_provider.get_id()] = (
                last_update[0]['lastupdate'] if last_update[0]['lastupdate'] else 0)

    # Check if we have the combined sql strings
    if combined_sql_q:
        sql_prepend = b"SELECT * FROM ("
        sql_append = b") ORDER BY CASE quality WHEN '{quality_unknown}' THEN -1 ELSE CAST(quality AS DECIMAL) END DESC, " \
                     b" proper_tags DESC, seeders DESC".format(quality_unknown=Quality.UNKNOWN)

        # Add all results
        sql_total += main_db_con.select(
            b'{0} {1} {2}'.format(sql_prepend,
                                  ' UNION ALL '.join(combined_sql_q),
                                  sql_append), combined_sql_params)

    # Always start a search when no items found in cache
    if not sql_total or int(perform_search):
        # retrieve the episode object and fail if we can't get one
        ep_obj = get_episode(show, season, episode)
        if isinstance(ep_obj, str):
            provider_results['error'] = (
                'Something went wrong when starting the manual search for show {0}, '
                'and episode: {1}x{2}'.format(show_obj.name, season, episode))

        # make a queue item for it and put it on the queue
        ep_queue_item = ForcedSearchQueueItem(ep_obj.series, [ep_obj],
                                              bool(int(down_cur_quality)),
                                              True, manual_search_type)  # pylint: disable=maybe-no-member

        app.forced_search_queue_scheduler.action.add_item(ep_queue_item)

        # give the CPU a break and some time to start the queue
        time.sleep(cpu_presets[app.CPU_PRESET])
    else:
        cached_results = [dict(row) for row in sql_total]
        for i in cached_results:
            i['quality_name'] = Quality.split_quality(int(i['quality']))
            i['time'] = datetime.fromtimestamp(i['time'])
            i['release_group'] = i['release_group'] or 'None'
            # Parenthesized so the 'missing.png' fallback actually applies;
            # string concatenation binds tighter than `or`.
            i['provider_img_link'] = 'images/providers/' + (i['provider_image'] or 'missing.png')
            i['provider'] = i['provider'] if i['provider_image'] else 'missing provider'
            i['proper_tags'] = i['proper_tags'].replace('|', ', ')
            i['pretty_size'] = pretty_file_size(i['size']) if i['size'] > -1 else 'N/A'
            i['seeders'] = i['seeders'] if i['seeders'] >= 0 else '-'
            i['leechers'] = i['leechers'] if i['leechers'] >= 0 else '-'
            i['pubdate'] = sbdatetime.convert_to_setting(parser.parse(i['pubdate'])).strftime(
                app.DATE_PRESET + ' ' + app.TIME_PRESET) if i['pubdate'] else '-'
            release_group = i['release_group']
            if ignored_words and release_group in ignored_words:
                i['rg_highlight'] = 'ignored'
            elif required_words and release_group in required_words:
                i['rg_highlight'] = 'required'
            elif preferred_words and release_group in preferred_words:
                i['rg_highlight'] = 'preferred'
            elif undesired_words and release_group in undesired_words:
                i['rg_highlight'] = 'undesired'
            else:
                i['rg_highlight'] = ''
            if contains_at_least_one_word(i['name'], required_words):
                i['name_highlight'] = 'required'
            elif (contains_at_least_one_word(i['name'], ignored_words)
                  or not filter_bad_releases(i['name'], parse=False)):
                i['name_highlight'] = 'ignored'
            elif contains_at_least_one_word(i['name'], undesired_words):
                i['name_highlight'] = 'undesired'
            elif contains_at_least_one_word(i['name'], preferred_words):
                i['name_highlight'] = 'preferred'
            else:
                i['name_highlight'] = ''
            i['seed_highlight'] = 'ignored' if i.get('provider_minseed') > i.get('seeders', -1) >= 0 else ''
            i['leech_highlight'] = 'ignored' if i.get('provider_minleech') > i.get('leechers', -1) >= 0 else ''
        provider_results['found_items'] = cached_results

    # Remove provider from thread name before return results
    threading.currentThread().name = original_thread_name

    # Sanitize the last_prov_updates key
    provider_results['last_prov_updates'] = json.dumps(
        provider_results['last_prov_updates'])
    return provider_results
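
A hypothetical call: look up cached manual-search results for episode 3x5 of the show with indexer id 12345. All parameter values here are illustrative only:

results = get_provider_cache_results(
    indexer=1, show_all_results=0, perform_search=0,
    show='12345', season=3, episode=5, manual_search_type='episode')
print(results['last_prov_updates'])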
Example #6
    def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_PAUSED):
        """
        :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
        :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
        :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
        :param paused: ``True`` to include paused shows, ``False`` otherwise
        :return: The list of coming episodes
        """
        categories = ComingEpisodes._get_categories(categories)
        sort = ComingEpisodes._get_sort(sort)

        today = date.today().toordinal()
        next_week = (date.today() + timedelta(days=7)).toordinal()
        recently = (date.today() - timedelta(days=app.COMING_EPS_MISSED_RANGE)).toordinal()
        qualities_list = Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER + Quality.ARCHIVED + [IGNORED]

        db = DBConnection()
        fields_to_select = ', '.join(
            ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer', 'indexer_id', 'name', 'network',
             'paused', 'quality', 'runtime', 'season', 'show_name', 'showid', 's.status']
        )
        results = db.select(
            'SELECT %s ' % fields_to_select +
            'FROM tv_episodes e, tv_shows s '
            'WHERE season != 0 '
            'AND airdate >= ? '
            'AND airdate < ? '
            'AND s.indexer = e.indexer '
            'AND s.indexer_id = e.showid '
            'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
            [today, next_week] + qualities_list
        )

        done_shows_list = [int(result['showid']) for result in results]
        placeholder = ','.join(['?'] * len(done_shows_list))
        placeholder2 = ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER))

        # FIXME: This inner join is not multi indexer friendly.
        results += db.select(
            'SELECT %s ' % fields_to_select +
            'FROM tv_episodes e, tv_shows s '
            'WHERE season != 0 '
            'AND showid NOT IN (' + placeholder + ') '
            'AND s.indexer_id = e.showid '
            'AND airdate = (SELECT airdate '
            'FROM tv_episodes inner_e '
            'WHERE inner_e.season != 0 '
            'AND inner_e.showid = e.showid '
            'AND inner_e.indexer = e.indexer '
            'AND inner_e.airdate >= ? '
            'ORDER BY inner_e.airdate ASC LIMIT 1) '
            'AND e.status NOT IN (' + placeholder2 + ')',
            done_shows_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER
        )

        results += db.select(
            'SELECT %s ' % fields_to_select +
            'FROM tv_episodes e, tv_shows s '
            'WHERE season != 0 '
            'AND s.indexer_id = e.showid '
            'AND airdate < ? '
            'AND airdate >= ? '
            'AND e.status IN (?,?) '
            'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
            [today, recently, WANTED, UNAIRED] + qualities_list
        )

        results = [dict(result) for result in results]

        for index, item in enumerate(results):
            item['series_slug'] = str(SeriesIdentifier.from_id(int(item['indexer']), item['indexer_id']))
            results[index]['localtime'] = sbdatetime.convert_to_setting(
                parse_date_time(item['airdate'], item['airs'], item['network']))

        results.sort(key=ComingEpisodes.sorts[sort])

        if not group:
            return results

        grouped_results = ComingEpisodes._get_categories_map(categories)

        for result in results:
            if result['paused'] and not paused:
                continue

            result['airs'] = str(result['airs']).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
            result['airdate'] = result['localtime'].toordinal()

            if result['airdate'] < today:
                category = 'missed'
            elif result['airdate'] >= next_week:
                category = 'later'
            elif result['airdate'] == today:
                category = 'today'
            else:
                category = 'soon'

            if len(categories) > 0 and category not in categories:
                continue

            if not result['network']:
                result['network'] = ''

            result['quality'] = get_quality_string(result['quality'])
            result['airs'] = sbdatetime.sbftime(result['localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
            result['weekday'] = 1 + date.fromordinal(result['airdate']).weekday()
            result['tvdbid'] = result['indexer_id']
            result['airdate'] = sbdatetime.sbfdate(result['localtime'], d_preset=dateFormat)
            result['localtime'] = result['localtime'].toordinal()

            grouped_results[category].append(result)

        return grouped_results
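
The variable-length IN (...) clauses above are built with a placeholder-join idiom. A minimal, self-contained demonstration using the standard sqlite3 module:

import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE eps (id INTEGER, status INTEGER)')
con.executemany('INSERT INTO eps VALUES (?, ?)', [(1, 3), (2, 5), (3, 7)])

excluded = [5, 7]  # e.g. snatched/downloaded status codes
placeholders = ','.join(['?'] * len(excluded))
rows = con.execute(
    'SELECT id FROM eps WHERE status NOT IN (%s)' % placeholders,
    excluded).fetchall()
assert rows == [(1,)]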
Example #7
    def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_PAUSED):
        """
        :param categories: The categories of coming episodes. See ``ComingEpisodes.categories``
        :param sort: The sort to apply to the coming episodes. See ``ComingEpisodes.sorts``
        :param group: ``True`` to group the coming episodes by category, ``False`` otherwise
        :param paused: ``True`` to include paused shows, ``False`` otherwise
        :return: The list of coming episodes
        """
        categories = ComingEpisodes._get_categories(categories)
        sort = ComingEpisodes._get_sort(sort)

        today = date.today().toordinal()
        next_week = (date.today() + timedelta(days=7)).toordinal()
        recently = (date.today() - timedelta(days=app.COMING_EPS_MISSED_RANGE)).toordinal()
        status_list = [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER,
                       ARCHIVED, IGNORED]

        db = DBConnection()
        fields_to_select = ', '.join(
            ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer',
             'indexer_id', 'name', 'network', 'paused', 's.quality', 'runtime', 'season', 'show_name',
             'showid', 's.status']
        )
        results = db.select(
            'SELECT %s ' % fields_to_select +
            'FROM tv_episodes e, tv_shows s '
            'WHERE season != 0 '
            'AND airdate >= ? '
            'AND airdate < ? '
            'AND s.indexer = e.indexer '
            'AND s.indexer_id = e.showid '
            'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')',
            [today, next_week] + status_list
        )

        done_shows_list = [int(result['showid']) for result in results]
        placeholder = ','.join(['?'] * len(done_shows_list))
        placeholder2 = ','.join(['?'] * len([DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER]))

        # FIXME: This inner join is not multi indexer friendly.
        results += db.select(
            'SELECT %s ' % fields_to_select +
            'FROM tv_episodes e, tv_shows s '
            'WHERE season != 0 '
            'AND showid NOT IN (' + placeholder + ') '
            'AND s.indexer_id = e.showid '
            'AND airdate = (SELECT airdate '
            'FROM tv_episodes inner_e '
            'WHERE inner_e.season != 0 '
            'AND inner_e.showid = e.showid '
            'AND inner_e.indexer = e.indexer '
            'AND inner_e.airdate >= ? '
            'ORDER BY inner_e.airdate ASC LIMIT 1) '
            'AND e.status NOT IN (' + placeholder2 + ')',
            done_shows_list + [next_week] + [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER]
        )

        results += db.select(
            'SELECT %s ' % fields_to_select +
            'FROM tv_episodes e, tv_shows s '
            'WHERE season != 0 '
            'AND s.indexer_id = e.showid '
            'AND airdate < ? '
            'AND airdate >= ? '
            'AND e.status IN (?,?) '
            'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')',
            [today, recently, WANTED, UNAIRED] + status_list
        )

        for index, item in enumerate(results):
            item['series_slug'] = str(SeriesIdentifier.from_id(int(item['indexer']), item['indexer_id']))
            results[index]['localtime'] = sbdatetime.convert_to_setting(
                parse_date_time(item['airdate'], item['airs'], item['network']))

        results.sort(key=ComingEpisodes.sorts[sort])

        if not group:
            return results

        grouped_results = ComingEpisodes._get_categories_map(categories)

        for result in results:
            if result['paused'] and not paused:
                continue

            result['airs'] = str(result['airs']).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
            result['airdate'] = result['localtime'].toordinal()

            if result['airdate'] < today:
                category = 'missed'
            elif result['airdate'] >= next_week:
                category = 'later'
            elif result['airdate'] == today:
                category = 'today'
            else:
                category = 'soon'

            if len(categories) > 0 and category not in categories:
                continue

            if not result['network']:
                result['network'] = ''

            result['quality'] = get_quality_string(result['quality'])
            result['airs'] = sbdatetime.sbftime(result['localtime'], t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
            result['weekday'] = 1 + date.fromordinal(result['airdate']).weekday()
            result['tvdbid'] = result['indexer_id']
            result['airdate'] = sbdatetime.sbfdate(result['localtime'], d_preset=dateFormat)
            result['localtime'] = result['localtime'].toordinal()

            grouped_results[category].append(result)

        return grouped_results
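
A hypothetical grouped call; the category names come from the code above, while the 'date' sort key is an assumption about ComingEpisodes.sorts:

grouped = ComingEpisodes.get_coming_episodes(
    categories=['missed', 'today', 'soon', 'later'],
    sort='date', group=True)
for category in ('missed', 'today', 'soon', 'later'):
    print(category, len(grouped.get(category, [])))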