Example #1
    def test_pretty_file_size(self):
        test_cases = {
            None: '0.00 B',
            '': '0.00 B',
            '1024': '1.00 KB',
            '1024.5': '1.00 KB',
            -42.5: '0.00 B',
            -42: '0.00 B',
            0: '0.00 B',
            25: '25.00 B',
            25.5: '25.50 B',
            2**10: '1.00 KB',
            50 * 2**10 + 25: '50.02 KB',
            2**20: '1.00 MB',
            100 * 2**20 + 50 * 2**10 + 25: '100.05 MB',
            2**30: '1.00 GB',
            200 * 2**30 + 100 * 2**20 + 50 * 2**10 + 25: '200.10 GB',
            2**40: '1.00 TB',
            400 * 2**40 + 200 * 2**30 + 100 * 2**20 + 50 * 2**10 + 25: '400.20 TB',
            2**50: '1.00 PB',
            800 * 2**50 + 400 * 2**40 + 200 * 2**30 + 100 * 2**20 + 50 * 2**10 + 25: '800.39 PB',
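            # Sizes of 1 EB (2**60 bytes) and above exceed the largest
            # supported unit, so the raw size is expected back unchanged: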
            2**60: 2**60,
        }

        unicode_test_cases = {
            u'': '0.00 B',
            u'1024': '1.00 KB',
            u'1024.5': '1.00 KB',
        }

        for tests in test_cases, unicode_test_cases:
            for (size, result) in iteritems(tests):
                self.assertEqual(pretty_file_size(size), result)
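
For reference, here is a minimal implementation sketch consistent with the cases above (a hypothetical reconstruction -- the real Medusa helper may differ in its details):

def pretty_file_size(size, units=('B', 'KB', 'MB', 'GB', 'TB', 'PB')):
    # Invalid inputs (None, '') and negative sizes clamp to zero.
    try:
        size = max(float(size), 0.0)
    except (ValueError, TypeError):
        size = 0.0
    remaining = size
    for unit in units:
        if remaining < 1024.0:
            return '{0:.2f} {1}'.format(remaining, unit)
        remaining /= 1024.0
    # Past the largest unit (>= 2**60 bytes) the raw size is returned
    # unchanged, which is why the 2**60 test case maps to itself.
    return size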
Example #2
def get_provider_cache_results(indexer,
                               show_all_results=None,
                               perform_search=None,
                               show=None,
                               season=None,
                               episode=None,
                               manual_search_type=None,
                               **search_show):
    """Check all provider cache tables for search results."""
    down_cur_quality = 0
    show_obj = Show.find(app.showList, int(show))
    preferred_words = show_obj.show_words().preferred_words
    undesired_words = show_obj.show_words().undesired_words
    ignored_words = show_obj.show_words().ignored_words
    required_words = show_obj.show_words().required_words

    main_db_con = db.DBConnection('cache.db')

    provider_results = {
        'last_prov_updates': {},
        'error': {},
        'found_items': []
    }
    original_thread_name = threading.currentThread().name

    sql_total = []
    combined_sql_q = []
    combined_sql_params = []

    for cur_provider in enabled_providers('manualsearch'):
        threading.currentThread().name = '{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)

        # Let's check if this provider table already exists
        table_exists = main_db_con.select(
            "SELECT name "
            "FROM sqlite_master "
            "WHERE type='table'"
            " AND name=?", [cur_provider.get_id()])
        columns = [
            i[1] for i in main_db_con.select("PRAGMA table_info('{0}')".format(
                cur_provider.get_id()))
        ] if table_exists else []
        minseed = int(cur_provider.minseed) if getattr(cur_provider, 'minseed', None) else -1
        minleech = int(cur_provider.minleech) if getattr(cur_provider, 'minleech', None) else -1

        # TODO: the implicit sqlite rowid is used, should be replaced with an explicit PK column
        # If table doesn't exist, start a search to create table and new columns seeders, leechers and size
        required_columns = ['seeders', 'leechers', 'size', 'proper_tags']
        if table_exists and all(required_column in columns
                                for required_column in required_columns):
            # The default SQL, executed for each provider's cache table
            common_sql = (
                "SELECT rowid, ? AS 'provider_type', ? AS 'provider_image',"
                " ? AS 'provider', ? AS 'provider_id', ? 'provider_minseed',"
                " ? 'provider_minleech', name, season, episodes, indexerid,"
                " url, proper_tags, quality, release_group, version,"
                " seeders, leechers, size, time, pubdate "
                "FROM '{provider_id}' "
                "WHERE indexerid = ? AND quality > 0 ".format(
                    provider_id=cur_provider.get_id()))

            # Let's start by adding the default parameters, which are used to substitute the '?'s.
            add_params = [
                cur_provider.provider_type.title(),
                cur_provider.image_name(), cur_provider.name,
                cur_provider.get_id(), minseed, minleech, show
            ]

            if manual_search_type != 'season':
                # If we're not showing all results, filter on season + episode.
                if not int(show_all_results):
                    # If it's an episode search, pass season and episode.
                    common_sql += " AND season = ? AND episodes LIKE ? "
                    add_params += [season, "%|{0}|%".format(episode)]

            else:
                # If we're not showing all results, filter on the season and its episodes.
                if not int(show_all_results):
                    list_of_episodes = '{0}{1}'.format(
                        ' episodes LIKE ', ' AND episodes LIKE '.join(
                            ['?' for _ in show_obj.get_all_episodes(season)]))

                    common_sql += " AND season = ? AND (episodes LIKE ? OR {list_of_episodes})".format(
                        list_of_episodes=list_of_episodes)
                    add_params += [season,
                                   '||']  # When the episodes field is empty.
                    add_params += [
                        '%|{episode}|%'.format(episode=ep.episode)
                        for ep in show_obj.get_all_episodes(season)
                    ]

            # Add the constructed SQL and parameters to the lists used below for one big UNION query
            combined_sql_q.append(common_sql)
            combined_sql_params += add_params

            # Get the last updated cache items timestamp
            last_update = main_db_con.select(
                "SELECT max(time) AS lastupdate "
                "FROM '{provider_id}'".format(
                    provider_id=cur_provider.get_id()))
            provider_results['last_prov_updates'][cur_provider.get_id()] = (
                last_update[0]['lastupdate'] if last_update[0]['lastupdate'] else 0)

    # Check if we have the combined sql strings
    if combined_sql_q:
        sql_prepend = b"SELECT * FROM ("
        sql_append = b") ORDER BY CASE quality WHEN '{quality_unknown}' THEN -1 ELSE CAST(quality AS DECIMAL) END DESC, " \
                     b" proper_tags DESC, seeders DESC".format(quality_unknown=Quality.UNKNOWN)

        # Add all results
        sql_total += main_db_con.select(
            b'{0} {1} {2}'.format(sql_prepend,
                                  ' UNION ALL '.join(combined_sql_q),
                                  sql_append), combined_sql_params)

    # Always start a search when no items found in cache
    if not sql_total or int(perform_search):
        # retrieve the episode object and fail if we can't get one
        ep_obj = get_episode(show, season, episode)
        if isinstance(ep_obj, str):
            provider_results['error'] = (
                'Something went wrong when starting the manual search for show {0}, '
                'and episode: {1}x{2}'.format(show_obj.name, season, episode))

        # make a queue item for it and put it on the queue
        ep_queue_item = ForcedSearchQueueItem(ep_obj.series, [ep_obj],
                                              bool(int(down_cur_quality)),
                                              True, manual_search_type)  # pylint: disable=maybe-no-member

        app.forced_search_queue_scheduler.action.add_item(ep_queue_item)

        # give the CPU a break and some time to start the queue
        time.sleep(cpu_presets[app.CPU_PRESET])
    else:
        cached_results = [dict(row) for row in sql_total]
        for i in cached_results:
            i['quality_name'] = Quality.split_quality(int(i['quality']))
            i['time'] = datetime.fromtimestamp(i['time'])
            i['release_group'] = i['release_group'] or 'None'
            i['provider_img_link'] = 'images/providers/' + (
                i['provider_image'] or 'missing.png')
            i['provider'] = i['provider'] if i[
                'provider_image'] else 'missing provider'
            i['proper_tags'] = i['proper_tags'].replace('|', ', ')
            i['pretty_size'] = pretty_file_size(
                i['size']) if i['size'] > -1 else 'N/A'
            i['seeders'] = i['seeders'] if i['seeders'] >= 0 else '-'
            i['leechers'] = i['leechers'] if i['leechers'] >= 0 else '-'
            i['pubdate'] = sbdatetime.convert_to_setting(
                parser.parse(i['pubdate'])).strftime(
                    app.DATE_PRESET + ' ' +
                    app.TIME_PRESET) if i['pubdate'] else '-'
            release_group = i['release_group']
            if ignored_words and release_group in ignored_words:
                i['rg_highlight'] = 'ignored'
            elif required_words and release_group in required_words:
                i['rg_highlight'] = 'required'
            elif preferred_words and release_group in preferred_words:
                i['rg_highlight'] = 'preferred'
            elif undesired_words and release_group in undesired_words:
                i['rg_highlight'] = 'undesired'
            else:
                i['rg_highlight'] = ''
            if contains_at_least_one_word(i['name'], required_words):
                i['name_highlight'] = 'required'
            elif contains_at_least_one_word(
                    i['name'], ignored_words) or not filter_bad_releases(
                        i['name'], parse=False):
                i['name_highlight'] = 'ignored'
            elif contains_at_least_one_word(i['name'], undesired_words):
                i['name_highlight'] = 'undesired'
            elif contains_at_least_one_word(i['name'], preferred_words):
                i['name_highlight'] = 'preferred'
            else:
                i['name_highlight'] = ''
            i['seed_highlight'] = ('ignored' if i['seeders'] != '-'
                                   and i['provider_minseed'] > i['seeders'] else '')
            i['leech_highlight'] = ('ignored' if i['leechers'] != '-'
                                    and i['provider_minleech'] > i['leechers'] else '')
        provider_results['found_items'] = cached_results

    # Remove provider from the thread name before returning results
    threading.currentThread().name = original_thread_name

    # Sanitize the last_prov_updates key
    provider_results['last_prov_updates'] = json.dumps(
        provider_results['last_prov_updates'])
    return provider_results
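
As a usage sketch (argument values hypothetical; show is the indexer series id, as used in the code above):

results = get_provider_cache_results(
    indexer=1, show_all_results=0, perform_search=0,
    show='12345', season=1, episode=3, manual_search_type='episode')
found_items = results['found_items']  # list of row dicts decorated for the UI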
Example #3
def test_pretty_file_size(p):
    for (size, result) in iteritems(p):
        assert sut.pretty_file_size(size) == result
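
The p argument is presumably a parametrized pytest fixture carrying a mapping of inputs to expected strings; a hypothetical companion fixture (the real conftest may differ):

import pytest

@pytest.fixture(params=[
    {None: '0.00 B', '': '0.00 B', '1024': '1.00 KB', 2**20: '1.00 MB'},
])
def p(request):
    # Each param is one mapping of raw size -> expected pretty string.
    return request.param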
Example #4
def get_provider_cache_results(series_obj, show_all_results=None, perform_search=None,
                               season=None, episode=None, manual_search_type=None, **search_show):
    """Check all provider cache tables for search results."""
    down_cur_quality = 0
    preferred_words = series_obj.show_words().preferred_words
    undesired_words = series_obj.show_words().undesired_words
    ignored_words = series_obj.show_words().ignored_words
    required_words = series_obj.show_words().required_words

    main_db_con = db.DBConnection('cache.db')

    provider_results = {'last_prov_updates': {}, 'error': {}, 'found_items': []}
    original_thread_name = threading.currentThread().name

    cached_results_total = []
    combined_sql_q = []
    combined_sql_params = []

    for cur_provider in enabled_providers('manualsearch'):
        threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name, provider=cur_provider.name)

        # Let's check if this provider table already exists
        table_exists = main_db_con.select(
            'SELECT name '
            'FROM sqlite_master '
            "WHERE type='table'"
            ' AND name=?',
            [cur_provider.get_id()]
        )

        columns = []
        if table_exists:
            table_columns = main_db_con.select("PRAGMA table_info('{0}')".format(cur_provider.get_id()))
            columns = [table_column['name'] for table_column in table_columns]

        minseed = int(cur_provider.minseed) if getattr(cur_provider, 'minseed', None) else -1
        minleech = int(cur_provider.minleech) if getattr(cur_provider, 'minleech', None) else -1

        # TODO: the implicit sqlite rowid is used, should be replaced with an explicit PK column
        # If table doesn't exist, start a search to create table and new columns seeders, leechers and size
        required_columns = ['indexer', 'indexerid', 'seeders', 'leechers', 'size', 'proper_tags', 'date_added']
        if table_exists and all(required_column in columns for required_column in required_columns):
            # The default SQL, executed for each provider's cache table
            common_sql = (
                "SELECT rowid, ? AS 'provider_type', ? AS 'provider_image',"
                " ? AS 'provider', ? AS 'provider_id', ? 'provider_minseed',"
                " ? 'provider_minleech', name, season, episodes, indexer, indexerid,"
                ' url, proper_tags, quality, release_group, version,'
                ' seeders, leechers, size, time, pubdate, date_added '
                "FROM '{provider_id}' "
                'WHERE indexer = ? AND indexerid = ? AND quality > 0 '.format(
                    provider_id=cur_provider.get_id()
                )
            )

            # Let's start by adding the default parameters, which are used to substitute the '?'s.
            add_params = [cur_provider.provider_type.title(), cur_provider.image_name(),
                          cur_provider.name, cur_provider.get_id(), minseed, minleech,
                          series_obj.indexer, series_obj.series_id]

            if manual_search_type != 'season':
                # If we're not showing all results, filter on season + episode.
                if not int(show_all_results):
                    # If it's an episode search, pass season and episode.
                    common_sql += ' AND season = ? AND episodes LIKE ? '
                    add_params += [season, '%|{0}|%'.format(episode)]

            else:
                # If we're not showing all results, filter on the season and its episodes.
                if not int(show_all_results):
                    list_of_episodes = '{0}{1}'.format(' episodes LIKE ', ' AND episodes LIKE '.join(
                        ['?' for _ in series_obj.get_all_episodes(season)]
                    ))

                    common_sql += ' AND season = ? AND (episodes LIKE ? OR {list_of_episodes})'.format(
                        list_of_episodes=list_of_episodes
                    )
                    add_params += [season, '||']  # When the episodes field is empty.
                    add_params += ['%|{episode}|%'.format(episode=ep.episode) for ep in series_obj.get_all_episodes(season)]

            # Add the constructed SQL and parameters to the lists used below for one big UNION query
            combined_sql_q.append(common_sql)
            combined_sql_params += add_params

            # Get the last updated cache items timestamp
            last_update = main_db_con.select('SELECT max(time) AS lastupdate '
                                             "FROM '{provider_id}'".format(provider_id=cur_provider.get_id()))
            provider_results['last_prov_updates'][cur_provider.get_id()] = last_update[0]['lastupdate'] if last_update[0]['lastupdate'] else 0

    # Check if we have the combined sql strings
    if combined_sql_q:
        sql_prepend = 'SELECT * FROM ('
        sql_append = ') ORDER BY quality DESC, proper_tags DESC, seeders DESC'

        # Add all results
        cached_results_total += main_db_con.select(
            '{0} {1} {2}'.format(sql_prepend, ' UNION ALL '.join(combined_sql_q), sql_append),
            combined_sql_params)

    # Always start a search when no items found in cache
    if not cached_results_total or int(perform_search):
        # retrieve the episode object and fail if we can't get one
        ep_obj = series_obj.get_episode(season, episode)
        if isinstance(ep_obj, str):
            provider_results['error'] = (
                'Something went wrong when starting the manual search for show {0}, '
                'and episode: {1}x{2}'.format(series_obj.name, season, episode))

        # make a queue item for it and put it on the queue
        ep_queue_item = ForcedSearchQueueItem(ep_obj.series, [ep_obj], bool(int(down_cur_quality)), True, manual_search_type)  # pylint: disable=maybe-no-member

        app.forced_search_queue_scheduler.action.add_item(ep_queue_item)

        # give the CPU a break and some time to start the queue
        time.sleep(cpu_presets[app.CPU_PRESET])
    else:
        for i in cached_results_total:
            threading.currentThread().name = '{thread} :: [{provider}]'.format(
                thread=original_thread_name, provider=i['provider'])

            i['quality_name'] = Quality.split_quality(int(i['quality']))
            i['time'] = datetime.fromtimestamp(i['time'])
            i['release_group'] = i['release_group'] or 'None'
            i['provider_img_link'] = 'images/providers/' + (i['provider_image'] or 'missing.png')
            i['provider'] = i['provider'] if i['provider_image'] else 'missing provider'
            i['proper_tags'] = i['proper_tags'].replace('|', ', ')
            i['pretty_size'] = pretty_file_size(i['size']) if i['size'] > -1 else 'N/A'
            i['seeders'] = i['seeders'] if i['seeders'] >= 0 else '-'
            i['leechers'] = i['leechers'] if i['leechers'] >= 0 else '-'
            i['pubdate'] = parser.parse(i['pubdate']).astimezone(app_timezone) if i['pubdate'] else ''
            i['date_added'] = datetime.fromtimestamp(float(i['date_added']), tz=app_timezone) if i['date_added'] else ''

            release_group = i['release_group']
            if ignored_words and release_group in ignored_words:
                i['rg_highlight'] = 'ignored'
            elif required_words and release_group in required_words:
                i['rg_highlight'] = 'required'
            elif preferred_words and release_group in preferred_words:
                i['rg_highlight'] = 'preferred'
            elif undesired_words and release_group in undesired_words:
                i['rg_highlight'] = 'undesired'
            else:
                i['rg_highlight'] = ''
            if contains_at_least_one_word(i['name'], required_words):
                i['name_highlight'] = 'required'
            elif contains_at_least_one_word(i['name'], ignored_words) or not filter_bad_releases(i['name'], parse=False):
                i['name_highlight'] = 'ignored'
            elif contains_at_least_one_word(i['name'], undesired_words):
                i['name_highlight'] = 'undesired'
            elif contains_at_least_one_word(i['name'], preferred_words):
                i['name_highlight'] = 'preferred'
            else:
                i['name_highlight'] = ''

            i['seed_highlight'] = 'ignored'
            if i['seeders'] == '-' or i['provider_minseed'] <= i['seeders']:
                i['seed_highlight'] = ''

            i['leech_highlight'] = 'ignored'
            if i['leechers'] == '-' or i['provider_minleech'] <= i['leechers']:
                i['leech_highlight'] = ''

        provider_results['found_items'] = cached_results_total

    # Remove provider from the thread name before returning results
    threading.currentThread().name = original_thread_name

    # Sanitize the last_prov_updates key
    provider_results['last_prov_updates'] = json.dumps(provider_results['last_prov_updates'])
    return provider_results
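
A usage sketch for this newer, series_obj-based signature (values hypothetical; series_obj is assumed to be an existing Series instance, e.g. looked up from app.showList):

results = get_provider_cache_results(
    series_obj, show_all_results=0, perform_search=0,
    season=1, episode=3, manual_search_type='episode')
print(results['last_prov_updates'])  # JSON-encoded per-provider cache timestamps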