Example 1
    def fetch_popular_shows(self, list_type=REQUEST_HOT):
        """Get popular show information from IMDB."""
        series = []
        result = []

        try:
            series = Anidb(cache_dir=app.CACHE_DIR).get_list(list_type)
        except GeneralError as error:
            log.warning('Could not connect to AniDB service: {0}', error)

        for show in series:
            try:
                recommended_show = self._create_recommended_show(
                    show, storage_key=b'anidb_{0}'.format(show.aid))
                if recommended_show:
                    result.append(recommended_show)
            except MissingTvdbMapping:
                log.info(
                    'Could not parse AniDB show {0}, missing tvdb mapping',
                    show.title)
            except Exception:
                log.warning('Could not parse AniDB show, with exception: {0}',
                            traceback.format_exc())

        # Update the dogpile index. This will allow us to retrieve all stored dogpile shows from the dbm.
        update_recommended_series_cache_index(
            'anidb', [binary_type(s.series_id) for s in result])

        return result
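
The loop above uses a collect-what-parses pattern: each show is converted inside its own try/except, so one malformed AniDB entry is logged and skipped instead of aborting the whole fetch. Below is a minimal standalone sketch of that pattern using only the standard library; parse_show and the sample payloads are hypothetical and not part of Medusa.

import logging

log = logging.getLogger(__name__)


def parse_show(raw):
    """Hypothetical parser that fails on malformed entries (missing 'aid')."""
    return {'aid': raw['aid'], 'title': raw['title']}


def collect_shows(raw_shows):
    """Return every show that parses; log and skip the ones that do not."""
    result = []
    for raw in raw_shows:
        try:
            result.append(parse_show(raw))
        except Exception:
            log.warning('Could not parse show %r, skipping it', raw, exc_info=True)
    return result


if __name__ == '__main__':
    logging.basicConfig(level=logging.WARNING)
    print(collect_shows([{'aid': 1, 'title': 'A'}, {'title': 'missing aid'}]))
    # -> [{'aid': 1, 'title': 'A'}]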
Example 2
    def fetch_popular_shows(self, page_url=None, trakt_list=None):
        """Get a list of popular shows from different Trakt lists based on a provided trakt_list.

        :param page_url: the page url appended to the base api url, for retrieving a specific list
        :param trakt_list: a description of the trakt list
        :return: a tuple of (blacklist configured flag, list of RecommendedShow objects, list of tvdb ids in the Trakt library but not in Medusa)
        :raise: ``Exception`` if an exception is raised that is not handled by the trakt library exceptions
        """
        trending_shows = []
        removed_from_medusa = []

        # Create a trakt settings dict
        trakt_settings = {
            'trakt_api_secret': app.TRAKT_API_SECRET,
            'trakt_api_key': app.TRAKT_API_KEY,
            'trakt_access_token': app.TRAKT_ACCESS_TOKEN,
            'trakt_refresh_token': app.TRAKT_REFRESH_TOKEN
        }

        trakt_api = TraktApi(timeout=app.TRAKT_TIMEOUT,
                             ssl_verify=app.SSL_VERIFY,
                             **trakt_settings)

        try:
            not_liked_show = []
            if app.TRAKT_ACCESS_TOKEN != '':
                library_shows = self.fetch_and_refresh_token(trakt_api, 'sync/watched/shows?extended=noseasons') + \
                    self.fetch_and_refresh_token(trakt_api, 'sync/collection/shows?extended=full')

                medusa_shows = [
                    show.indexerid for show in app.showList if show.indexerid
                ]
                removed_from_medusa = [
                    lshow['show']['ids']['tvdb'] for lshow in library_shows
                    if lshow['show']['ids']['tvdb'] not in medusa_shows
                ]

                if app.TRAKT_BLACKLIST_NAME:
                    not_liked_show = trakt_api.request(
                        'users/' + app.TRAKT_USERNAME + '/lists/' +
                        app.TRAKT_BLACKLIST_NAME + '/items') or []
                else:
                    log.debug('Trakt blacklist name is empty')

            if trakt_list not in ['recommended', 'newshow', 'newseason']:
                limit_show = '?limit=' + text_type(100 +
                                                   len(not_liked_show)) + '&'
            else:
                limit_show = '?'

            series = self.fetch_and_refresh_token(
                trakt_api,
                page_url + limit_show + 'extended=full,images') or []

            # Let's trigger a cache cleanup.
            missing_posters.clean()

            for show in series:
                try:
                    if 'show' not in show:
                        show['show'] = show

                    # Skip shows the user has blacklisted on Trakt.
                    if not_liked_show and show['show']['ids']['tvdb'] in (
                            s['show']['ids']['tvdb']
                            for s in not_liked_show
                            if s['type'] == 'show'):
                        continue

                    trending_shows.append(
                        self._create_recommended_show(
                            show,
                            storage_key=b'trakt_{0}'.format(
                                show['show']['ids']['trakt'])))

                except MultipleShowObjectsException:
                    continue

            # Update the dogpile index. This will allow us to retrieve all stored dogpile shows from the dbm.
            update_recommended_series_cache_index(
                'trakt', [binary_type(s.series_id) for s in trending_shows])
            blacklist = bool(app.TRAKT_BLACKLIST_NAME)

        except TraktException as error:
            log.warning('Could not connect to Trakt service: {0}', error)
            raise

        return blacklist, trending_shows, removed_from_medusa
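
The blacklist check above walks the 'users/<name>/lists/<name>/items' payload with a generator expression for every show. Below is a self-contained sketch of that filtering step with made-up payload dicts shaped like the ones in the snippet; nothing here calls the real Trakt API. Building a set of blacklisted tvdb ids once avoids re-scanning the blacklist for each show.

def filter_blacklisted(series, not_liked_show):
    """Drop shows whose tvdb id appears in the user's blacklist payload."""
    blacklisted_ids = {
        s['show']['ids']['tvdb']
        for s in not_liked_show
        if s['type'] == 'show'
    }
    return [
        show for show in series
        if show['show']['ids']['tvdb'] not in blacklisted_ids
    ]


series = [
    {'show': {'ids': {'tvdb': 81189, 'trakt': 1}}},
    {'show': {'ids': {'tvdb': 73739, 'trakt': 2}}},
]
not_liked_show = [{'type': 'show', 'show': {'ids': {'tvdb': 73739}}}]

print(filter_blacklisted(series, not_liked_show))
# -> [{'show': {'ids': {'tvdb': 81189, 'trakt': 1}}}]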
Example 3
    def fetch_popular_shows(self):
        """Get popular show information from IMDB."""
        popular_shows = []

        imdb_result = self.imdb_api.get_popular_shows()

        for imdb_show in imdb_result['ranks']:
            series = {}
            imdb_id = series['imdb_tt'] = imdb_show['id'].strip('/').split(
                '/')[-1]

            if imdb_id:
                show_details = cached_get_imdb_series_details(imdb_id)
                if show_details:
                    try:
                        series['year'] = imdb_show.get('year')
                        series['name'] = imdb_show['title']
                        series['image_url_large'] = imdb_show['image']['url']
                        series['image_path'] = posixpath.join(
                            'images', 'imdb_popular',
                            os.path.basename(series['image_url_large']))
                        series['image_url'] = '{0}{1}'.format(
                            imdb_show['image']['url'].split('V1')[0],
                            '_SY600_AL_.jpg')
                        series['imdb_url'] = 'http://www.imdb.com{imdb_id}'.format(
                            imdb_id=imdb_show['id'])
                        series['votes'] = show_details['ratings'].get(
                            'ratingCount', 0)
                        series['outline'] = show_details['plot'].get(
                            'outline', {}).get('text')
                        series['rating'] = show_details['ratings'].get(
                            'rating', 0)
                    except Exception as error:
                        log.warning(
                            'Could not parse show {imdb_id} with error: {error!r}',
                            {
                                'imdb_id': imdb_id,
                                'error': error
                            })
                else:
                    continue

            if all([series.get('year'), series.get('name'),
                    series.get('imdb_tt')]):
                popular_shows.append(series)

        result = []
        for series in popular_shows:
            try:
                recommended_show = self._create_recommended_show(
                    series, storage_key=b'imdb_{0}'.format(series['imdb_tt']))
                if recommended_show:
                    result.append(recommended_show)
            except RequestException:
                log.warning(
                    u'Could not connect to indexers to check if you already have'
                    u' this show in your library: {show} ({year})', {
                        'show': series['name'],
                        'year': series['year']
                    })

        # Update the dogpile index. This will allow us to retrieve all stored dogpile shows from the dbm.
        update_recommended_series_cache_index(
            'imdb', [binary_type(s.series_id) for s in result])

        return result
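
Example 3 leans on two small string manipulations that are easy to verify in isolation: extracting the tt id from the show's 'id' path, and rebuilding the poster URL around the 'V1' size marker, plus deriving the local cache path from the poster's basename. Below is a standalone sketch with illustrative values; the sample URL is made up, not taken from a live IMDB response.

import os
import posixpath


def parse_imdb_id(raw_id):
    """Extract the ttNNNN id from an IMDB path such as '/title/tt0944947/'."""
    return raw_id.strip('/').split('/')[-1]


def resized_poster_url(image_url, suffix='_SY600_AL_.jpg'):
    """Keep everything before the 'V1' size marker and append a fixed-height suffix."""
    return '{0}{1}'.format(image_url.split('V1')[0], suffix)


def local_image_path(image_url):
    """Local cache path built from the basename of the remote poster URL."""
    return posixpath.join('images', 'imdb_popular', os.path.basename(image_url))


# Illustrative values only.
raw_id = '/title/tt0944947/'
image_url = 'https://example.com/M/poster._V1_UX182_AL_.jpg'

print(parse_imdb_id(raw_id))          # tt0944947
print(resized_poster_url(image_url))  # https://example.com/M/poster.__SY600_AL_.jpg
print(local_image_path(image_url))    # images/imdb_popular/poster._V1_UX182_AL_.jpg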