Example #1
    def update_local(self, asset_id, resumepoint_json, menu_caches=None):
        """Update resumepoint locally and update cache"""
        self._data.update({asset_id: resumepoint_json})
        from json import dumps
        update_cache('resume_points.json', dumps(self._data))
        if menu_caches:
            invalidate_caches(*menu_caches)
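Every example in this list leans on the same get_cache / update_cache / invalidate_caches helpers, whose definitions are not shown. Below is a minimal sketch of what such file-based helpers could look like, assuming a flat cache directory and mtime-based TTLs; note that some snippets pass a pre-serialized JSON string to update_cache while others pass raw objects, and this sketch assumes a string:

import fnmatch
import os
import time
from json import load

CACHE_DIR = '/tmp/plugin_cache'  # assumed location, not the plugin's real profile dir

def get_cache(cache_file, ttl=None, cache_dir=CACHE_DIR):
    """Return parsed JSON from cache, or None when missing or older than ttl seconds"""
    path = os.path.join(cache_dir, cache_file)
    if not os.path.exists(path):
        return None
    if ttl is not None and time.time() - os.path.getmtime(path) > ttl:
        return None
    with open(path) as fdesc:
        return load(fdesc)

def update_cache(cache_file, data, cache_dir=CACHE_DIR):
    """Write a JSON string to the given cache file"""
    if not os.path.isdir(cache_dir):
        os.makedirs(cache_dir)
    with open(os.path.join(cache_dir, cache_file), 'w') as fdesc:
        fdesc.write(data)

def invalidate_caches(*patterns):
    """Delete every cache file whose name matches one of the glob patterns"""
    if not os.path.isdir(CACHE_DIR):
        return
    for filename in os.listdir(CACHE_DIR):
        if any(fnmatch.fnmatch(filename, pattern) for pattern in patterns):
            os.remove(os.path.join(CACHE_DIR, filename))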
Example #2
    def update_watchlater(self, episode_id, title, watch_later=None):
        """Set program watchLater status and update local copy"""

        self.refresh_watchlater(ttl=5)

        # Update
        log(3,
            "[watchLater] Update {episode_id} watchLater status",
            episode_id=episode_id)

        # watchLater status is not changed, nothing to do
        if watch_later is not None and watch_later is self.is_watchlater(
                episode_id):
            return True

        # Update local watch_later cache
        if watch_later is True:
            self._watchlater[episode_id] = dict(title=title)
        elif episode_id in self._watchlater:
            del self._watchlater[episode_id]

        # Update cache
        from json import dumps
        update_cache(self.WATCHLATER_CACHE_FILE, dumps(self._watchlater))
        invalidate_caches('watchlater-*.json')

        # Update online
        self.set_watchlater_graphql(episode_id, title, watch_later)

        return True
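The is_watchlater check above is not part of the snippet; given how _watchlater is maintained, a plausible one-line helper would be:

    def is_watchlater(self, episode_id):
        """Whether episode_id is in the local watchLater dict (assumed helper)"""
        return episode_id in self._watchlater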
Example #3
    def refresh(self, ttl=None):
        ''' Get a cached copy or newer resumepoints from VRT, or fall back to a cached file '''
        if not self.is_activated():
            return
        resumepoints_json = get_cache('resume_points.json', ttl)
        if not resumepoints_json:
            from tokenresolver import TokenResolver
            xvrttoken = TokenResolver().get_xvrttoken(token_variant='user')
            if xvrttoken:
                headers = {
                    'authorization': 'Bearer ' + xvrttoken,
                    'content-type': 'application/json',
                    'Referer': 'https://www.vrt.be/vrtnu',
                }
                req = Request('https://video-user-data.vrt.be/resume_points', headers=headers)
                log(2, 'URL get: https://video-user-data.vrt.be/resume_points')
                from json import load
                try:
                    resumepoints_json = load(urlopen(req))
                except (TypeError, ValueError):  # No JSON object could be decoded
                    # Force resumepoints from cache
                    resumepoints_json = get_cache('resume_points.json', ttl=None)
                else:
                    update_cache('resume_points.json', resumepoints_json)
        if resumepoints_json:
            self._resumepoints = resumepoints_json
Example #4
    def update(self, program_name, title, program_id, is_favorite=True):
        """Set a program as favorite, and update local copy"""

        # Survive any recent updates
        self.refresh(ttl=5)

        if is_favorite is self.is_favorite(program_name):
            # Already followed/unfollowed, nothing to do
            return True

        # Lookup program_id
        if program_id == 'None' or program_id is None:
            program_id = self.get_program_id_graphql(program_name)

        # Update local favorites cache
        if is_favorite is True:
            self._favorites[program_name] = dict(program_id=program_id,
                                                 title=title)
        else:
            del self._favorites[program_name]

        # Update cache dict
        from json import dumps
        update_cache(self.FAVORITES_CACHE_FILE, dumps(self._favorites))
        invalidate_caches('my-offline-*.json', 'my-recent-*.json')

        # Update online
        self.set_favorite_graphql(program_id, title, is_favorite)
        return True
Example #5
    def get_tvshows(self, category=None, channel=None, feature=None):
        ''' Get all TV shows for a given category, channel or feature, optionally filtered by favorites '''
        params = dict()

        if category:
            params['facets[categories]'] = category
            cache_file = 'category.%s.json' % category

        if channel:
            params['facets[programBrands]'] = channel
            cache_file = 'channel.%s.json' % channel

        if feature:
            params['facets[programTags.title]'] = feature
            cache_file = 'featured.%s.json' % feature

        # If no facet-selection is done, we return the 'All programs' listing
        if not category and not channel and not feature:
            params['facets[transcodingStatus]'] = 'AVAILABLE'  # Required for getting results in Suggests API
            cache_file = 'programs.json'
        tvshows = get_cache(cache_file, ttl=60 * 60)  # Try the cache if it is fresh
        if not tvshows:
            from json import load
            querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
            suggest_url = self._VRTNU_SUGGEST_URL + '?' + querystring
            log(2, 'URL get: {url}', url=unquote(suggest_url))
            tvshows = load(urlopen(suggest_url))
            update_cache(cache_file, tvshows)

        return tvshows
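The querystring is a plain '&'-join of the facet parameters, without URL-encoding. For example, with a hypothetical category id:

params = {'facets[categories]': 'cultuur', 'facets[transcodingStatus]': 'AVAILABLE'}
querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
# facets[categories]=cultuur&facets[transcodingStatus]=AVAILABLE (insertion order in Python 3.7+)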
Example #6
    def get_token(self, name, variant=None, url=None, roaming=False):
        """Get a token"""
        # Try to get a cached token
        if not roaming:
            cache_file = self._get_token_filename(name, variant)
            token = get_cache(cache_file, cache_dir=self._TOKEN_CACHE_DIR)
            if token:
                return token.get(name)
        # Try to refresh a token
        if variant != 'roaming' and name in ('X-VRT-Token', 'vrtlogin-at', 'vrtlogin-rt'):
            cache_file = self._get_token_filename('vrtlogin-rt')
            refresh_token = get_cache(cache_file, cache_dir=self._TOKEN_CACHE_DIR)
            if refresh_token:
                token = self._get_fresh_token(refresh_token.get('vrtlogin-rt'), name)
                if token:
                    # Save token to cache
                    from json import dumps
                    cache_file = self._get_token_filename(list(token.keys())[0], variant)
                    update_cache(cache_file, dumps(token), self._TOKEN_CACHE_DIR)
                    return token.get(name)
        # Get a new token
        token = self._get_new_token(name, variant, url, roaming)
        if token:
            # Save token to cache
            from json import dumps
            cache_file = self._get_token_filename(list(token.keys())[0], variant)
            update_cache(cache_file, dumps(token), self._TOKEN_CACHE_DIR)
            return token.get(name)
        return None
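_get_token_filename is not shown; a sketch of a filename-mangling helper consistent with how it is called here (the actual naming scheme is an assumption):

    def _get_token_filename(self, name, variant=None):
        """Map a token name and optional variant to a cache filename (assumed scheme)"""
        prefix = variant + '_' if variant else ''
        return prefix + name.replace('-', '').lower() + '.tkn'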
Example #7
    def playing_now(self, channel):
        ''' Return the EPG information for what is playing now '''
        now = datetime.now(dateutil.tz.tzlocal())
        epg = now
        # Daily EPG information shows information from 6AM until 6AM
        if epg.hour < 6:
            epg += timedelta(days=-1)
        # Try the cache if it is fresh
        schedule = get_cache('schedule.today.json', ttl=60 * 60)
        if not schedule:
            from json import load
            epg_url = epg.strftime(self.VRT_TVGUIDE)
            log(2, 'URL get: {url}', url=epg_url)
            schedule = load(urlopen(epg_url))
            update_cache('schedule.today.json', schedule)

        entry = find_entry(CHANNELS, 'name', channel)
        if not entry:
            return ''

        episodes = iter(schedule.get(entry.get('id'), []))

        while True:
            try:
                episode = next(episodes)
            except StopIteration:
                break
            start_date = dateutil.parser.parse(episode.get('startTime'))
            end_date = dateutil.parser.parse(episode.get('endTime'))
            if start_date <= now <= end_date:  # Now playing
                return episode.get('title')
        return ''
Example #8
    def _get_stream_json(self, api_data, roaming=False):
        """Get JSON with stream details from VRT API"""
        if not api_data:
            return None

        # Try cache for livestreams
        if api_data.is_live_stream and not roaming:
            filename = api_data.video_id + '.json'
            data = get_cache(filename)
            if data:
                return data

        token_url = api_data.media_api_url + '/tokens'
        if api_data.is_live_stream:
            playertoken = self._tokenresolver.get_token('vrtPlayerToken', 'live', token_url, roaming=roaming)
        else:
            playertoken = self._tokenresolver.get_token('vrtPlayerToken', 'ondemand', token_url, roaming=roaming)

        # Construct api_url and get video json
        if not playertoken:
            return None
        api_url = api_data.media_api_url + '/videos/' + api_data.publication_id + \
            api_data.video_id + '?vrtPlayerToken=' + playertoken + '&client=' + api_data.client

        stream_json = get_url_json(url=api_url)

        # Update livestream cache if we have a livestream
        if stream_json and api_data.is_live_stream:
            from json import dumps
            # Warning: Currently, the drmExpired key in the stream_json cannot be used because it provides a wrong 6 hour ttl for the VUDRM tokens.
            # After investigation these tokens seem to have an expiration time of only two hours, so we set the expirationDate value accordingly.
            stream_json.update(expirationDate=generate_expiration_date(hours=2), vualto_license_url=self._get_vualto_license_url())
            cache_file = api_data.video_id + '.json'
            update_cache(cache_file, dumps(stream_json))
        return stream_json
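generate_expiration_date stamps the cached stream JSON with its own TTL; a minimal sketch consistent with its use here, assuming an ISO-8601 timestamp:

from datetime import datetime, timedelta

def generate_expiration_date(hours=2):
    """Return a UTC timestamp `hours` from now as an ISO-8601 string (assumed format)"""
    return (datetime.utcnow() + timedelta(hours=hours)).isoformat()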
Example #9
    def delete_local(self, asset_id, menu_caches=None):
        """Delete resumepoint locally and update cache"""
        if asset_id in self._data:
            del self._data[asset_id]
            from json import dumps
            update_cache('resume_points.json', dumps(self._data))
            if menu_caches:
                invalidate_caches(*menu_caches)
Example #10
    def get_episode_items(self, date, channel):
        ''' Show episodes for a given date and channel '''
        now = datetime.now(dateutil.tz.tzlocal())
        epg = self.parse(date, now)
        epg_url = epg.strftime(self.VRT_TVGUIDE)

        self._favorites.refresh(ttl=60 * 60)

        cache_file = 'schedule.%s.json' % date
        if date in ('today', 'yesterday', 'tomorrow'):
            # Try the cache if it is fresh
            schedule = get_cache(cache_file, ttl=60 * 60)
            if not schedule:
                from json import load
                log(2, 'URL get: {url}', url=epg_url)
                schedule = load(urlopen(epg_url))
                update_cache(cache_file, schedule)
        else:
            from json import load
            log(2, 'URL get: {url}', url=epg_url)
            schedule = load(urlopen(epg_url))

        entry = find_entry(CHANNELS, 'name', channel)
        if entry:
            episodes = schedule.get(entry.get('id'), [])
        else:
            episodes = []
        episode_items = []
        for episode in episodes:

            label = self._metadata.get_label(episode)

            context_menu = []
            path = None
            if episode.get('url'):
                from statichelper import add_https_method, url_to_program
                video_url = add_https_method(episode.get('url'))
                path = url_for('play_url', video_url=video_url)
                program = url_to_program(episode.get('url'))
                context_menu, favorite_marker, watchlater_marker = self._metadata.get_context_menu(episode, program, cache_file)
                label += favorite_marker + watchlater_marker

            info_labels = self._metadata.get_info_labels(episode, date=date, channel=entry)
            info_labels['title'] = label

            episode_items.append(TitleItem(
                title=label,
                path=path,
                art_dict=self._metadata.get_art(episode),
                info_dict=info_labels,
                is_playable=True,
                context_menu=context_menu,
            ))
        return episode_items
Example #11
    def refresh(self, ttl=None):
        """Get a cached copy or newer favorites from VRT, or fall back to a cached file"""
        if not self.is_activated():
            return
        favorites_dict = get_cache(self.FAVORITES_CACHE_FILE, ttl)
        if not favorites_dict:
            favorites_dict = self._generate_favorites_dict(self.get_favorites())
        if favorites_dict is not None:
            from json import dumps
            self._favorites = favorites_dict
            update_cache(self.FAVORITES_CACHE_FILE, dumps(self._favorites))
Example #12
    def refresh_watchlater(self, ttl=None):
        """Get a cached copy or a newer watchLater list from VRT, or fall back to a cached file"""
        if not self.is_activated():
            return
        watchlater_dict = get_cache(self.WATCHLATER_CACHE_FILE, ttl)
        if not watchlater_dict:
            watchlater_dict = self._generate_watchlater_dict(self.get_watchlater())
        if watchlater_dict is not None:
            from json import dumps
            self._watchlater = watchlater_dict
            update_cache(self.WATCHLATER_CACHE_FILE, dumps(self._watchlater))
Example #13
    def _get_vualto_license_url(self):
        """Get Widevine license URL from Vualto API"""
        # Try cache
        data = get_cache('vualto_license_url.json')
        if data:
            return data.get('la_url')

        vualto_license_url = get_url_json(url=self._VUPLAY_API_URL, fail={}).get('drm_providers', {}).get('widevine', {})
        if vualto_license_url:
            from json import dumps
            vualto_license_url.update(expirationDate=generate_expiration_date(hours=168))
            update_cache('vualto_license_url.json', dumps(vualto_license_url))
        return vualto_license_url.get('la_url')
Example #14
def get_video_attributes(vrtnu_url):
    """Return a dictionary with video attributes by scraping the VRT NU website"""

    # Get cache
    cache_file = 'web_video_attrs_multi.json'
    video_attrs_multi = get_cache(cache_file, ttl=ttl('indirect'))
    if not video_attrs_multi:
        video_attrs_multi = {}
    if vrtnu_url in video_attrs_multi:
        return video_attrs_multi[vrtnu_url]

    # Scrape video attributes
    from bs4 import BeautifulSoup, SoupStrainer
    try:
        response = open_url(vrtnu_url, raise_errors='all')
    except HTTPError as exc:
        log_error('Web scraping video attributes failed: {error}', error=exc)
        return None
    if response is None:
        return None
    html_page = response.read()
    strainer = SoupStrainer(
        ['section', 'div'],
        {'class': ['video-detail__player', 'livestream__inner']})
    soup = BeautifulSoup(html_page, 'html.parser', parse_only=strainer)
    item = None
    epg_channel = None
    if '#epgchannel=' in vrtnu_url:
        epg_channel = vrtnu_url.split('#epgchannel=')[1]
    for item in soup:
        if epg_channel and epg_channel == item.get('data-epgchannel'):
            break
    if not epg_channel and len(soup) > 1:
        return None
    try:
        video_attrs = item.find(name='nui-media').attrs
    except AttributeError as exc:
        log_error('Web scraping video attributes failed: {error}', error=exc)
        return None

    # Update cache (the early return above guarantees vrtnu_url is not cached yet)
    video_attrs_multi[vrtnu_url] = video_attrs
    from json import dumps
    update_cache(cache_file, dumps(video_attrs_multi))

    return video_attrs
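The ttl('indirect') call maps a cache-kind name to a lifetime in seconds; a plausible lookup helper (the names and values are assumptions):

def ttl(kind='direct'):
    """Return a cache lifetime in seconds for the given kind (assumed values)"""
    return dict(direct=5 * 60, indirect=60 * 60).get(kind, 5 * 60)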
Example #15
    def live_description(self, channel):
        ''' Return the EPG information for current and next live program '''
        now = datetime.now(dateutil.tz.tzlocal())
        epg = now
        # Daily EPG information shows information from 6AM until 6AM
        if epg.hour < 6:
            epg += timedelta(days=-1)
        # Try the cache if it is fresh
        schedule = get_cache('schedule.today.json', ttl=60 * 60)
        if not schedule:
            from json import load
            epg_url = epg.strftime(self.VRT_TVGUIDE)
            log(2, 'URL get: {url}', url=epg_url)
            schedule = load(urlopen(epg_url))
            update_cache('schedule.today.json', schedule)

        entry = find_entry(CHANNELS, 'name', channel)
        if not entry:
            return ''

        episodes = iter(schedule.get(entry.get('id'), []))

        description = ''
        episode = None  # Guard against an empty schedule
        while True:
            try:
                episode = next(episodes)
            except StopIteration:
                break
            start_date = dateutil.parser.parse(episode.get('startTime'))
            end_date = dateutil.parser.parse(episode.get('endTime'))
            if start_date <= now <= end_date:  # Now playing
                description = '[COLOR yellow][B]%s[/B] %s[/COLOR]\n' % (localize(30421), self.episode_description(episode))
                try:
                    description += '[B]%s[/B] %s' % (localize(30422), self.episode_description(next(episodes)))
                except StopIteration:
                    break
                break
            if now < start_date:  # Nothing playing now, but this may be next
                description = '[B]%s[/B] %s\n' % (localize(30422), self.episode_description(episode))
                try:
                    description += '[B]%s[/B] %s' % (localize(30422), self.episode_description(next(episodes)))
                except StopIteration:
                    break
                break
        if not description and episode is not None:
            # Add a final 'No transmission' program
            description = '[COLOR yellow][B]%s[/B] %s - 06:00\n» %s[/COLOR]' % (localize(30421), episode.get('end'), localize(30423))
        return description
Example #16
    def update(self, program, title, value=True):
        """Set a program as favorite, and update local copy"""

        # Survive any recent updates
        self.refresh(ttl=5)

        if value is self.is_favorite(program):
            # Already followed/unfollowed, nothing to do
            return True

        from tokenresolver import TokenResolver
        xvrttoken = TokenResolver().get_xvrttoken(token_variant='user')
        if xvrttoken is None:
            log_error('Failed to get favorites token from VRT NU')
            notification(message=localize(30975))
            return False

        headers = {
            'authorization': 'Bearer ' + xvrttoken,
            'content-type': 'application/json',
            'Referer': 'https://www.vrt.be/vrtnu',
        }

        from json import dumps
        from utils import program_to_url
        payload = dict(isFavorite=value,
                       programUrl=program_to_url(program, 'short'),
                       title=title)
        data = dumps(payload).encode('utf-8')
        program_id = program_to_id(program)
        try:
            get_url_json(
                'https://video-user-data.vrt.be/favorites/{program_id}'.format(
                    program_id=program_id),
                headers=headers,
                data=data)
        except HTTPError as exc:
            log_error(
                "Failed to (un)follow program '{program}' at VRT NU ({error})",
                program=program,
                error=exc)
            notification(message=localize(30976, program=program))
            return False
        # NOTE: Updates to favorites take a longer time to take effect, so we keep our own cache and use it
        self._data[program_id] = dict(value=payload)
        update_cache('favorites.json', dumps(self._data))
        invalidate_caches('my-offline-*.json', 'my-recent-*.json')
        return True
Example #17
    def _get_usertoken(self, name=None, login_json=None, roaming=False):
        """Get a user X-VRT-Token, vrtlogin-at, vrtlogin-expiry, vrtlogin-rt, SESSION, OIDCXSRF or state token"""
        if not login_json:
            login_json = self._get_login_json()
        cookiejar = cookielib.CookieJar()
        open_url(self._USER_TOKEN_GATEWAY_URL, cookiejar=cookiejar)
        xsrf = next(
            (cookie for cookie in cookiejar if cookie.name == 'OIDCXSRF'),
            None)
        if xsrf is None:
            return None
        payload = dict(UID=login_json.get('UID'),
                       UIDSignature=login_json.get('UIDSignature'),
                       signatureTimestamp=login_json.get('signatureTimestamp'),
                       client_id='vrtnu-site',
                       _csrf=xsrf.value)
        data = urlencode(payload).encode()
        response = open_url(self._VRT_LOGIN_URL,
                            data=data,
                            cookiejar=cookiejar)
        if response is None:
            return None

        destination = response.geturl()
        usertoken = TokenResolver._create_token_dictionary(cookiejar, name)
        if not usertoken and not destination.startswith(
                'https://www.vrt.be/vrtnu'):
            if roaming is False:
                ok_dialog(heading=localize(30970), message=localize(30972))
            return None

        # Cache additional tokens for later use
        refreshtoken = TokenResolver._create_token_dictionary(
            cookiejar, cookie_name='vrtlogin-rt')
        accesstoken = TokenResolver._create_token_dictionary(
            cookiejar, cookie_name='vrtlogin-at')
        if refreshtoken is not None:
            from json import dumps
            cache_file = self._get_token_filename('vrtlogin-rt')
            update_cache(cache_file, dumps(refreshtoken),
                         self._TOKEN_CACHE_DIR)
        if accesstoken is not None:
            from json import dumps
            cache_file = self._get_token_filename('vrtlogin-at')
            update_cache(cache_file, dumps(accesstoken), self._TOKEN_CACHE_DIR)
        return usertoken
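_create_token_dictionary is used twice above to turn a named cookie into a token dict; a sketch of what it could do (cookie expiry handling is omitted, and the behaviour for cookie_name=None is an assumption):

    @staticmethod
    def _create_token_dictionary(cookiejar, cookie_name=None):
        """Return a {name: value} dict for the first matching cookie, or None (assumed helper)"""
        for cookie in cookiejar:
            if cookie_name is None or cookie.name == cookie_name:
                return {cookie.name: cookie.value}
        return None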
Example #18
    def get_categories(self):
        """Return a list of categories"""
        cache_file = 'categories.json'

        # Try the cache if it is fresh
        categories = get_cache(cache_file, ttl=7 * 24 * 60 * 60)
        if self.valid_categories(categories):
            return categories

        # Try online categories json
        categories = self.get_online_categories()
        if self.valid_categories(categories):
            from json import dumps
            update_cache(cache_file, dumps(categories))
            return categories

        # Fall back to internal hard-coded categories
        from data import CATEGORIES
        log(2, 'Fall back to internal hard-coded categories')
        return CATEGORIES
Example #19
    def _get_stream_json(self, api_data, roaming=False):
        """Get JSON with stream details from VRT API"""
        if not api_data:
            return None

        # Try cache for livestreams
        if api_data.is_live_stream and not roaming:
            filename = api_data.video_id + '.json'
            data = get_cache(filename)
            if data:
                return data

        token_url = api_data.media_api_url + '/tokens'
        if api_data.is_live_stream:
            playertoken = self._tokenresolver.get_token('vrtPlayerToken',
                                                        'live',
                                                        token_url,
                                                        roaming=roaming)
        else:
            playertoken = self._tokenresolver.get_token('vrtPlayerToken',
                                                        'ondemand',
                                                        token_url,
                                                        roaming=roaming)

        # Construct api_url and get video json
        if not playertoken:
            return None
        api_url = api_data.media_api_url + '/videos/' + api_data.publication_id + \
            api_data.video_id + '?vrtPlayerToken=' + playertoken + '&client=' + api_data.client

        stream_json = get_url_json(url=api_url)
        if stream_json and api_data.is_live_stream:
            from json import dumps
            exp = stream_json.get('drmExpired') or generate_expiration_date()
            vualto_license_url = self._get_vualto_license_url().get('la_url')
            stream_json.update(expirationDate=exp,
                               vualto_license_url=vualto_license_url)
            cache_file = api_data.video_id + '.json'
            update_cache(cache_file, dumps(stream_json))
        return stream_json
Example #20
    def update(self, program, title, value=True):
        ''' Set a program as favorite, and update local copy '''

        self.refresh(ttl=60)
        if value is self.is_favorite(program):
            # Already followed/unfollowed, nothing to do
            return True

        from tokenresolver import TokenResolver
        xvrttoken = TokenResolver().get_xvrttoken(token_variant='user')
        if xvrttoken is None:
            log_error('Failed to get favorites token from VRT NU')
            notification(message=localize(30975))
            return False

        headers = {
            'authorization': 'Bearer ' + xvrttoken,
            'content-type': 'application/json',
            'Referer': 'https://www.vrt.be/vrtnu',
        }

        from statichelper import program_to_url
        payload = dict(isFavorite=value, programUrl=program_to_url(program, 'short'), title=title)
        from json import dumps
        data = dumps(payload).encode('utf-8')
        program_uuid = self.program_to_uuid(program)
        log(2, 'URL post: https://video-user-data.vrt.be/favorites/{uuid}', uuid=program_uuid)
        req = Request('https://video-user-data.vrt.be/favorites/%s' % program_uuid, data=data, headers=headers)
        result = urlopen(req)
        if result.getcode() != 200:
            log_error("Failed to (un)follow program '{program}' at VRT NU", program=program)
            notification(message=localize(30976, program=program))
            return False
        # NOTE: Updates to favorites take a longer time to take effect, so we keep our own cache and use it
        self._favorites[program_uuid] = dict(value=payload)
        update_cache('favorites.json', self._favorites)
        invalidate_caches('my-offline-*.json', 'my-recent-*.json')
        return True
Example #21
    def list_categories(self):
        ''' Construct a list of category ListItems '''
        categories = []

        # Try the cache if it is fresh
        categories = get_cache('categories.json', ttl=7 * 24 * 60 * 60)

        # Try to scrape from the web
        if not categories:
            try:
                categories = self.get_categories()
            except Exception:  # pylint: disable=broad-except
                categories = []
            else:
                update_cache('categories.json', categories)

        # Use the cache anyway (better than hard-coded)
        if not categories:
            categories = get_cache('categories.json', ttl=None)

        # Fall back to internal hard-coded categories if all else fails
        from data import CATEGORIES
        if not categories:
            categories = CATEGORIES

        category_items = []
        for category in self.localize_categories(categories, CATEGORIES):
            if get_setting('showfanart', 'true') == 'true':
                thumbnail = category.get('thumbnail', 'DefaultGenre.png')
            else:
                thumbnail = 'DefaultGenre.png'
            category_items.append(TitleItem(
                title=category.get('name'),
                path=url_for('categories', category=category.get('id')),
                art_dict=dict(thumb=thumbnail, icon='DefaultGenre.png'),
                info_dict=dict(plot='[B]%s[/B]' % category.get('name'), studio='VRT'),
            ))
        return category_items
Example #22
def get_categories():
    """Return a list of categories by scraping the VRT NU website"""

    cache_file = 'categories.json'
    categories = []

    # Try the cache if it is fresh
    categories = get_cache(cache_file, ttl=7 * 24 * 60 * 60)

    # Try to scrape from the web
    if not valid_categories(categories):
        from bs4 import BeautifulSoup, SoupStrainer
        log(2, 'URL get: https://www.vrt.be/vrtnu/categorieen/')
        response = urlopen('https://www.vrt.be/vrtnu/categorieen/')
        tiles = SoupStrainer('nui-list--content')
        soup = BeautifulSoup(response.read(), 'html.parser', parse_only=tiles)

        categories = []
        for tile in soup.find_all('nui-tile'):
            categories.append(dict(
                id=tile.get('href').split('/')[-2],
                thumbnail=get_category_thumbnail(tile),
                name=get_category_title(tile),
            ))
        if categories:
            from json import dumps
            update_cache('categories.json', dumps(categories))

    # Use the cache anyway (better than hard-coded)
    if not valid_categories(categories):
        categories = get_cache(cache_file, ttl=None)

    # Fall back to internal hard-coded categories if all else fails
    if not valid_categories(categories):
        from data import CATEGORIES
        categories = CATEGORIES
    return categories
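Both this function and Example #18 gate on valid_categories; a minimal validation sketch (the exact acceptance criteria are an assumption):

def valid_categories(categories):
    """A categories list is valid when it is a non-empty list of dicts with id and name (assumed criteria)"""
    return bool(categories) and all(item.get('id') and item.get('name') for item in categories)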
Example #23
    def update(self,
               uuid,
               title,
               url,
               watch_later=None,
               position=None,
               total=None):
        ''' Set program resumepoint or watchLater status and update local copy '''

        # The video has no assetPath, so we cannot update resumepoints
        if uuid is None:
            return True

        if position is not None and total is not None and position >= total - 30:
            watch_later = False

        self.refresh(ttl=0)
        if watch_later is not None and position is None and total is None and watch_later is self.is_watchlater(
                uuid):
            # watchLater status is not changed, nothing to do
            return True

        if watch_later is None and position == self.get_position(
                uuid) and total == self.get_total(uuid):
            # resumepoint is not changed, nothing to do
            return True

        # Collect header info for POST Request
        from tokenresolver import TokenResolver
        xvrttoken = TokenResolver().get_xvrttoken(token_variant='user')
        if xvrttoken is None:
            log_error('Failed to get usertoken from VRT NU')
            notification(message=localize(30975) + title)
            return False

        headers = {
            'authorization': 'Bearer ' + xvrttoken,
            'content-type': 'application/json',
            'Referer': 'https://www.vrt.be' + url,
        }

        if uuid in self._resumepoints:
            # Update existing resumepoint values
            payload = self._resumepoints[uuid]['value']
            payload['url'] = url
        else:
            # Create new resumepoint values
            payload = dict(position=0, total=100, url=url, watchLater=False)

        if position is not None:
            payload['position'] = position

        if total is not None:
            payload['total'] = total

        removes = []
        if position is not None or total is not None:
            removes.append('continue-*.json')

        if watch_later is not None:
            # Add watchLater status to payload
            payload['watchLater'] = watch_later
            removes.append('watchlater-*.json')

        from json import dumps
        data = dumps(payload).encode()
        log(2,
            'URL post: https://video-user-data.vrt.be/resume_points/{uuid}',
            uuid=uuid)
        log(2, 'URL post data: {data}', data=data)
        try:
            req = Request('https://video-user-data.vrt.be/resume_points/%s' %
                          uuid,
                          data=data,
                          headers=headers)
            urlopen(req)
        except HTTPError as exc:
            log_error('Failed to (un)watch episode at VRT NU ({error})',
                      error=exc)
            notification(message=localize(30977))
            return False

        # NOTE: Updates to resumepoints take a longer time to take effect, so we keep our own cache and use it
        self._resumepoints[uuid] = dict(value=payload)
        update_cache('resume_points.json', self._resumepoints)
        invalidate_caches(*removes)
        return True
Example #24
    def update_resumepoint(self,
                           video_id,
                           asset_str,
                           title,
                           position=None,
                           total=None,
                           path=None,
                           episode_id=None,
                           episode_title=None):
        """Set episode resumepoint and update local copy"""

        if video_id is None:
            return True

        menu_caches = []
        self.refresh_resumepoints(ttl=5)

        # Add existing position and total if None
        if video_id in self._resumepoints and position is None and total is None:
            position = self.get_position(video_id)
            total = self.get_total(video_id)

        # Update
        if (self.still_watching(position, total) or
            (path
             and path.startswith('plugin://plugin.video.vrt.nu/play/upnext'))):
            # Normally, VRT NU resumepoints are deleted when an episode is (un)watched and Kodi GUI automatically sets
            # the (un)watched status when Kodi Player exits. This mechanism doesn't work with "Up Next" episodes because
            # these episodes are not initiated from a ListItem in Kodi GUI.
            # For "Up Next" episodes, we should never delete the VRT NU resumepoints to make sure the watched status
            # can be forced in Kodi GUI using the playcount infolabel.
            log(3,
                "[Resumepoints] Update resumepoint '{video_id}' {position}/{total}",
                video_id=video_id,
                position=position,
                total=total)

            if position == self.get_position(
                    video_id) and total == self.get_total(video_id):
                # Resumepoint is not changed, nothing to do
                return True

            menu_caches.append('continue-*.json')

            # Update online
            gdpr = '{asset_str} gekeken tot {at} seconden.'.format(
                asset_str=asset_str, at=position)
            payload = dict(
                at=position,
                total=total,
                gdpr=gdpr,
            )
            from json import dumps
            try:
                resumepoint_json = get_url_json(
                    '{api}/{video_id}'.format(api=self.RESUMEPOINTS_URL,
                                              video_id=video_id),
                    headers=self.resumepoints_headers(),
                    data=dumps(payload).encode())
            except HTTPError as exc:
                log_error(
                    'Failed to update resumepoint of {title} at VRT NU ({error})',
                    title=title,
                    error=exc)
                notification(message=localize(30977, title=title))
                return False

            # Update local
            for idx, item in enumerate(self._resumepoints.get('items')):
                if item.get('mediaId') == video_id:
                    self._resumepoints.get('items')[idx] = resumepoint_json
                    break
            update_cache(self.RESUMEPOINTS_CACHE_FILE,
                         dumps(self._resumepoints))
            if menu_caches:
                invalidate_caches(*menu_caches)
        else:

            # Delete
            log(3,
                "[Resumepoints] Delete resumepoint '{asset_str}' {position}/{total}",
                asset_str=asset_str,
                position=position,
                total=total)

            # Delete watchlater
            self.update_watchlater(episode_id,
                                   episode_title,
                                   watch_later=False)

            # Do nothing if there is no resumepoint for this video_id
            from json import dumps
            if video_id not in dumps(self._resumepoints):
                log(3,
                    "[Resumepoints] '{video_id}' not present, nothing to delete",
                    video_id=video_id)
                return True

            # Add menu caches
            menu_caches.append('continue-*.json')

            # Delete online
            try:
                result = open_url('{api}/{video_id}'.format(
                    api=self.RESUMEPOINTS_URL, video_id=video_id),
                                  headers=self.resumepoints_headers(),
                                  method='DELETE',
                                  raise_errors='all')
                log(3,
                    "[Resumepoints] '{video_id}' online deleted: {code}",
                    video_id=video_id,
                    code=result.getcode())
            except HTTPError as exc:
                log_error(
                    "Failed to remove resumepoint of '{video_id}': {error}",
                    video_id=video_id,
                    error=exc)
                return False

            # Delete local representation and cache
            for item in self._resumepoints.get('items'):
                if item.get('mediaId') == video_id:
                    self._resumepoints.get('items').remove(item)
                    break

            update_cache(self.RESUMEPOINTS_CACHE_FILE,
                         dumps(self._resumepoints))
            if menu_caches:
                invalidate_caches(*menu_caches)
        return True
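still_watching decides whether the resumepoint is updated or deleted; a sketch of a threshold check in line with the 30-second margin used in Example #23 (the exact margins are an assumption):

    @staticmethod
    def still_watching(position, total):
        """Whether a position counts as partially watched (assumed thresholds)"""
        if position is None or total is None:
            return False
        return 30 < position < total - 30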
Example #25
    def get_episodes(self, program=None, season=None, episodes=None, category=None, feature=None, programtype=None, keywords=None,
                     whatson_id=None, video_id=None, video_url=None, page=None, use_favorites=False, variety=None, cache_file=None):
        ''' Get episodes or season data from VRT NU Search API '''

        # Construct params
        if page:
            page = statichelper.realpage(page)
            all_items = False
            params = {
                'from': ((page - 1) * 50) + 1,
                'i': 'video',
                'size': 50,
            }
        elif variety == 'single':
            all_items = False
            params = {
                'i': 'video',
                'size': '1',
            }
        else:
            all_items = True
            params = {
                'i': 'video',
                'size': '300',
            }

        if variety:
            season = 'allseasons'

            if variety == 'offline':
                from datetime import datetime
                import dateutil.tz
                params['facets[assetOffTime]'] = datetime.now(dateutil.tz.gettz('Europe/Brussels')).strftime('%Y-%m-%d')

            if variety == 'oneoff':
                params['facets[programType]'] = 'oneoff'

            if variety == 'watchlater':
                self._resumepoints.refresh(ttl=5 * 60)
                episode_urls = self._resumepoints.watchlater_urls()
                params['facets[url]'] = '[%s]' % (','.join(episode_urls))

            if variety == 'continue':
                self._resumepoints.refresh(ttl=5 * 60)
                episode_urls = self._resumepoints.resumepoints_urls()
                params['facets[url]'] = '[%s]' % (','.join(episode_urls))

            if use_favorites:
                program_urls = [statichelper.program_to_url(p, 'medium') for p in self._favorites.programs()]
                params['facets[programUrl]'] = '[%s]' % (','.join(program_urls))
            elif variety in ('offline', 'recent'):
                channel_filter = [channel.get('name') for channel in CHANNELS if get_setting(channel.get('name'), 'true') == 'true']
                params['facets[programBrands]'] = '[%s]' % (','.join(channel_filter))

        if program:
            params['facets[programUrl]'] = statichelper.program_to_url(program, 'medium')

        if season and season != 'allseasons':
            params['facets[seasonTitle]'] = season

        if episodes:
            params['facets[episodeNumber]'] = '[%s]' % (','.join(str(episode) for episode in episodes))

        if category:
            params['facets[categories]'] = category

        if feature:
            params['facets[programTags.title]'] = feature

        if programtype:
            params['facets[programType]'] = programtype

        if keywords:
            if not season:
                season = 'allseasons'
            params['q'] = quote_plus(statichelper.from_unicode(keywords))
            params['highlight'] = 'true'

        if whatson_id:
            params['facets[whatsonId]'] = whatson_id

        if video_id:
            params['facets[videoId]'] = video_id

        if video_url:
            params['facets[url]'] = video_url

        # Construct VRT NU Search API URL and get API data
        querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
        search_url = self._VRTNU_SEARCH_URL + '?' + querystring.replace(' ', '%20')  # Only encode spaces to minimize url length

        from json import load
        if cache_file:
            # Get api data from cache if it is fresh
            search_json = get_cache(cache_file, ttl=60 * 60)
            if not search_json:
                log(2, 'URL get: {url}', url=unquote(search_url))
                req = Request(search_url)
                try:
                    search_json = load(urlopen(req))
                except (TypeError, ValueError):  # No JSON object could be decoded
                    return []
                except HTTPError as exc:
                    url_length = len(req.get_selector())
                    if exc.code == 413 and url_length > 8192:
                        ok_dialog(heading='HTTP Error 413', message=localize(30967))
                        log_error('HTTP Error 413: Exceeded maximum url length: '
                                  'VRT Search API url has a length of {length} characters.', length=url_length)
                        return []
                    if exc.code == 400 and 7600 <= url_length <= 8192:
                        ok_dialog(heading='HTTP Error 400', message=localize(30967))
                        log_error('HTTP Error 400: Probably exceeded maximum url length: '
                                  'VRT Search API url has a length of {length} characters.', length=url_length)
                        return []
                    raise
                update_cache(cache_file, search_json)
        else:
            log(2, 'URL get: {url}', url=unquote(search_url))
            search_json = load(urlopen(search_url))

        # Check for multiple seasons
        seasons = None
        if 'facets[seasonTitle]' not in unquote(search_url):
            facets = search_json.get('facets', dict()).get('facets')
            seasons = next((f.get('buckets', []) for f in facets if f.get('name') == 'seasons' and len(f.get('buckets', [])) > 1), None)

        episodes = search_json.get('results', [{}])
        show_seasons = bool(season != 'allseasons')

        # Return seasons
        if show_seasons and seasons:
            return (seasons, episodes)

        api_pages = search_json.get('meta').get('pages').get('total')
        api_page_size = search_json.get('meta').get('pages').get('size')
        total_results = search_json.get('meta').get('total_results')

        if all_items and total_results > api_page_size:
            for api_page in range(1, api_pages):
                api_page_url = search_url + '&from=' + str(api_page * api_page_size + 1)
                api_page_json = load(urlopen(api_page_url))
                episodes += api_page_json.get('results', [{}])

        # Return episodes
        return episodes
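The paging loop computes the 1-based 'from' offset from the API page size; with the default size of 300, the extra requests start at items 301, 601, and so on:

api_page_size = 300
offsets = [api_page * api_page_size + 1 for api_page in range(1, 4)]
# [301, 601, 901] -> starting offsets for API pages 2, 3 and 4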