def _get_roaming_xvrttoken(self):
    """Get a X-VRT-Token for roaming"""
    # A valid vrtlogin-at token is required to start the roaming flow
    access_token = self.get_token('vrtlogin-at', roaming=True)
    if access_token is None:
        return None
    cookie_header = 'vrtlogin-at={}'.format(access_token)
    response = open_url(self._ROAMING_TOKEN_GATEWAY_URL,
                        headers={'Cookie': cookie_header},
                        follow_redirects=False)
    if response is None:
        return None
    info = response.info()
    # Append the 'state' cookie returned by the gateway to our cookie header
    state_value = info.get('Set-Cookie').split('state=')[1].split('; ')[0]
    cookie_header = '{}; state={}'.format(cookie_header, state_value)
    # Follow the redirect chain manually, one hop at a time
    response = open_url(info.get('Location'), follow_redirects=False)
    if response is None:
        return None
    redirect_url = response.info().get('Location')
    if redirect_url is None:
        return None
    response = open_url(redirect_url,
                        headers={'Cookie': cookie_header},
                        follow_redirects=False)
    if response is None:
        return None
    # The final hop sets the X-VRT-Token cookie
    return TokenResolver._create_token_dictionary(response.info().get('Set-Cookie'))
def _get_fresh_token(self, refresh_token, name):
    """Refresh an expired X-VRT-Token, vrtlogin-at or vrtlogin-rt token"""
    refresh_url = self._TOKEN_GATEWAY_URL + '/refreshtoken?legacy=true'
    headers = {'Cookie': 'vrtlogin-rt={}'.format(refresh_token)}
    cookiejar = cookielib.CookieJar()
    try:
        # A 401 signals the refresh token itself is no longer valid
        open_url(refresh_url, headers=headers, cookiejar=cookiejar, raise_errors=[401])
    except HTTPError:
        ok_dialog(heading=localize(30970), message=localize(30971))
    # Best effort: build the token dict from whatever cookies we received
    return TokenResolver._create_token_dictionary(cookiejar, name)
def _get_xvrttoken(self, login_json=None):
    """Get a one year valid X-VRT-Token"""
    from json import dumps
    if not login_json:
        login_json = self._get_login_json()
    session_token = login_json.get('sessionInfo', {}).get('login_token')
    if not session_token:
        return None
    # Gigya login cookie proves the session to the token gateway
    gigya_cookie = 'glt_{api_key}={token}'.format(api_key=self._API_KEY, token=session_token)
    payload = {
        'uid': login_json.get('UID'),
        'uidsig': login_json.get('UIDSignature'),
        'ts': login_json.get('signatureTimestamp'),
        'email': from_unicode(get_setting('username')),
    }
    response = open_url(
        self._TOKEN_GATEWAY_URL,
        data=dumps(payload).encode(),
        headers={'Content-Type': 'application/json', 'Cookie': gigya_cookie})
    if response is None:
        return None
    xvrttoken = TokenResolver._create_token_dictionary(response.info().get('Set-Cookie'))
    if xvrttoken is None:
        return None
    notification(message=localize(30952))  # Login succeeded.
    return xvrttoken
def _get_usertoken(self, name=None, login_json=None, roaming=False):
    """Get a user X-VRT-Token, vrtlogin-at, vrtlogin-expiry, vrtlogin-rt, SESSION, OIDCXSRF or state token

    Performs the OIDC login flow against the VRT login endpoint and returns
    the requested token dictionary (or None on failure). As a side effect,
    refresh and access tokens found in the cookiejar are cached for later use.
    """
    if not login_json:
        login_json = self._get_login_json()
    cookiejar = cookielib.CookieJar()
    open_url(self._USER_TOKEN_GATEWAY_URL, cookiejar=cookiejar)
    # The login POST requires the anti-CSRF cookie set by the gateway
    xsrf = next((cookie for cookie in cookiejar if cookie.name == 'OIDCXSRF'), None)
    if xsrf is None:
        return None
    payload = dict(UID=login_json.get('UID'),
                   UIDSignature=login_json.get('UIDSignature'),
                   signatureTimestamp=login_json.get('signatureTimestamp'),
                   client_id='vrtnu-site',
                   _csrf=xsrf.value)
    data = urlencode(payload).encode()
    response = open_url(self._VRT_LOGIN_URL, data=data, cookiejar=cookiejar)
    if response is None:
        return None
    destination = response.geturl()
    usertoken = TokenResolver._create_token_dictionary(cookiejar, name)
    # No token and no redirect to the VRT NU site means the login failed
    if not usertoken and not destination.startswith('https://www.vrt.be/vrtnu'):
        if roaming is False:
            ok_dialog(heading=localize(30970), message=localize(30972))
        return None
    # Cache additional tokens for later use.
    # The original had two copy-pasted stanzas (and a duplicated json import)
    # for vrtlogin-rt and vrtlogin-at; a single loop covers both identically.
    from json import dumps
    for cookie_name in ('vrtlogin-rt', 'vrtlogin-at'):
        token = TokenResolver._create_token_dictionary(cookiejar, cookie_name=cookie_name)
        if token is not None:
            cache_file = self._get_token_filename(cookie_name)
            update_cache(cache_file, dumps(token), self._TOKEN_CACHE_DIR)
    return usertoken
def _select_hls_substreams(self, master_hls_url, protocol):
    """Select HLS substreams to speed up Kodi player start, workaround for slower Kodi selection

    Returns a StreamURLS namedtuple (variant url, subtitle url) or None on failure.
    """
    hls_variant_url = None
    subtitle_url = None
    hls_audio_id = None
    hls_subtitle_id = None
    hls_base_url = master_hls_url.split('.m3u8')[0]
    try:
        response = open_url(master_hls_url, raise_errors=[415])
    except HTTPError as exc:
        self._handle_bad_stream_error(protocol, exc.code, exc.reason)
        return None
    if response is None:
        return None
    hls_playlist = to_unicode(response.read())
    max_bandwidth = get_max_bandwidth()
    stream_bandwidth = None

    # Get hls variant url based on max_bandwidth setting
    import re
    hls_variant_regex = re.compile(r'#EXT-X-STREAM-INF:[\w\-.,=\"]*?BANDWIDTH=(?P<BANDWIDTH>\d+),'
                                   r'[\w\-.,=\"]+\d,(?:AUDIO=\"(?P<AUDIO>[\w\-]+)\",)?(?:SUBTITLES=\"'
                                   r'(?P<SUBTITLES>\w+)\",)?[\w\-.,=\"]+?[\r\n](?P<URI>[\w:\/\-.=?&]+)')
    # reverse sort by bandwidth: pick the highest-bandwidth variant that still fits
    for match in sorted(re.finditer(hls_variant_regex, hls_playlist),
                        key=lambda m: int(m.group('BANDWIDTH')), reverse=True):
        stream_bandwidth = int(match.group('BANDWIDTH')) // 1000
        if max_bandwidth == 0 or stream_bandwidth < max_bandwidth:
            if match.group('URI').startswith('http'):
                hls_variant_url = match.group('URI')
            else:
                hls_variant_url = hls_base_url + match.group('URI')
            hls_audio_id = match.group('AUDIO')
            hls_subtitle_id = match.group('SUBTITLES')
            break

    # BUGFIX: when the playlist contains no parsable variant at all,
    # stream_bandwidth stays None and "None > int" raises TypeError on
    # Python 3, so guard against it before comparing.
    if stream_bandwidth is not None and stream_bandwidth > max_bandwidth and not hls_variant_url:
        message = localize(30057, max=max_bandwidth, min=stream_bandwidth)
        ok_dialog(message=message)
        open_settings()

    # Get audio url
    if hls_audio_id:
        audio_regex = re.compile(r'#EXT-X-MEDIA:TYPE=AUDIO[\w\-=,\.\"\/]+?GROUP-ID=\"' + hls_audio_id +
                                 r'\"[\w\-=,\.\"\/]+?URI=\"(?P<AUDIO_URI>[\w\-=]+)\.m3u8\"')
        match_audio = re.search(audio_regex, hls_playlist)
        if match_audio:
            # Rebuild the variant url around the matched audio rendition
            hls_variant_url = hls_base_url + match_audio.group('AUDIO_URI') + '-' + hls_variant_url.split('-')[-1]

    # Get subtitle url, works only for on demand streams
    if get_setting_bool('showsubtitles', default=True) and '/live/' not in master_hls_url and hls_subtitle_id:
        subtitle_regex = re.compile(r'#EXT-X-MEDIA:TYPE=SUBTITLES[\w\-=,\.\"\/]+?GROUP-ID=\"' + hls_subtitle_id +
                                    r'\"[\w\-=,\.\"\/]+URI=\"(?P<SUBTITLE_URI>[\w\-=]+)\.m3u8\"')
        match_subtitle = re.search(subtitle_regex, hls_playlist)
        if match_subtitle:
            subtitle_url = hls_base_url + match_subtitle.group('SUBTITLE_URI') + '.webvtt'

    return StreamURLS(hls_variant_url, subtitle_url)
def delete_online(self, asset_id):
    """Delete resumepoint online"""
    uri = 'https://video-user-data.vrt.be/resume_points/{asset_id}'.format(asset_id=asset_id)
    try:
        result = open_url(uri,
                          headers=self.resumepoint_headers(),
                          method='DELETE',
                          raise_errors='all')
    except HTTPError as exc:
        log_error("Failed to remove '{asset_id}' from resumepoints: {error}", asset_id=asset_id, error=exc)
        return False
    log(3, "[Resumepoints] '{asset_id}' online deleted: {code}", asset_id=asset_id, code=result.getcode())
    return True
def get_video_attributes(vrtnu_url):
    """Return a dictionary with video attributes by scraping the VRT NU website

    Results are cached per url; returns None when scraping fails or the
    page is ambiguous (multiple players and no #epgchannel fragment).
    """
    # Get cache
    cache_file = 'web_video_attrs_multi.json'
    video_attrs_multi = get_cache(cache_file, ttl=ttl('indirect'))
    if not video_attrs_multi:
        video_attrs_multi = {}
    if vrtnu_url in video_attrs_multi:
        return video_attrs_multi[vrtnu_url]

    # Scrape video attributes
    from bs4 import BeautifulSoup, SoupStrainer
    try:
        response = open_url(vrtnu_url, raise_errors='all')
    except HTTPError as exc:
        log_error('Web scraping video attributes failed: {error}', error=exc)
        return None
    if response is None:
        return None
    html_page = response.read()
    # Only parse the player/livestream containers, not the whole page
    strainer = SoupStrainer(['section', 'div'], {'class': ['video-detail__player', 'livestream__inner']})
    soup = BeautifulSoup(html_page, 'html.parser', parse_only=strainer)

    item = None
    epg_channel = None
    if '#epgchannel=' in vrtnu_url:
        epg_channel = vrtnu_url.split('#epgchannel=')[1]
    # Pick the container matching the requested channel, else the last one
    for item in soup:
        if epg_channel and epg_channel == item.get('data-epgchannel'):
            break
    # Multiple players but no channel to disambiguate: bail out
    if not epg_channel and len(soup) > 1:
        return None
    try:
        # item may be None (empty soup) or lack a nui-media tag; both raise AttributeError
        video_attrs = item.find(name='nui-media').attrs
    except AttributeError as exc:
        log_error('Web scraping video attributes failed: {error}', error=exc)
        return None

    # Update cache. The original's "update existing" and "create new" branches
    # performed the identical operation, so a single assignment suffices.
    video_attrs_multi[vrtnu_url] = video_attrs
    from json import dumps
    update_cache(cache_file, dumps(video_attrs_multi))
    return video_attrs
def update_resumepoint(self, video_id, asset_str, title, position=None, total=None, path=None, episode_id=None, episode_title=None):
    """Set episode resumepoint and update local copy

    Updates the online resumepoint (and the local cached copy) when the user
    is still watching, or deletes it (plus the watch-later entry) otherwise.
    Returns True on success or when there was nothing to do, False when the
    online update/delete failed.
    """
    if video_id is None:
        return True
    menu_caches = []
    # Make sure we work against a recent copy of the online resumepoints
    self.refresh_resumepoints(ttl=5)

    # Add existing position and total if None
    if video_id in self._resumepoints and position is None and total is None:
        position = self.get_position(video_id)
        total = self.get_total(video_id)

    # Update
    if (self.still_watching(position, total) or (path and path.startswith('plugin://plugin.video.vrt.nu/play/upnext'))):
        # Normally, VRT NU resumepoints are deleted when an episode is (un)watched and Kodi GUI automatically sets
        # the (un)watched status when Kodi Player exits. This mechanism doesn't work with "Up Next" episodes because
        # these episodes are not initiated from a ListItem in Kodi GUI.
        # For "Up Next" episodes, we should never delete the VRT NU resumepoints to make sure the watched status
        # can be forced in Kodi GUI using the playcount infolabel.
        log(3, "[Resumepoints] Update resumepoint '{video_id}' {position}/{total}", video_id=video_id, position=position, total=total)
        if position == self.get_position(video_id) and total == self.get_total(video_id):
            # Resumepoint is not changed, nothing to do
            return True
        menu_caches.append('continue-*.json')
        # Update online
        # NOTE: the gdpr message is intentionally in Dutch; it is sent to the VRT API as-is
        gdpr = '{asset_str} gekeken tot {at} seconden.'.format(asset_str=asset_str, at=position)
        payload = dict(
            at=position,
            total=total,
            gdpr=gdpr,
        )
        from json import dumps
        try:
            resumepoint_json = get_url_json('{api}/{video_id}'.format(api=self.RESUMEPOINTS_URL, video_id=video_id), headers=self.resumepoints_headers(), data=dumps(payload).encode())
        except HTTPError as exc:
            log_error('Failed to update resumepoint of {title} at VRT NU ({error})', title=title, error=exc)
            notification(message=localize(30977, title=title))
            return False
        # Update local: replace the matching item with the fresh server response
        for idx, item in enumerate(self._resumepoints.get('items')):
            if item.get('mediaId') == video_id:
                self._resumepoints.get('items')[idx] = resumepoint_json
                break
        update_cache(self.RESUMEPOINTS_CACHE_FILE, dumps(self._resumepoints))
        if menu_caches:
            invalidate_caches(*menu_caches)
    else:
        # Delete
        log(3, "[Resumepoints] Delete resumepoint '{asset_str}' {position}/{total}", asset_str=asset_str, position=position, total=total)
        # Delete watchlater
        self.update_watchlater(episode_id, episode_title, watch_later=False)
        # Do nothing if there is no resumepoint for this video_id
        # (a substring check on the serialized cache is used as a cheap presence test)
        from json import dumps
        if video_id not in dumps(self._resumepoints):
            log(3, "[Resumepoints] '{video_id}' not present, nothing to delete", video_id=video_id)
            return True
        # Add menu caches
        menu_caches.append('continue-*.json')
        # Delete online
        try:
            result = open_url('{api}/{video_id}'.format(api=self.RESUMEPOINTS_URL, video_id=video_id), headers=self.resumepoints_headers(), method='DELETE', raise_errors='all')
            log(3, "[Resumepoints] '{video_id}' online deleted: {code}", video_id=video_id, code=result.getcode())
        except HTTPError as exc:
            log_error("Failed to remove resumepoint of '{video_id}': {error}", video_id=video_id, error=exc)
            return False
        # Delete local representation and cache
        for item in self._resumepoints.get('items'):
            if item.get('mediaId') == video_id:
                self._resumepoints.get('items').remove(item)
                break
        update_cache(self.RESUMEPOINTS_CACHE_FILE, dumps(self._resumepoints))
        if menu_caches:
            invalidate_caches(*menu_caches)
    return True