def test_update_qsd(self):
    """Basic update/remove behavior of update_qsd."""
    # Replace the value of an existing query-string parameter.
    self.assertEqual(update_qsd("http://test.se?one=1&two=3", {"two": 2}), "http://test.se?one=1&two=2")
    # Remove a single named parameter.
    self.assertEqual(
        update_qsd("http://test.se?one=1&two=3", remove=["two"]),
        "http://test.se?one=1")
    # remove="*" drops all parameters; a value of None in the update dict
    # re-adds the parameter with its pre-existing value.
    self.assertEqual(
        update_qsd("http://test.se?one=1&two=3", {"one": None}, remove="*"),
        "http://test.se?one=1")
def _get_streams(self):
    """Resolve the channel name from the URL and yield its HLS variant streams.

    Maps the channel alias found in the URL to the (API path, canonical
    channel id) pair used by the mediavitrina playlist API, fetches an
    access token, then the playlist URL.
    """
    channel = (self._re_url_1.match(self.url)
               or self._re_url_2.match(self.url)
               or self._re_url_3.match(self.url)).group("channel")
    channels = [
        # ((channel aliases), (path, channel))
        # NOTE: single aliases MUST be one-element tuples (trailing comma).
        # Previously entries like ("ctc") were plain strings, so the
        # membership test below degraded to substring matching and e.g.
        # channel "tv" would incorrectly match "muztv".
        (("5-tv", "tv-5", "5tv"), ("tv5", "tv-5")),
        (("chetv", "ctc-che", "che_ext"), ("ctc", "ctc-che")),
        (("ctc",), ("ctc", "ctc")),
        (("ctclove", "ctc-love", "ctc_love_ext"), ("ctc", "ctc-love")),
        (("domashniy", "ctc-dom", "domashniy_ext"), ("ctc", "ctc-dom")),
        (("iz",), ("iz", "iz")),
        (("mir",), ("mtrkmir", "mir")),
        (("muztv",), ("muztv", "muztv")),
        (("ren", "ren-tv", "rentv"), ("nmg", "ren-tv")),
        (("russia1",), ("vgtrk", "russia1")),
        (("russia24",), ("vgtrk", "russia24")),
        (("russiak", "kultura"), ("vgtrk", "russiak")),
        (("spas",), ("spas", "spas")),
        (("tvc",), ("tvc", "tvc")),
        (("tvzvezda", "zvezda"), ("zvezda", "zvezda")),
        (("u", "u_ott"), ("utv", "u_ott")),
    ]
    for c in channels:
        if channel in c[0]:
            path, channel = c[1]
            break
    else:
        log.error(f"Unsupported channel: {channel}")
        return

    # The token is appended to the playlist request as query-string params.
    res_token = self.session.http.get(
        "https://media.mediavitrina.ru/get_token",
        schema=validate.Schema(
            validate.transform(parse_json),
            {"result": {
                "token": str
            }},
            validate.get("result"),
        ))
    url = self.session.http.get(
        update_qsd(
            f"https://media.mediavitrina.ru/api/v2/{path}/playlist/{channel}_as_array.json",
            qsd=res_token),
        schema=validate.Schema(
            validate.transform(parse_json),
            {"hls": [validate.url()]},
            validate.get("hls"),
            validate.get(0),
        ))
    if not url:
        return
    if "georestrictions" in url:
        log.error("Stream is geo-restricted")
        return

    yield from HLSStream.parse_variant_playlist(
        self.session, url, name_fmt="{pixels}_{bitrate}").items()
def _get_streams(self):
    """Log in if required, start the websocket client, and yield HLS streams.

    Yields (quality, stream) pairs whose streams are bound to the
    websocket client so segment fetching stays authorized.
    """
    if self.get_option("purge_credentials"):
        self.clear_cookies()
        log.info("All credentials were successfully removed")

    self.session.http.headers.update({
        "User-Agent": useragents.CHROME,
    })

    self.niconico_web_login()

    wss_api_url = self.get_wss_api_url()
    if not wss_api_url:
        log.error("Failed to get wss_api_url. "
                  "Please check if the URL is correct, "
                  "and make sure your account has access to the video.")
        return

    self.wsclient = NicoLiveWsClient(self.session, wss_api_url)
    self.wsclient.start()

    hls_stream_url = self._get_hls_stream_url()
    if not hls_stream_url:
        return

    # Apply the timeshift offset to the *local* playlist URL.
    # Bug fix: previously this read `self.hls_stream_url`, which is never
    # assigned on the plugin instance, so the offset branch would raise
    # AttributeError instead of shifting the stream start.
    offset = self.get_option("timeshift-offset")
    if offset and "timeshift" in wss_api_url:
        hls_stream_url = update_qsd(hls_stream_url, {"start": offset})

    for quality, stream in NicoLiveHLSStream.parse_variant_playlist(
            self.session, hls_stream_url).items():
        stream.set_wsclient(self.wsclient)
        yield quality, stream
def get_wss_api_url(self):
    """Extract the websocket API URL from the page's embedded JSON props.

    Returns the wss:// URL, with `frontend_id` appended as a query-string
    parameter when the page provides one, or None when the page does not
    match the expected structure.
    """
    try:
        data = self.session.http.get(
            self.url,
            schema=validate.Schema(
                validate.parse_html(),
                validate.xml_find(
                    ".//script[@id='embedded-data'][@data-props]"),
                validate.get("data-props"),
                validate.parse_json(),
                {
                    "site": {
                        "relive": {
                            "webSocketUrl": validate.url(scheme="wss")
                        },
                        validate.optional("frontendId"): int
                    }
                },
                validate.get("site"),
                validate.union_get(("relive", "webSocketUrl"),
                                   "frontendId")))
    except PluginError:
        # Schema validation failed (wrong page, no access, ...):
        # signal failure to the caller by returning None.
        return

    wss_api_url, frontend_id = data
    # frontendId is optional in the page data; only append it when present.
    if frontend_id is not None:
        wss_api_url = update_qsd(wss_api_url, {"frontend_id": frontend_id})

    return wss_api_url
def _get_streams(self):
    """Fetch the 1tv playlist URL, attach the HLS session token, yield streams."""
    playlist_schema = validate.Schema(
        validate.transform(parse_json),
        {"hls": [validate.url()]},
        validate.get("hls"),
        validate.get(0),
    )
    url = self.session.http.get(
        "https://stream.1tv.ru/api/playlist/1tvch_as_array.json",
        data={"r": random.randint(1, 100000)},
        schema=playlist_schema,
    )
    if not url:
        return
    if "georestrictions" in url:
        log.error("Stream is geo-restricted")
        return

    # The "s" session value is URL-quoted by the API; unquote it before
    # re-encoding it into the playlist URL.
    session_schema = validate.Schema(
        validate.transform(parse_json),
        {"s": validate.transform(unquote)},
    )
    hls_session = self.session.http.get(
        "https://stream.1tv.ru/get_hls_session",
        schema=session_schema,
    )
    url = update_qsd(url, qsd=hls_session, safe="/:")

    streams = HLSStream.parse_variant_playlist(
        self.session, url, name_fmt="{pixels}_{bitrate}")
    yield from streams.items()
def _get_streams(self):
    """Resolve the mitele channel, fetch CDN tokens, and yield HLS streams."""
    channel = self._url_re.match(self.url).group("channel")
    pdata = self.session.http.get(
        self.caronte_url.format(channel=channel),
        acceptable_status=(200, 403, 404),
        schema=self.caronte_schema,
    )
    gbx = self.session.http.get(
        self.gbx_url.format(channel=channel),
        schema=self.gbx_schema,
    )
    if "code" in pdata:
        log.error("error getting pdata: {}".format(pdata["code"]))
        return

    tokens = self.session.http.post(
        pdata["cerbero"],
        acceptable_status=(200, 403, 404),
        json={"bbx": pdata["bbx"], "gbx": gbx},
        headers={"origin": "https://www.mitele.es"},
        schema=self.cerbero_schema,
    )
    if "code" in tokens:
        log.error("Could not get stream tokens: {} ({})".format(
            tokens["code"],
            self.token_errors.get(tokens["code"], "unknown error")))
        return

    for stream in pdata["dls"]:
        # DRM-protected streams cannot be played; warn and skip.
        if stream["drm"]:
            log.warning("Stream may be protected by DRM")
            continue
        sformat = stream.get("format")
        log.debug("Stream: {} ({})".format(stream["stream"], sformat or "n/a"))
        # The per-stream CDN token is delivered as a raw query string.
        cdn_token = tokens.get(stream["lid"], {}).get("cdn", "")
        qsd = parse_qsd(cdn_token)
        if sformat != "hls":
            continue
        yield from HLSStream.parse_variant_playlist(
            self.session, update_qsd(stream["stream"], qsd)).items()
def _get_clips(self):
    """Yield authenticated HTTP streams for a clip, storing its metadata."""
    try:
        # The API returns nested tuples: auth data, metadata, and title.
        (((sig, token), streams),
         (self.author, self.category),
         self.title) = self.api.clips(self.clip_name)
    except (PluginError, TypeError):
        return

    auth = {"sig": sig, "token": token}
    for quality, clip_url in streams:
        yield quality, HTTPStream(self.session, update_qsd(clip_url, auth))
def _get_clips(self):
    """Yield HTTP streams for a clip, signed with the API's sig/token pair."""
    try:
        sig, token, streams = self.api.clips(self.clip_name)
    except (PluginError, TypeError):
        return

    auth_params = {"sig": sig, "token": token}
    for quality, clip_url in streams:
        signed_url = update_qsd(clip_url, auth_params)
        yield quality, HTTPStream(self.session, signed_url)
def _get_streams(self):
    """Resolve Pluto TV media (live channel, series episode, or movie) and yield streams."""
    data = None
    m = self.match.groupdict()
    # Dispatch on which URL slug matched: live channel, series episode, or movie.
    if m['slug_live']:
        res = self.session.http.get('https://api.pluto.tv/v2/channels')
        data = self.session.http.json(res, schema=self._schema_media(m['slug_live']))
    elif m['slug_series'] and m['slug_episode']:
        res = self.session.http.get(f'http://api.pluto.tv/v3/vod/slugs/{m["slug_series"]}')
        # Find the season whose episode list matched the requested episode slug.
        data = self.session.http.json(
            res,
            schema=validate.Schema(
                {'seasons': validate.all(
                    [{'episodes': self._schema_media(m['slug_episode'])}],
                    validate.filter(lambda k: k['episodes'] is not None))},
                validate.get('seasons'),
                validate.get(0),
                validate.any(None, validate.get('episodes'))
            ),
        )
    elif m['slug_movies']:
        res = self.session.http.get('https://api.pluto.tv/v3/vod/categories',
                                    params={'includeItems': 'true', 'deviceType': 'web'})
        # Same pattern as above: keep only the category containing the movie.
        data = self.session.http.json(
            res,
            schema=validate.Schema(
                {'categories': validate.all(
                    [{'items': self._schema_media(m['slug_movies'])}],
                    validate.filter(lambda k: k['items'] is not None))},
                validate.get('categories'),
                validate.get(0),
                validate.any(None, validate.get('items')),
            ),
        )

    log.trace(f'{data!r}')
    # "stitched" holds the playable (server-side ad-stitched) stream data.
    if data is None or not data.get('stitched'):
        return

    self.title = data['name']
    stream_url_no_sid = data['stitched']['urls'][0]['url']
    # The API requires device/session identifiers; one random UUID serves as both.
    device_id = str(uuid4())
    stream_url = update_qsd(stream_url_no_sid, {
        'deviceId': device_id,
        'sid': device_id,
        'deviceType': 'web',
        'deviceMake': 'Firefox',
        'deviceModel': 'Firefox',
        'appName': 'web',
    })

    # NOTE(review): output is remuxed to MPEG-TS via ffmpeg — presumably to
    # smooth over ad-break discontinuities; confirm against MuxedStream usage.
    self.session.set_option('ffmpeg-fout', 'mpegts')
    for q, s in HLSStream.parse_variant_playlist(self.session, stream_url).items():
        yield q, MuxedStream(self.session, s)
def _get_streams(self):
    """Extract the TVI Player video metadata, check rights, and return HLS streams."""
    self.session.http.headers.update(
        {"Referer": "https://tviplayer.iol.pt/"})
    # Pull the player's JSON config out of the page script containing the m3u8.
    data = self.session.http.get(
        self.url,
        schema=validate.Schema(
            validate.parse_html(),
            validate.xml_xpath_string(
                ".//script[contains(text(),'.m3u8')]/text()"),
            validate.text,
            validate.transform(self._re_jsonData.search),
            validate.any(
                None,
                validate.all(
                    validate.get("json"),
                    validate.parse_json(),
                    {
                        "id": validate.text,
                        "liveType": validate.text,
                        "videoType": validate.text,
                        "videoUrl": validate.url(path=validate.endswith(".m3u8")),
                        validate.optional("channel"): validate.text,
                    }))))
    if not data:
        return
    log.debug("{0!r}".format(data))

    # "DIRETO" + "LIVE" means a live broadcast; everything else is VOD
    # for the purposes of the rights check below.
    if data["liveType"].upper() == "DIRETO" and data["videoType"].upper(
    ) == "LIVE":
        geo_path = "live"
    else:
        geo_path = "vod"
    # Geo-rights check; 403 is an expected (handled) response.
    data_geo = self.session.http.get(
        "https://services.iol.pt/direitos/rights/{0}?id={1}".format(
            geo_path, data['id']),
        acceptable_status=(200, 403),
        schema=validate.Schema(
            validate.parse_json(),
            {
                "code": validate.text,
                "error": validate.any(None, validate.text),
                "detail": validate.text,
            }))
    log.debug("{0!r}".format(data_geo))
    if data_geo["detail"] != "ok":
        log.error("{0}".format(data_geo['detail']))
        return

    # Auth signature required by the CDN, passed via the query string.
    wmsAuthSign = self.session.http.get(
        "https://services.iol.pt/matrix?userId=",
        schema=validate.Schema(validate.text))
    hls_url = update_qsd(data["videoUrl"], {"wmsAuthSign": wmsAuthSign})
    return HLSStream.parse_variant_playlist(self.session, hls_url)
def _get_streams(self):
    """Return HLS streams for the live URL, appending an auth token when the site requires one."""
    live_url = self._get_live_url()
    if not live_url:
        log.info("This stream may be off-air or not available in your country")
        return

    # Sites that don't use token auth can be played directly.
    if not self._is_token_based_site():
        return HLSStream.parse_variant_playlist(self.session, live_url)

    token = self._get_token()
    if not token:
        return
    tokenized_url = update_qsd(live_url, {"iut": token})
    return HLSStream.parse_variant_playlist(self.session, tokenized_url)
def on_response(res, **kwargs):
    # Response hook handling the site's WAF challenge flow.
    # NOTE(review): `self` is captured from the enclosing scope — this is a
    # closure defined inside a method, not a free function.
    if res.headers.get("x-waf-redirect") == "1":
        if not res.headers.get("X-WAF-Backend-Status"):
            # First redirect of the challenge: derive the "key" query
            # parameter from the md5 of the WAF hash cookie and rewrite
            # the redirect target so the follow-up request carries it.
            log.debug("Getting WAF cookie")
            cookie = res.cookies.get(self.HASH_COOKIE)
            key = md5(cookie.encode("utf-8")).hexdigest()
            res.headers["Location"] = update_qsd(
                res.headers["Location"], qsd={"key": key})
            return res
        elif res.headers.get(
                "X-WAF-Backend-Status") == "challenge_success":
            # Challenge solved: persist the WAF cookies on the session so
            # subsequent requests are let through.
            self.session.http.cookies.update(res.cookies)
            return res
def _get_streams(self):
    """Register an anonymous AbemaTV device, resolve the playlist, and return streams."""
    # Register a throwaway device to obtain a user token.
    deviceid = str(uuid.uuid4())
    appkeysecret = self._generate_applicationkeysecret(deviceid)
    json_data = {
        "deviceId": deviceid,
        "applicationKeySecret": appkeysecret
    }
    res = self.session.http.post(self._USER_API, json=json_data)
    jsonres = self.session.http.json(res, schema=self._USER_SCHEMA)
    self.usertoken = jsonres['token']  # for authorization

    matchresult = self._url_re.match(self.url)
    if matchresult.group("onair"):
        # Live ("on-air") channel: look the channel id up in the channel list.
        onair = matchresult.group("onair")
        if onair == "news-global":
            # news-global is served from a separate channel division.
            self._CHANNEL = update_qsd(self._CHANNEL, {"division": "1"})
        res = self.session.http.get(self._CHANNEL)
        jsonres = self.session.http.json(res, schema=self._CHANNEL_SCHEMA)
        channels = jsonres["channels"]
        for channel in channels:
            if onair == channel["id"]:
                break
        else:
            # No matching channel id found.
            raise NoStreamsError(self.url)
        playlisturl = channel["playback"]["hls"]
    elif matchresult.group("episode"):
        # VOD episode: premium-only content is rejected early.
        episode = matchresult.group("episode")
        if not self._is_playable("episode", episode):
            log.error("Premium stream is not playable")
            return {}
        playlisturl = self._PRGM3U8.format(episode)
    elif matchresult.group("slots"):
        # Timeshift "slot": same premium check as episodes.
        slots = matchresult.group("slots")
        if not self._is_playable("slots", slots):
            log.error("Premium stream is not playable")
            return {}
        playlisturl = self._SLOTM3U8.format(slots)

    log.debug("URL={0}".format(playlisturl))

    # hook abematv private protocol
    # Segments are DRM'd with a custom scheme; the adapter resolves
    # abematv-license:// key URIs using the device id and user token.
    self.session.http.mount(
        "abematv-license://",
        AbemaTVLicenseAdapter(self.session, deviceid, self.usertoken))

    streams = HLSStream.parse_variant_playlist(self.session, playlisturl)
    if not streams:
        # Not a variant playlist: treat the URL as a single live stream.
        return {"live": HLSStream(self.session, playlisturl)}
    else:
        return streams
def handle_api_message(self, message):
    """Dispatch one websocket API message and update client state.

    Handles "stream" (playlist URL delivery), "watch" (keep-alive protocol:
    currentstream / watchinginterval / disconnect), and "ping" messages.
    """
    _log.debug(f"Received: {message}")
    message_parsed = json.loads(message)

    if message_parsed["type"] == "stream":
        data = message_parsed["data"]
        self.hls_stream_url = data["uri"]
        # load in the offset for timeshift live videos
        offset = self.get_option("timeshift-offset")
        if offset and 'timeshift' in self.wss_api_url:
            self.hls_stream_url = update_qsd(self.hls_stream_url, {"start": offset})
        self.is_stream_ready = True

    if message_parsed["type"] == "watch":
        body = message_parsed["body"]
        command = body["command"]

        if command == "currentstream":
            # Server pushed the playlist URL for the current stream.
            current_stream = body["currentStream"]
            self.hls_stream_url = current_stream["uri"]
            self.is_stream_ready = True

        elif command == "watchinginterval":
            # Server tells us how often to send "watching" keep-alives;
            # start the scheduler thread on first receipt only.
            self.watching_interval = int(body["params"][0])
            _log.debug("Got watching_interval: {0}".format(
                self.watching_interval))
            if self.watching_interval_worker_thread is None:
                _log.debug("send_watching_scheduler starting.")
                self.watching_interval_worker_thread = threading.Thread(
                    target=self.send_watching_scheduler)
                self.watching_interval_worker_thread.daemon = True
                self.watching_interval_worker_thread.start()
            else:
                _log.debug("send_watching_scheduler already running.")

        elif command == "disconnect":
            # Broadcast ended: mark the stream finished and close the reader.
            _log.info("Websocket API closed.")
            _log.info("Stream ended.")
            self.is_stream_ended = True
            if self.stream_reader is not None:
                self.stream_reader.close()
                _log.info("Stream reader closed.")

    elif message_parsed["type"] == "ping":
        self.send_pong()
def _get_token_req_url(self):
    """Build the token-request URL from data embedded in the page's scripts.

    Extracts the jQuery.get() endpoint and the per-hour token seed string,
    derives the "rsk" token from the current hour (falling back to the
    previous hour to tolerate clock skew), and returns the endpoint with
    the token appended, or None when the data cannot be found.
    """
    # Endpoint: the URL passed to jQuery.get() inside the LIVE_URL script.
    token_req_host_re = re.compile(r"""jQuery\.get\s*\(['"]([^'"]+)['"]""")
    schema = validate.Schema(
        validate.xml_xpath_string(
            ".//script[contains(text(), 'LIVE_URL')]/text()"),
        validate.any(
            None,
            validate.all(
                validate.transform(token_req_host_re.search),
                validate.any(
                    None,
                    validate.all(
                        validate.get(1),
                        validate.url(),
                    )),
            )),
    )
    token_req_host = validate.validate(schema, self.page)
    log.debug("token_req_host={0}".format(token_req_host))

    # Seed string: the literal next to Math.floor(Date.now() / 3600000).
    token_req_str_re = re.compile(
        r"""Math\.floor\(Date\.now\(\)\s*/\s*3600000\),\s*['"]([^'"]+)['"]"""
    )
    schema = validate.Schema(
        validate.xml_xpath_string(
            ".//script[contains(text(), 'LIVE_URL')]/text()"),
        validate.any(
            None,
            validate.all(
                validate.transform(token_req_str_re.search),
                validate.any(
                    None,
                    validate.all(
                        validate.get(1),
                        validate.text,
                    )),
            )),
    )
    token_req_str = validate.validate(schema, self.page)
    log.debug("token_req_str={0}".format(token_req_str))
    if not token_req_str:
        return

    # Token is keyed on the current hour (same unit as the JS expression);
    # try the previous hour too in case we're right on the boundary.
    date = int(time.time() // 3600)
    token_req_token = self.transform_token(token_req_str, date) or self.transform_token(
        token_req_str, date - 1)

    if token_req_host and token_req_token:
        return update_qsd(token_req_host, {"rsk": token_req_token})
def test_update_qsd():
    """Update/remove behavior and keep_blank_values handling of update_qsd."""
    # Update and remove existing parameters.
    assert update_qsd("http://test.se?one=1&two=3", {"two": 2}) == "http://test.se?one=1&two=2"
    assert update_qsd("http://test.se?one=1&two=3", remove=["two"]) == "http://test.se?one=1"
    # remove="*" clears everything; None re-adds the pre-existing value.
    assert update_qsd("http://test.se?one=1&two=3", {"one": None}, remove="*") == "http://test.se?one=1"
    # Blank-value handling.
    assert update_qsd("http://test.se", OrderedDict([("one", ""), ("two", "")])) == "http://test.se?one=&two=", \
        "should add empty params"
    assert update_qsd(
        "http://test.se?one=", {"one": None
                                }) == "http://test.se?one=", "should leave empty params unchanged"
    assert update_qsd("http://test.se?one=", keep_blank_values=False
                      ) == "http://test.se", "should strip blank params"
    assert update_qsd("http://test.se?one=&two=", {"one": None}, keep_blank_values=False) == "http://test.se?one=", \
        "should leave one"
    # An explicit empty-string value survives keep_blank_values=False.
    assert update_qsd("http://test.se?&two=", {"one": ''}, keep_blank_values=False) == "http://test.se?one=", \
        "should set one blank"
    assert update_qsd("http://test.se?one=", {"two": 2}) == "http://test.se?one=&two=2"
def _get_hls_streams(self, channel):
    """Yield HLS streams for the channel, resolved via the mobile embed page."""
    channel = self.hls_channel_remap.get(channel, channel)
    embed_url = self.embed_url.format(channel)
    self.logger.debug("Found embed URL: {0}", embed_url)

    # page needs to have a mobile user agent
    embed_page = self.session.http.get(
        embed_url,
        headers={"User-Agent": useragents.ANDROID},
    )

    m = self.embed_re.search(embed_page.text)
    if not m:
        return

    # remove all query string arguments except hdnea
    hls_stream_url = update_qsd(m.group(1), {"hdnea": None}, remove="*")
    try:
        yield from HLSStream.parse_variant_playlist(self.session, hls_stream_url).items()
    except Exception:
        self.logger.error("Failed to load the HLS playlist for {0}", channel)
def _get_streams(self):
    """Resolve a Brightcove-hosted video into HLS streams.

    Reads the player element's data attributes, fetches the player script
    to extract the policy key, then queries the playback API for sources.
    """
    try:
        data = self.session.http.get(
            self.url,
            schema=validate.Schema(
                validate.parse_html(),
                validate.xml_find(
                    ".//video[@id='brightcove_video_player']"),
                validate.union_get("data-video-id", "data-account",
                                   "data-ad-config-id", "data-player")))
    except PluginError:
        # Page doesn't contain the expected Brightcove player element.
        return
    data_video_id, data_account, data_ad_config_id, data_player = data

    # The policy key is embedded in the player's JS bundle.
    url = self._PLAYER_URL.format(data_account=data_account,
                                  data_player=data_player)
    policy_key = self.session.http.get(
        url,
        schema=validate.Schema(
            validate.transform(self._policy_key_re.search),
            validate.any(None, validate.get(1))))
    if not policy_key:
        return

    url = self._API_URL.format(data_account=data_account,
                               data_video_id=data_video_id)
    # The ad config id is optional on the page; forward it when present.
    if data_ad_config_id is not None:
        url = update_qsd(url, dict(ad_config_id=data_ad_config_id))

    # Keep only HLS sources from the playback API response.
    streams = self.session.http.get(
        url,
        headers={"Accept": f"application/json;pk={policy_key}"},
        schema=validate.Schema(
            validate.parse_json(),
            {
                "sources": [{
                    validate.optional("type"): str,
                    "src": validate.url(),
                }],
            },
            validate.get("sources"),
            validate.filter(lambda source: source.get("type") == "application/x-mpegURL")))

    # Return the first HLS source's variant playlist.
    for stream in streams:
        return HLSStream.parse_variant_playlist(self.session, stream["src"])
def _get_streams(self):
    """Build the TwitCasting stream URL at the best available quality.

    Raises PluginError when the stream is offline, requires login, or has
    no usable endpoint; appends the md5 of the stream password when one
    was supplied via plugin options.
    """
    stream_info = self._get_stream_info()
    log.debug("Live stream info: {}".format(stream_info))

    if not stream_info.get("movie") or not stream_info["movie"]["live"]:
        raise PluginError("The live stream is offline")
    if not stream_info.get("fmp4"):
        raise PluginError("Login required")

    # Keys are already validated by schema above
    fmp4 = stream_info["fmp4"]
    proto = fmp4["proto"]
    host = fmp4["host"]
    movie_id = stream_info["movie"]["id"]

    # Pick the best quality the server offers.
    if fmp4["source"]:
        mode = "main"  # High quality
    elif fmp4["mobilesource"]:
        mode = "mobilesource"  # Medium quality
    else:
        mode = "base"  # Low quality

    if (proto == '') or (host == '') or (not movie_id):
        raise PluginError("No stream available for user {}".format(
            self.channel))

    real_stream_url = self._STREAM_REAL_URL.format(
        proto=proto, host=host, movie_id=movie_id, mode=mode)

    # Password-protected streams expect the md5 digest in the "word" param.
    password = self.options.get("password")
    if password is not None:
        password_hash = hashlib.md5(password.encode()).hexdigest()
        real_stream_url = update_qsd(real_stream_url, {"word": password_hash})

    log.debug("Real stream url: {}".format(real_stream_url))
    return {mode: TwitCastingStream(session=self.session, url=real_stream_url)}
def _get_streams(self):
    """Return HLS variant streams for the Channel One (1tv) live feed."""
    # Select the channel (and display title) based on the URL.
    if "orbit-plus-4" in self.url:
        channel = "1tv-orbit-plus-4"
        self.title = u"Первый канал HD (+4)"
    else:
        channel = "1tvch"
        self.title = u"Первый канал HD"

    playlist_url = "https://stream.1tv.ru/api/playlist/{0}_as_array.json".format(channel)
    url = self.session.http.get(
        playlist_url,
        data={"r": random.randint(1, 100000)},
        schema=validate.Schema(
            validate.parse_json(),
            {"hls": [validate.url()]},
            validate.get(("hls", 0)),
        ))
    if not url:
        return
    log.debug("{0}".format(url))
    if "georestrictions" in url:
        log.error("Stream is geo-restricted")
        return

    # The "s" session value arrives URL-quoted; unquote before re-encoding.
    session_schema = validate.Schema(
        validate.parse_json(),
        {"s": validate.transform(unquote)},
    )
    hls_session = self.session.http.get(
        "https://stream.1tv.ru/get_hls_session",
        schema=session_schema)

    url = update_qsd(url, qsd=hls_session, safe="/:")
    return HLSStream.parse_variant_playlist(
        self.session, url, name_fmt="{pixels}_{bitrate}")
def test_update_qsd():
    """Full update_qsd behavior: updates, removals, blanks, and quoting options."""
    # Update and remove existing parameters.
    assert update_qsd("http://test.se?one=1&two=3", {"two": 2}) == "http://test.se?one=1&two=2"
    assert update_qsd("http://test.se?one=1&two=3", remove=["two"]) == "http://test.se?one=1"
    # remove="*" clears everything; None re-adds the pre-existing value.
    assert update_qsd("http://test.se?one=1&two=3", {"one": None}, remove="*") == "http://test.se?one=1"
    # Blank-value handling.
    assert update_qsd("http://test.se", OrderedDict([("one", ""), ("two", "")])) == "http://test.se?one=&two=", \
        "should add empty params"
    assert update_qsd(
        "http://test.se?one=", {"one": None
                                }) == "http://test.se?one=", "should leave empty params unchanged"
    assert update_qsd("http://test.se?one=", keep_blank_values=False
                      ) == "http://test.se", "should strip blank params"
    assert update_qsd("http://test.se?one=&two=", {"one": None}, keep_blank_values=False) == "http://test.se?one=", \
        "should leave one"
    assert update_qsd("http://test.se?&two=", {"one": ''}, keep_blank_values=False) == "http://test.se?one=", \
        "should set one blank"
    assert update_qsd("http://test.se?one=", {"two": 2}) == "http://test.se?one=&two=2"
    # Quoting: existing encodings are preserved, raw reserved chars re-encoded.
    assert update_qsd("http://test.se?foo=%3F", {"bar": "!"}) == "http://test.se?foo=%3F&bar=%21", \
        "urlencode - encoded URL"
    assert update_qsd("http://test.se?foo=?", {"bar": "!"}) == "http://test.se?foo=%3F&bar=%21", \
        "urlencode - fix URL"
    # Custom quote_via callables bypass encoding entirely.
    assert update_qsd("http://test.se?foo=?", {"bar": "!"}, quote_via=lambda s, *_: s) == "http://test.se?foo=?&bar=!", \
        "urlencode - dummy quote method"
    # Default is quote_plus (space -> "+"); quote() honors the safe set.
    assert update_qsd("http://test.se", {"foo": "/ "}) == "http://test.se?foo=%2F+", \
        "urlencode - default quote_plus"
    assert update_qsd("http://test.se", {"foo": "/ "}, safe="/", quote_via=quote) == "http://test.se?foo=/%20", \
        "urlencode - regular quote with reserved slash"
    assert update_qsd("http://test.se", {"foo": "/ "}, safe="", quote_via=quote) == "http://test.se?foo=%2F%20", \
        "urlencode - regular quote without reserved slash"
def _get_streams(self):
    """Resolve a France TV video: locate the video id, query the player API,
    exchange the token, and yield DASH or HLS streams."""
    self.session.http.headers.update({
        "User-Agent": useragents.CHROME
    })
    CHROME_VERSION = re.compile(r"Chrome/(\d+)").search(useragents.CHROME).group(1)

    # Retrieve geolocation data
    country_code = self.session.http.get(self.GEO_URL, schema=validate.Schema(
        validate.parse_json(),
        {"reponse": {"geo_info": {
            "country_code": validate.text
        }}},
        validate.get(("reponse", "geo_info", "country_code"))
    ))
    log.debug("Country: {0}".format(country_code))

    # Retrieve URL page and search for video ID
    # Four page layouts are tried in order: FTVPlayerVideos JSON,
    # legacy Magnetoscope player, player-wrapper element id, magneto data-id.
    video_id = None
    try:
        video_id = self.session.http.get(self.url, schema=validate.Schema(
            validate.parse_html(),
            validate.any(
                validate.all(
                    validate.xml_xpath_string(".//script[contains(text(),'window.FTVPlayerVideos')][1]/text()"),
                    validate.text,
                    validate.transform(self._re_ftv_player_videos.search),
                    validate.get("json"),
                    validate.parse_json(),
                    [{"videoId": validate.text}],
                    validate.get((0, "videoId"))
                ),
                validate.all(
                    validate.xml_xpath_string(".//script[contains(text(),'new Magnetoscope')][1]/text()"),
                    validate.text,
                    validate.transform(self._re_player_load.search),
                    validate.get("video_id")
                ),
                validate.all(
                    validate.xml_xpath_string(".//*[@id][contains(@class,'francetv-player-wrapper')][1]/@id"),
                    validate.text
                ),
                validate.all(
                    validate.xml_xpath_string(".//*[@data-id][@class='magneto'][1]/@data-id"),
                    validate.text
                )
            )
        ))
    except PluginError:
        # No recognizable player markup on the page; fall through to the
        # video_id check below.
        pass
    if not video_id:
        return
    log.debug("Video ID: {0}".format(video_id))

    # The player API expects full client context in the query string.
    api_url = update_qsd(self.API_URL.format(video_id=video_id), {
        "country_code": country_code,
        "w": 1920,
        "h": 1080,
        "player_version": self.PLAYER_VERSION,
        "domain": urlparse(self.url).netloc,
        "device_type": "mobile",
        "browser": "chrome",
        "browser_version": CHROME_VERSION,
        "os": "ios",
        "gmt": datetime.now(tz=LOCALTIMEZONE).strftime("%z")
    })
    video_format, token_url, url, self.title = self.session.http.get(api_url, schema=validate.Schema(
        validate.parse_json(),
        {
            "video": {
                "workflow": validate.any("token-akamai", "dai"),
                "format": validate.any("dash", "hls"),
                "token": validate.url(),
                "url": validate.url()
            },
            "meta": {
                "title": validate.text
            }
        },
        validate.union_get(
            ("video", "format"),
            ("video", "token"),
            ("video", "url"),
            ("meta", "title")
        )
    ))
    # Exchange the token: the token endpoint returns the final stream URL.
    data_url = update_qsd(token_url, {
        "url": url
    })
    video_url = self.session.http.get(data_url, schema=validate.Schema(
        validate.parse_json(),
        {"url": validate.url()},
        validate.get("url")
    ))

    if video_format == "dash":
        for s in DASHStream.parse_manifest(self.session, video_url).items():
            yield s
    elif video_format == "hls":
        for s in HLSStream.parse_variant_playlist(self.session, video_url).items():
            yield s
def _get_playlist(self, host, path, params, token):
    """Attach the session JWT to the playlist query string and yield streams."""
    qs = parse_qs(params)
    # NOTE(review): parse_qs maps every key to a *list* of values, while
    # "jwt" is assigned as a plain string — verify that update_qsd encodes
    # list-valued entries as intended (urllib's parse_qsd-style flat dicts
    # would avoid the ambiguity).
    qs["jwt"] = token
    yield from PlutoHLSStream.parse_variant_playlist(
        self.session, update_qsd(urljoin(host, path), qs)).items()
def test_update_qsd(self):
    """Core update/remove semantics of update_qsd."""
    # Overwrite an existing parameter value.
    self.assertEqual(update_qsd("http://test.se?one=1&two=3", {"two": 2}), "http://test.se?one=1&two=2")
    # Drop a named parameter.
    self.assertEqual(update_qsd("http://test.se?one=1&two=3", remove=["two"]), "http://test.se?one=1")
    # remove="*" wipes all params; None in the update dict restores the
    # parameter's pre-existing value.
    self.assertEqual(update_qsd("http://test.se?one=1&two=3", {"one": None}, remove="*"), "http://test.se?one=1")
def _get_streams(self):
    """Resolve the mediavitrina player page for the various site frontends
    and return the HLS variant streams."""
    self.session.http.headers.update({"Referer": self.url})
    p_netloc = urlparse(self.url).netloc
    if p_netloc == "player.mediavitrina.ru":
        # https://player.mediavitrina.ru/
        url_player = self.url
    elif p_netloc.endswith("ctc.ru"):
        # https://ctc.ru/online/
        # The CTC site exposes the player URL via its page API.
        url_player = self.session.http.get(
            "https://ctc.ru/api/page/v1/online/",
            schema=validate.Schema(
                validate.parse_json(),
                {
                    "content": validate.all(
                        [dict],
                        validate.filter(lambda n: n.get("type") == "on-air"),
                        [{
                            "onAirLink": validate.url(netloc="player.mediavitrina.ru")
                        }],
                        validate.get((0, "onAirLink")))
                },
                validate.get("content")))
    else:
        # https://chetv.ru/online/
        # https://ctclove.ru/online/
        # https://domashniy.ru/online/
        # Other frontends embed the player in an iframe.
        url_player = self.session.http.get(
            self.url,
            schema=validate.Schema(
                validate.parse_html(),
                validate.xml_xpath_string(
                    ".//iframe[starts-with(@src,'https://player.mediavitrina.ru/')]/@src"
                ),
            ),
            acceptable_status=(200, 403, 404))
    if not url_player:
        return
    log.debug(f"url_player={url_player}")

    # Locate the JSON API URL inside the player page's inline scripts.
    script_data = self.session.http.get(
        url_player,
        schema=validate.Schema(
            validate.parse_html(),
            validate.xml_xpath_string(
                ".//script[contains(text(),'media.mediavitrina.ru/')]/text()"
            ),
        ))
    if not script_data:
        log.debug("invalid script_data")
        return

    m = self._re_url_json.search(script_data)
    if not m:
        log.debug("invalid url_json")
        return
    url_json = m.group(0)
    log.debug(f"url_json={url_json}")
    # Fill in the player's template placeholders; unknown ones are stripped.
    url_json = re.sub(r"\{\{PLAYER_REFERER_HOSTNAME\}\}", "mediavitrina.ru", url_json)
    url_json = re.sub(r"\{\{[A-Za-z_]+\}\}", "", url_json)

    # The access token is passed to the playlist API as query-string params.
    res_token = self.session.http.get(
        "https://media.mediavitrina.ru/get_token",
        schema=validate.Schema(
            validate.parse_json(),
            {"result": {
                "token": str
            }},
            validate.get("result"),
        ))
    url = self.session.http.get(
        update_qsd(url_json, qsd=res_token),
        schema=validate.Schema(
            validate.parse_json(),
            {"hls": [validate.url()]},
            validate.get(("hls", 0)),
        ))
    if not url:
        return
    if "georestrictions" in url:
        log.error("Stream is geo-restricted")
        return

    return HLSStream.parse_variant_playlist(
        self.session, url, name_fmt="{pixels}_{bitrate}")