class NineAnime(Plugin):
    _episode_info_url = "//9anime.to/ajax/episode/info"
    _info_schema = validate.Schema({
        "grabber": validate.url(),
        "params": {
            "id": validate.text,
            "token": validate.text,
            "options": validate.text,
        }
    })
    _streams_schema = validate.Schema({
        "token": validate.text,
        "error": None,
        "data": [{
            "label": validate.text,
            "file": validate.url(),
            "type": "mp4"
        }]
    })
    _url_re = re.compile(r"https?://9anime.to/watch/(?:[^.]+?\.)(\w+)/(\w+)")

    @classmethod
    def can_handle_url(cls, url):
        return cls._url_re.match(url) is not None

    def add_scheme(self, url):
        # update the scheme for the grabber url if required
        if url.startswith("//"):
            url = "{0}:{1}".format(urlparse(self.url).scheme, url)
        return url

    def _get_streams(self):
        match = self._url_re.match(self.url)
        film_id, episode_id = match.groups()

        headers = {"Referer": self.url, "User-Agent": useragents.FIREFOX}

        # Get the info about the Episode, including the Grabber API URL
        info_res = http.get(self.add_scheme(self._episode_info_url),
                            params=dict(update=0, film=film_id, id=episode_id),
                            headers=headers)
        info = http.json(info_res, schema=self._info_schema)

        # Get the data about the streams from the Grabber API
        grabber_url = self.add_scheme(info["grabber"])
        stream_list_res = http.get(grabber_url, params=info["params"], headers=headers)
        stream_data = http.json(stream_list_res, schema=self._streams_schema)

        for stream in stream_data["data"]:
            yield stream["label"], HTTPStream(self.session, stream["file"])
class Euronews(Plugin):
    _url_re = re.compile(r"http(?:s)?://(\w+)\.?euronews.com/(live|.*)")
    _re_vod = re.compile(r'<meta\s+property="og:video"\s+content="(http.*?)"\s*/>')
    _live_api_url = "http://{0}.euronews.com/api/watchlive.json"
    _live_schema = validate.Schema({u"url": validate.url()})
    _stream_api_schema = validate.Schema({
        u'status': u'ok',
        u'primary': validate.url(),
        validate.optional(u'backup'): validate.url()
    })

    @classmethod
    def can_handle_url(cls, url):
        return cls._url_re.match(url)

    def _get_vod_stream(self):
        """
        Find the VOD video url
        :return: video url
        """
        res = http.get(self.url)
        video_urls = self._re_vod.findall(res.text)
        if len(video_urls):
            return dict(vod=HTTPStream(self.session, video_urls[0]))

    def _get_live_streams(self, subdomain):
        """
        Get the live stream in a particular language
        :param subdomain:
        :return:
        """
        res = http.get(self._live_api_url.format(subdomain))
        live_res = http.json(res, schema=self._live_schema)
        api_res = http.get(live_res[u"url"])
        stream_data = http.json(api_res, schema=self._stream_api_schema)
        return HLSStream.parse_variant_playlist(self.session, stream_data[u'primary'])

    def _get_streams(self):
        """
        Find the streams for euronews
        :return:
        """
        match = self._url_re.match(self.url)
        subdomain, path = match.groups()

        if path == "live":
            return self._get_live_streams(subdomain)
        else:
            return self._get_vod_stream()
class ovvaTV(Plugin):
    url_re = re.compile(r"https?://(?:www\.)?ovva.tv/(?:ua/)?tvguide/.*?/online")
    iframe_re = re.compile(r"iframe .*?src=\"((?:https?:)?//(?:\w+\.)?ovva.tv/[^\"]+)\"", re.DOTALL)
    data_re = re.compile(r"ovva\(\'(.*?)\'\);")
    ovva_data_schema = validate.Schema({
        "url": validate.url()
    }, validate.get("url"))
    ovva_redirect_schema = validate.Schema(validate.all(
        validate.transform(lambda x: x.split("=")),
        ['302', validate.url()],
        validate.get(1)
    ))

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def find_iframe(self, res):
        for url in self.iframe_re.findall(res.text):
            if url.startswith("//"):
                p = urlparse(self.url)
                return "{0}:{1}".format(p.scheme, url)
            else:
                return url

    def _get_streams(self):
        http.headers = {"User-Agent": useragents.ANDROID}
        res = http.get(self.url)
        iframe_url = self.find_iframe(res)

        if iframe_url:
            self.logger.debug("Found iframe: {0}", iframe_url)
            res = http.get(iframe_url, headers={"Referer": self.url})
            data = self.data_re.search(res.text)
            if data:
                try:
                    ovva_url = parse_json(b64decode(data.group(1)).decode("utf8"),
                                          schema=self.ovva_data_schema)
                    stream_url = http.get(ovva_url, schema=self.ovva_redirect_schema)
                except PluginError as e:
                    self.logger.error("Could not find stream URL: {0}", e)
                else:
                    return HLSStream.parse_variant_playlist(self.session, stream_url)
            else:
                self.logger.error("Could not find player data.")
class LiveMe(Plugin):
    url_re = re.compile(r"https?://(www.)?liveme\.com/live\.html\?videoid=(\d+)")
    api_url = "https://live.ksmobile.net/live/queryinfo"
    api_schema = validate.Schema(validate.all({
        "status": "200",
        "data": {
            "video_info": {
                "videosource": validate.any('', validate.url()),
                "hlsvideosource": validate.any('', validate.url()),
            }
        }
    }, validate.get("data")))

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def _random_t(self, t):
        return "".join(random.choice("ABCDEFGHJKMNPQRSTWXYZabcdefhijkmnprstwxyz2345678") for _ in range(t))

    def _make_stream(self, url):
        if url and url.endswith("flv"):
            return HTTPStream(self.session, url)
        elif url and url.endswith("m3u8"):
            return HLSStream(self.session, url)

    def _get_streams(self):
        url_params = dict(parse_qsl(urlparse(self.url).query))
        video_id = url_params.get("videoid")
        if video_id:
            vali = '{0}l{1}m{2}'.format(self._random_t(4), self._random_t(4), self._random_t(5))
            data = {
                'userid': 1,
                'videoid': video_id,
                'area': '',
                'h5': 1,
                'vali': vali
            }
            self.logger.debug("Found Video ID: {0}".format(video_id))
            res = http.post(self.api_url, data=data)
            data = http.json(res, schema=self.api_schema)
            hls = self._make_stream(data["video_info"]["hlsvideosource"])
            video = self._make_stream(data["video_info"]["videosource"])
            if hls:
                yield "live", hls
            if video:
                yield "live", video
class RaiPlay(Plugin):
    url_re = re.compile(r"https?://(?:www\.)?raiplay\.it/dirette/(\w+)/?")
    stream_re = re.compile(r"data-video-url.*?=.*?\"([^\"]+)\"")
    stream_schema = validate.Schema(
        validate.all(
            validate.transform(stream_re.search),
            validate.any(
                None,
                validate.all(validate.get(1), validate.url())
            )
        )
    )

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def _get_streams(self):
        http.headers.update({"User-Agent": useragents.FIREFOX})
        channel = self.url_re.match(self.url).group(1)
        self.logger.debug("Found channel: {0}", channel)
        stream_url = http.get(self.url, schema=self.stream_schema)
        if stream_url:
            try:
                return HLSStream.parse_variant_playlist(self.session, stream_url)
            except Exception as e:
                if "Missing #EXTM3U header" in str(e):
                    raise PluginError("The streaming of this content is available in Italy only.")
                raise e
class TV8(Plugin):
    """
    Support for the live stream on www.tv8.com.tr
    """
    url_re = re.compile(r"https?://www.tv8.com.tr/canli-yayin")
    player_config_re = re.compile(r"""
        configPlayer.source.media.push[ ]*\(
        [ ]*\{[ ]*'src':[ ]*"(.*?)",
        [ ]*type:[ ]*"application/x-mpegURL"[ ]*}[ ]*\);
    """, re.VERBOSE)
    player_config_schema = validate.Schema(
        validate.transform(player_config_re.search),
        validate.any(
            None,
            validate.all(validate.get(1), validate.url())
        )
    )

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def _get_streams(self):
        res = http.get(self.url)
        stream_url = self.player_config_schema.validate(res.text)
        if stream_url:
            return HLSStream.parse_variant_playlist(self.session, stream_url)
class Streamable(Plugin):
    url_re = re.compile(r"https?://(?:www\.)?streamable\.com/(.+)")
    meta_re = re.compile(r'''var\s*videoObject\s*=\s*({.*});''')
    config_schema = validate.Schema(
        validate.transform(meta_re.search),
        validate.any(
            None,
            validate.all(
                validate.get(1),
                validate.transform(parse_json),
                {
                    "files": {
                        validate.text: {
                            "url": validate.url(),
                            "width": int,
                            "height": int,
                            "bitrate": int
                        }
                    }
                }
            )
        )
    )

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def _get_streams(self):
        data = http.get(self.url, schema=self.config_schema)

        for info in data["files"].values():
            stream_url = update_scheme(self.url, info["url"])
            # pick the smaller of the two dimensions, for landscape v. portrait videos
            res = min(info["width"], info["height"])
            yield "{0}p".format(res), HTTPStream(self.session, stream_url)
class Cam4(Plugin):
    _url_re = re.compile(r'https?://([a-z]+\.)?cam4.com/.+')
    _video_data_re = re.compile(r"flashData: (?P<flash_data>{.*}), hlsUrl: '(?P<hls_url>.+?)'")

    _flash_data_schema = validate.Schema(
        validate.all(
            validate.transform(parse_json),
            validate.Schema({
                'playerUrl': validate.url(),
                'flashVars': validate.Schema({
                    'videoPlayUrl': validate.text,
                    'videoAppUrl': validate.url(scheme='rtmp')
                })
            })
        )
    )

    @classmethod
    def can_handle_url(cls, url):
        return Cam4._url_re.match(url)

    def _get_streams(self):
        res = http.get(self.url, headers={'User-Agent': useragents.ANDROID})
        match = self._video_data_re.search(res.text)
        if match is None:
            return

        hls_streams = HLSStream.parse_variant_playlist(
            self.session,
            match.group('hls_url'),
            headers={'Referer': self.url}
        )
        for s in hls_streams.items():
            yield s

        rtmp_video = self._flash_data_schema.validate(match.group('flash_data'))
        rtmp_stream = RTMPStream(self.session, {
            'rtmp': rtmp_video['flashVars']['videoAppUrl'],
            'playpath': rtmp_video['flashVars']['videoPlayUrl'],
            'swfUrl': rtmp_video['playerUrl']
        })
        yield 'live', rtmp_stream
class ovvaTV(Plugin):
    url_re = re.compile(r"https?://(?:www\.)?1plus1\.video/tvguide/embed/[^/]")
    data_re = re.compile(r"""ovva-player["'],["'](.*?)["']\)};""")
    next_date_re = re.compile(r"""<div\sclass=["']o-message-timer['"]\sdata-timer=["'](\d+)["']""")
    ovva_data_schema = validate.Schema({
        "balancer": validate.url()
    }, validate.get("balancer"))
    ovva_redirect_schema = validate.Schema(validate.all(
        validate.transform(lambda x: x.split("=")),
        ['302', validate.url()],
        validate.get(1)
    ))

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def _get_streams(self):
        res = http.get(self.url)
        data = self.data_re.search(res.text)
        next_date = self.next_date_re.search(res.text)
        if data:
            try:
                ovva_url = parse_json(b64decode(data.group(1)).decode("utf8"),
                                      schema=self.ovva_data_schema)
                stream_url = http.get(ovva_url, schema=self.ovva_redirect_schema)
            except PluginError as e:
                self.logger.error("Could not find stream URL: {0}", e)
            else:
                return HLSStream.parse_variant_playlist(self.session, stream_url)
        elif next_date:
            self.logger.info("The broadcast will be available at {0}".format(
                datetime.fromtimestamp(int(next_date.group(1))).strftime('%Y-%m-%d %H:%M:%S')))
        else:
            self.logger.error("Could not find player data.")
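# A hedged illustration, not code from either ovvaTV plugin above: the balancer
# endpoint is expected to answer with a plain-text body of the form
# "302=<playlist url>", which ovva_redirect_schema splits on "=" and reduces to
# the URL itself. The sample body below is an assumption for demonstration only.
_sample_redirect_body = "302=https://example.com/hls/master.m3u8"
assert ovvaTV.ovva_redirect_schema.validate(_sample_redirect_body) == "https://example.com/hls/master.m3u8"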
class Turkuvaz(Plugin):
    """
    Plugin to support ATV/A2TV Live streams from www.atv.com.tr and www.a2tv.com.tr
    """
    _url_re = re.compile(r"""https?://(?:www.)?
        (?:(atvavrupa).tv|
           (atv|a2tv|ahaber|aspor|minikago|minikacocuk|anews).com.tr)
        /webtv/(live-broadcast|canli-yayin)""", re.VERBOSE)
    _hls_url = "http://trkvz-live.ercdn.net/{channel}/{channel}.m3u8"
    _token_url = "http://videotoken.tmgrup.com.tr/webtv/secure"
    _token_schema = validate.Schema(validate.all({
        "Success": True,
        "Url": validate.url(),
    }, validate.get("Url")))

    @classmethod
    def can_handle_url(cls, url):
        return cls._url_re.match(url) is not None

    def _get_streams(self):
        url_m = self._url_re.match(self.url)
        domain = url_m.group(1) or url_m.group(2)
        # remap the domain to channel
        channel = {
            "atv": "atvhd",
            "ahaber": "ahaberhd",
            "aspor": "asporhd",
            "anews": "anewshd",
            "minikacocuk": "minikagococuk"
        }.get(domain, domain)
        hls_url = self._hls_url.format(channel=channel)
        # get the secure HLS URL
        res = http.get(self._token_url,
                       params="url={0}".format(hls_url),
                       headers={
                           "Referer": self.url,
                           "User-Agent": useragents.CHROME
                       })
        secure_hls_url = http.json(res, schema=self._token_schema)

        self.logger.debug("Found HLS URL: {0}".format(secure_hls_url))
        return HLSStream.parse_variant_playlist(self.session, secure_hls_url)
class RadioNet(Plugin):
    _url_re = re.compile(r"https?://(\w+)\.radio\.(net|at|de|dk|es|fr|it|pl|pt|se)")
    _stream_data_re = re.compile(r'\bstation\s*:\s*(\{.+\}),?\s*')

    _stream_schema = validate.Schema(
        validate.transform(_stream_data_re.search),
        validate.any(
            None,
            validate.all(
                validate.get(1),
                validate.transform(parse_json),
                {
                    'stationType': validate.text,
                    'streamUrls': validate.all([{
                        'bitRate': int,
                        'streamUrl': validate.url()
                    }])
                },
            )
        )
    )

    @classmethod
    def can_handle_url(cls, url):
        return cls._url_re.match(url)

    def _get_streams(self):
        streams = http.get(self.url, schema=self._stream_schema)
        if streams is None:
            return

        # Ignore non-radio streams (podcasts...)
        if streams['stationType'] != 'radio_station':
            return

        stream_urls = []
        for stream in streams['streamUrls']:
            if stream['streamUrl'] in stream_urls:
                continue

            if stream['bitRate'] > 0:
                bitrate = '{}k'.format(stream['bitRate'])
            else:
                bitrate = 'live'
            yield bitrate, HTTPStream(self.session, stream['streamUrl'])

            stream_urls.append(stream['streamUrl'])
class RaiPlay(Plugin):
    url_re = re.compile(r"https?://(?:www\.)?raiplay\.it/dirette/(\w+)/?")
    stream_re = re.compile(r"data-video-url.*?=.*?\"([^\"]+)\"")
    stream_schema = validate.Schema(
        validate.all(
            validate.transform(stream_re.search),
            validate.any(
                None,
                validate.all(validate.get(1), validate.url())
            )
        )
    )

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def _get_streams(self):
        channel = self.url_re.match(self.url).group(1)
        self.logger.debug("Found channel: {0}", channel)
        stream_url = http.get(self.url, schema=self.stream_schema)
        if stream_url:
            return HLSStream.parse_variant_playlist(self.session, stream_url)
class CinerGroup(Plugin):
    """
    Support for the live stream on www.showtv.com.tr
    """
    url_re = re.compile(r"""https?://(?:www.)?
        (?:
            showtv.com.tr/canli-yayin(/showtv)?|
            haberturk.com/canliyayin|
            showmax.com.tr/canliyayin|
            showturk.com.tr/canli-yayin/showturk|
            bloomberght.com/tv|
            haberturk.tv/canliyayin
        )/?""", re.VERBOSE)
    stream_re = re.compile(r"""div .*? data-ht=(?P<quote>["'])(?P<data>.*?)(?P=quote)""", re.DOTALL)
    stream_data_schema = validate.Schema(
        validate.transform(stream_re.search),
        validate.any(
            None,
            validate.all(
                validate.get("data"),
                validate.transform(unquote),
                validate.transform(lambda x: x.replace("&quot;", '"')),
                validate.transform(json.loads),
                {"ht_stream_m3u8": validate.url()},
                validate.get("ht_stream_m3u8")
            )
        )
    )

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def _get_streams(self):
        res = http.get(self.url)
        stream_url = self.stream_data_schema.validate(res.text)
        if stream_url:
            return HLSStream.parse_variant_playlist(self.session, stream_url)
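# A hedged illustration of the decoding chain in stream_data_schema above: the page
# carries the stream info in a data-ht attribute as URL-encoded / HTML-escaped JSON,
# which the schema unquotes, un-escapes and parses before extracting the HLS URL.
# The sample markup below is an assumption for demonstration only.
_sample_page = (
    '<div class="player" data-ht="'
    '{&quot;ht_stream_m3u8&quot;:&quot;https://example.com/live/master.m3u8&quot;}">'
    '</div>'
)
assert CinerGroup.stream_data_schema.validate(_sample_page) == "https://example.com/live/master.m3u8"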
class PowerApp(Plugin):
    url_re = re.compile(r"https?://(?:www.)?powerapp.com.tr/tv/(\w+)")
    api_url = "http://api.powergroup.com.tr/Channels/{0}/?appRef=iPowerWeb&apiVersion=11"
    api_schema = validate.Schema(validate.all({
        "errorCode": 0,
        "response": {
            "channel_stream_url": validate.url()
        }
    }, validate.get("response")))

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def _get_streams(self):
        channel = self.url_re.match(self.url).group(1)

        res = http.get(self.api_url.format(channel))
        data = http.json(res, schema=self.api_schema)

        return HLSStream.parse_variant_playlist(self.session, data["channel_stream_url"])
class FilmOnAPI(object):
    channel_url = "http://www.filmon.com/api-v2/channel/{0}?protocol=hls"
    vod_url = "http://www.filmon.com/vod/info/{0}"

    stream_schema = {
        "quality": validate.text,
        "url": validate.url(),
        "watch-timeout": int
    }
    api_schema = validate.Schema(
        {
            "data": {
                "streams": validate.any(
                    {validate.text: stream_schema},
                    [stream_schema]
                )
            }
        },
        validate.get("data")
    )

    def channel(self, channel):
        res = http.get(self.channel_url.format(channel))
        return http.json(res, schema=self.api_schema)

    def vod(self, vod_id):
        res = http.get(self.vod_url.format(vod_id))
        return http.json(res, schema=self.api_schema)
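# A hedged usage sketch, not the actual FilmOn plugin: api_schema above allows
# "streams" to be either a dict keyed by quality or a plain list, so a caller has
# to normalise the shape before iterating. The helper name and the way the streams
# are consumed here are illustrative assumptions only.
def _iter_filmon_streams(api, channel):
    data = api.channel(channel)
    streams = data["streams"]
    if isinstance(streams, dict):
        # dict form: {quality: stream_info, ...}
        streams = streams.values()
    for stream in streams:
        # each stream_info matches stream_schema: quality, url and watch-timeout
        yield stream["quality"], stream["url"], stream["watch-timeout"]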
{ "chansub": { "restricted_bitrates": validate.all([validate.text], validate.filter(lambda n: not re.match( r"(.+_)?archives|live|chunked", n))) } }, validate.get("chansub")) _user_schema = validate.Schema( {validate.optional("display_name"): validate.text}, validate.get("display_name")) _video_schema = validate.Schema({ "chunks": { validate.text: [{ "length": int, "url": validate.any(None, validate.url(scheme="http")), "upkeep": validate.any("pass", "fail", None) }] }, "restrictions": { validate.text: validate.text }, "start_offset": int, "end_offset": int, }) _viewer_info_schema = validate.Schema( {validate.optional("login"): validate.text}, validate.get("login")) _viewer_token_schema = validate.Schema( {validate.optional("token"): validate.text}, validate.get("token")) _quality_options_schema = validate.Schema( {
class Looch(Plugin):
    url_re = re.compile(r"https?://(?:www\.)?looch\.tv/channel/(?P<name>[^/]+)(/videos/(?P<video_id>\d+))?")
    api_base = "https://api.looch.tv"
    channel_api = api_base + "/channels/{name}"
    video_api = api_base + "/videos/{id}"
    playback_schema = validate.Schema({"weight": int, "uri": validate.url()})
    data_schema = validate.Schema({
        "type": validate.text,
        "attributes": {
            validate.optional("playback"): [playback_schema],
            validate.optional("resolution"): {"width": int, "height": int}
        }
    })
    channel_schema = validate.Schema(
        validate.transform(parse_json),
        {
            "included": validate.all(
                [data_schema],
                validate.filter(lambda x: x["type"] == "active_streams"),
                validate.map(lambda x: x["attributes"].get("playback")),
                validate.transform(lambda x: list(itertools.chain(*x)))
            ),
        },
        validate.get("included")
    )
    video_schema = validate.Schema(
        validate.transform(parse_json),
        {"data": data_schema},
        validate.get("data"),
        validate.get("attributes")
    )

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def _get_live_stream(self, channel):
        url = self.channel_api.format(name=channel)
        self.logger.debug("Channel API call: {0}", url)
        data = http.get(url, schema=self.channel_schema)
        self.logger.debug("Got {0} channel playback items", len(data))
        for playback in data:
            for s in HLSStream.parse_variant_playlist(self.session, playback["uri"]).items():
                yield s

    def _get_video_stream(self, video_id):
        url = self.video_api.format(id=video_id)
        self.logger.debug("Video API call: {0}", url)
        data = http.get(url, schema=self.video_schema)
        self.logger.debug("Got video {0} playback items", len(data["playback"]))
        res = data["resolution"]["height"]
        for playback in data["playback"]:
            yield "{0}p".format(res), HTTPStream(self.session, playback["uri"])

    def _get_streams(self):
        match = self.url_re.match(self.url)
        self.logger.debug("Matched URL: name={name}, video_id={video_id}", **match.groupdict())
        if match.group("video_id"):
            return self._get_video_stream(match.group("video_id"))
        elif match.group("name"):
            return self._get_live_stream(match.group("name"))
_config_schema = validate.Schema(
    {
        validate.optional("fmt_list"): validate.all(
            validate.text,
            validate.transform(parse_fmt_list)
        ),
        validate.optional("url_encoded_fmt_stream_map"): validate.all(
            validate.text,
            validate.transform(parse_stream_map),
            [{
                "itag": validate.all(
                    validate.text,
                    validate.transform(int)
                ),
                "quality": validate.text,
                "url": validate.url(scheme="http"),
                validate.optional("s"): validate.text,
                validate.optional("stereo3d"): validate.all(
                    validate.text,
                    validate.transform(int),
                    validate.transform(bool)
                ),
            }]
        ),
        validate.optional("adaptive_fmts"): validate.all(
            validate.text,
            validate.transform(parse_stream_map),
            [{
                validate.optional("s"): validate.text,
                "type": validate.all(
                    validate.text,
class CanalPlus(Plugin):
    # NOTE: no live url for the moment
    API_URL = 'https://secure-service.canal-plus.com/video/rest/getVideos/cplus/{0}?format=json'
    HDCORE_VERSION = '3.1.0'
    # Secret parameter needed to download HTTP videos on canalplus.fr
    SECRET = 'pqzerjlsmdkjfoiuerhsdlfknaes'

    _url_re = re.compile(r'''
        (https|http)://
        (
            www.mycanal.fr/(.*)/(.*)/p/(?P<video_id>[0-9]+)
            |
            www\.cnews\.fr/.+
        )
    ''', re.VERBOSE)
    _video_id_re = re.compile(r'(\bdata-video="|<meta property="og:video" content=".+?&videoId=)(?P<video_id>[0-9]+)"')
    _mp4_bitrate_re = re.compile(r'.*_(?P<bitrate>[0-9]+k)\.mp4')
    _api_schema = validate.Schema({
        'ID_DM': validate.text,
        'TYPE': validate.text,
        'MEDIA': validate.Schema({
            'VIDEOS': validate.Schema({
                validate.text: validate.any(validate.url(), '')
            })
        })
    })
    _user_agent = useragents.CHROME

    @classmethod
    def can_handle_url(cls, url):
        return CanalPlus._url_re.match(url)

    def _get_streams(self):
        # Get video ID and channel from URL
        match = self._url_re.match(self.url)
        video_id = match.group('video_id')
        if video_id is None:
            # Retrieve URL page and search for video ID
            res = http.get(self.url)
            match = self._video_id_re.search(res.text)
            if match is None:
                return
            video_id = match.group('video_id')

        res = http.get(self.API_URL.format(video_id))
        videos = http.json(res, schema=self._api_schema)
        parsed = []
        headers = {'User-Agent': self._user_agent}

        # Some videos may also be available on Dailymotion (especially on CNews)
        if videos['ID_DM'] != '':
            for stream in self.session.streams('https://www.dailymotion.com/video/' + videos['ID_DM']).items():
                yield stream

        for quality, video_url in list(videos['MEDIA']['VIDEOS'].items()):
            # Ignore empty URLs
            if video_url == '':
                continue

            # Ignore duplicate video URLs
            if video_url in parsed:
                continue
            parsed.append(video_url)

            try:
                # HDS streams don't seem to work for live videos
                if '.f4m' in video_url and 'LIVE' not in videos['TYPE']:
                    for stream in HDSStream.parse_manifest(self.session,
                                                           video_url,
                                                           params={'hdcore': self.HDCORE_VERSION},
                                                           headers=headers).items():
                        yield stream
                elif '.m3u8' in video_url:
                    for stream in HLSStream.parse_variant_playlist(self.session,
                                                                   video_url,
                                                                   headers=headers).items():
                        yield stream
                elif '.mp4' in video_url:
                    # Get bitrate from video filename
                    match = self._mp4_bitrate_re.match(video_url)
                    if match is not None:
                        bitrate = match.group('bitrate')
                    else:
                        bitrate = quality
                    yield bitrate, HTTPStream(self.session,
                                              video_url,
                                              params={'secret': self.SECRET},
                                              headers=headers)
            except IOError as err:
                if '403 Client Error' in str(err):
                    self.logger.error('Failed to access stream, may be due to geo-restriction')
"error": bool, validate.optional("code"): validate.text, validate.optional("message"): validate.text, validate.optional("data"): object, }) _media_schema = validate.Schema( { "stream_data": validate.any( None, { "streams": validate.all([{ "quality": validate.any(validate.text, None), "url": validate.url(scheme="http", path=validate.endswith(".m3u8")), validate.optional("video_encode_id"): validate.text }]) }) }, validate.get("stream_data")) _login_schema = validate.Schema({ "auth": validate.text, "expires": validate.all(validate.text, validate.transform(parse_timestamp)), "user": { "username": validate.any(validate.text, None), "email": validate.text } })
"geo_blocked": [ "ES", ], "notes": "", "live": True, "vod": True, "last_update": "2015-03-13", } STREAM_INFO_URL = "http://dinamics.ccma.cat/pvideo/media.jsp?media=video&version=0s&idint={ident}&profile=pc&desplacament=0" _url_re = re.compile(r"http://(?:www.)?ccma.cat/tv3/directe/(.+?)/") _media_schema = validate.Schema({ "geo": validate.text, "url": validate.url(scheme=validate.any("http")) }) _channel_schema = validate.Schema( {"media": validate.any([_media_schema], _media_schema)}) class TV3Cat(Plugin): @classmethod def can_handle_url(self, url): match = _url_re.match(url) return match def _get_streams(self): match = _url_re.match(self.url) if match:
class IDF1(Plugin):
    DACAST_API_URL = 'https://json.dacast.com/b/{}/{}/{}'
    DACAST_TOKEN_URL = 'https://services.dacast.com/token/i/b/{}/{}/{}'

    _url_re = re.compile(r'http://www\.idf1\.fr/(videos/[^/]+/[^/]+\.html|live\b)')
    _video_id_re = re.compile(
        r"dacast\('(?P<broadcaster_id>\d+)_(?P<video_type>[a-z]+)_(?P<video_id>\d+)', 'replay_content', data\);"
    )
    _video_id_alt_re = re.compile(
        r'<script src="//player.dacast.com/js/player.js" id="(?P<broadcaster_id>\d+)_(?P<video_type>[cf])_(?P<video_id>\d+)"'
    )
    _player_url = 'http://ssl.p.jwpcdn.com/player/v/7.12.6/jwplayer.flash.swf'

    _api_schema = validate.Schema(
        validate.transform(parse_json),
        {
            validate.optional('html5'): validate.all(
                [{'src': validate.url()}],
            ),
            'hls': validate.url(),
            'hds': validate.url()
        },
        validate.transform(
            lambda x: [update_scheme(IDF1.DACAST_API_URL, x['hls']), x['hds']] + [y['src'] for y in x.get('html5', [])]
        )
    )
    _token_schema = validate.Schema(
        validate.transform(parse_json),
        {'token': validate.text},
        validate.get('token')
    )
    _user_agent = useragents.IE_11

    @classmethod
    def can_handle_url(cls, url):
        return IDF1._url_re.match(url)

    def _get_streams(self):
        res = http.get(self.url)
        match = self._video_id_re.search(res.text) or self._video_id_alt_re.search(res.text)
        if match is None:
            return

        broadcaster_id = match.group('broadcaster_id')
        video_type = match.group('video_type')
        video_id = match.group('video_id')

        videos = http.get(self.DACAST_API_URL.format(broadcaster_id, video_type, video_id),
                          schema=self._api_schema)
        token = http.get(self.DACAST_TOKEN_URL.format(broadcaster_id, video_type, video_id),
                         schema=self._token_schema)
        parsed = []
        for video_url in videos:
            video_url += token

            # Ignore duplicate video URLs
            if video_url in parsed:
                continue
            parsed.append(video_url)

            # Ignore HDS streams (broken)
            if '.m3u8' in video_url:
                for s in HLSStream.parse_variant_playlist(self.session, video_url).items():
                    yield s
class SRGSSR(Plugin):
    url_re = re.compile(r"""https?://(?:www\.)?
        (srf|rts|rsi|rtr)\.ch/
        (?:
            play/tv|
            livestream/player|
            live-streaming|
            sport/direct/(\d+)-
        )""", re.VERBOSE)
    api_url = "http://il.srgssr.ch/integrationlayer/1.0/ue/{site}/video/play/{id}.json"
    token_url = "http://tp.srgssr.ch/akahd/token"
    video_id_re = re.compile(r'urn(?:%3A|:)(srf|rts|rsi|rtr)(?:%3A|:)(?:ais(?:%3A|:))?video(?:%3A|:)([^&"]+)')
    video_id_schema = validate.Schema(validate.transform(video_id_re.search))
    api_schema = validate.Schema(
        {
            "Video": {
                "Playlists": {
                    "Playlist": [{
                        "@protocol": validate.text,
                        "url": [{
                            "@quality": validate.text,
                            "text": validate.url()
                        }]
                    }]
                }
            }
        },
        validate.get("Video"),
        validate.get("Playlists"),
        validate.get("Playlist")
    )
    token_schema = validate.Schema(
        {"token": {"authparams": validate.text}},
        validate.get("token"),
        validate.get("authparams")
    )

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def get_video_id(self):
        parsed = urlparse(self.url)
        qinfo = dict(parse_qsl(parsed.query or parsed.fragment.lstrip("?")))

        site, video_id = None, None
        url_m = self.url_re.match(self.url)

        # look for the video id in the URL, otherwise find it in the page
        if "tvLiveId" in qinfo:
            video_id = qinfo["tvLiveId"]
            site = url_m.group(1)
        elif url_m.group(2):
            site, video_id = url_m.group(1), url_m.group(2)
        else:
            video_id_m = http.get(self.url, schema=self.video_id_schema)
            if video_id_m:
                site, video_id = video_id_m.groups()

        return site, video_id

    def auth_url(self, url):
        parsed = urlparse(url)
        path, _ = parsed.path.rsplit("/", 1)
        token_res = http.get(self.token_url, params=dict(acl=path + "/*"))
        authparams = http.json(token_res, schema=self.token_schema)

        existing = dict(parse_qsl(parsed.query))
        existing.update(dict(parse_qsl(authparams)))

        return urlunparse(parsed._replace(query=urlencode(existing)))

    def _get_streams(self):
        site, video_id = self.get_video_id()

        if video_id and site:
            self.logger.debug("Found {0} video ID {1}", site, video_id)

            try:
                res = http.get(self.api_url.format(site=site, id=video_id))
            except PluginError:
                return

            for stream_info in http.json(res, schema=self.api_schema):
                for url in stream_info["url"]:
                    if stream_info["@protocol"] == "HTTP-HLS":
                        for s in HLSStream.parse_variant_playlist(
                                self.session, self.auth_url(url["text"])).items():
                            yield s
        formats[int(s[0])] = "{0}p".format(h)

    return formats


_config_schema = validate.Schema({
    validate.optional("fmt_list"): validate.all(
        validate.text,
        validate.transform(parse_fmt_list)
    ),
    validate.optional("url_encoded_fmt_stream_map"): validate.all(
        validate.text,
        validate.transform(parse_stream_map),
        [{
            "itag": validate.all(validate.text, validate.transform(int)),
            "quality": validate.text,
            "url": validate.url(scheme="http"),
            validate.optional("s"): validate.text,
            validate.optional("stereo3d"): validate.all(
                validate.text,
                validate.transform(int),
                validate.transform(bool)
            ),
        }]
    ),
    validate.optional("adaptive_fmts"): validate.all(
        validate.text,
        validate.transform(parse_stream_map),
        [{
            validate.optional("s"): validate.text,
            "type": validate.all(
                validate.text,
                validate.transform(lambda t: t.split(";")[0].split("/")),
                [validate.text, validate.text]
            ),
            "url":
class Dogan(Plugin):
    """
    Support for the live streams from Doğan Media Group channels
    """
    url_re = re.compile(r"""https?://(?:www.)?
        (?:teve2.com.tr/(?:canli-yayin|filmler/.*|programlar/.*)|
           kanald.com.tr/.*|
           cnnturk.com/canli-yayin|
           dreamtv.com.tr/canli-yayin|
           dreamturk.com.tr/canli)
    """, re.VERBOSE)
    playerctrl_re = re.compile(r'''<div[^>]*?ng-controller=(?P<quote>["'])(?:Live)?PlayerCtrl(?P=quote).*?>''', re.DOTALL)
    videoelement_re = re.compile(r'''<div[^>]*?id=(?P<quote>["'])video-element(?P=quote).*?>''', re.DOTALL)
    data_id_re = re.compile(r'''data-id=(?P<quote>["'])(?P<id>\w+)(?P=quote)''')
    content_id_re = re.compile(r'"content(?:I|i)d", "(\w+)"')
    content_api = "/actions/content/media/{id}"
    new_content_api = "/action/media/{id}"
    content_api_schema = validate.Schema({
        "Id": validate.text,
        "Media": {
            "Link": {
                "DefaultServiceUrl": validate.url(),
                validate.optional("ServiceUrl"): validate.any(validate.url(), ""),
                "SecurePath": validate.text,
            }
        }
    })

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def _get_content_id(self):
        res = http.get(self.url)
        # find the contentId
        content_id_m = self.content_id_re.search(res.text)
        if content_id_m:
            return content_id_m.group(1)

        # find the PlayerCtrl div
        player_ctrl_m = self.playerctrl_re.search(res.text)
        if player_ctrl_m:
            # extract the content id from the player control data
            player_ctrl_div = player_ctrl_m.group(0)
            content_id_m = self.data_id_re.search(player_ctrl_div)
            if content_id_m:
                return content_id_m.group("id")

        # find <div id="video-element"
        videoelement_m = self.videoelement_re.search(res.text)
        if videoelement_m:
            # extract the content id from the player control data
            videoelement_div = videoelement_m.group(0)
            content_id_m = self.data_id_re.search(videoelement_div)
            if content_id_m:
                return content_id_m.group("id")

    def _get_hls_url(self, content_id):
        # make the api url relative to the current domain
        if "cnnturk" in self.url or "teve2.com.tr" in self.url:
            self.logger.debug("Using new content API url")
            api_url = urljoin(self.url, self.new_content_api.format(id=content_id))
        else:
            api_url = urljoin(self.url, self.content_api.format(id=content_id))

        apires = http.get(api_url)

        stream_data = http.json(apires, schema=self.content_api_schema)
        d = stream_data["Media"]["Link"]
        return urljoin((d["ServiceUrl"] or d["DefaultServiceUrl"]), d["SecurePath"])

    def _get_streams(self):
        content_id = self._get_content_id()
        if content_id:
            self.logger.debug(u"Loading content: {}", content_id)
            hls_url = self._get_hls_url(content_id)
            return HLSStream.parse_variant_playlist(self.session, hls_url)
        else:
            self.logger.error(u"Could not find the contentId for this stream")
"last_update": "2017-02-02", } RUURL = "b=chrome&p=win&v=56&f=0&d=1" _url_re = re.compile(r"https?://www.rtvs.sk/televizia/live-[\w-]+") _playlist_url_re = re.compile(r'"playlist": "([^"]+)"') _playlist_schema = validate.Schema( [ { "sources": [ validate.any( { "type": "dash", "file": validate.url(scheme="http") }, { "type": "hls", "file": validate.url(scheme="http") }, { "type": "rtmp", "file": validate.text, "streamer": validate.url(scheme="rtmp") } ) ] } ], validate.get(0), validate.get("sources") )
class TV8cat(Plugin):
    url_re = re.compile(r"https?://(?:www\.)?tv8\.cat/directe/?")
    live_iframe = "http://www.8tv.cat/wp-content/themes/8tv/_/inc/_live_html.php"
    iframe_re = re.compile(r'iframe .*?src="((?:https?)?//[^"]*?)"')
    account_id_re = re.compile(r"accountId:\"(\d+?)\"")
    policy_key_re = re.compile(r"policyKey:\"(.+?)\"")
    britecove = "https://edge.api.brightcove.com/playback/v1/accounts/{account_id}/videos/{video_id}"
    britecove_schema = validate.Schema({
        "sources": [{
            "height": int,
            validate.optional("src"): validate.url(),
            validate.optional("app_name"): validate.url(scheme="rtmp"),
            validate.optional("stream_name"): validate.text
        }]
    })

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def _find_iframe(self, res):
        iframe = self.iframe_re.search(res.text)
        url = iframe and iframe.group(1)
        if url and url.startswith("//"):
            p = urlparse(self.url)
            url = "{0}:{1}".format(p.scheme, url)
        return url

    def _britecove_params(self, url):
        res = http.get(url, headers={
            "User-Agent": useragents.FIREFOX,
            "Referer": self.url
        })
        acc = self.account_id_re.search(res.text)
        pk = self.policy_key_re.search(res.text)

        query = dict(parse_qsl(urlparse(url).query))

        return {
            "video_id": query.get("videoId"),
            "account_id": acc and acc.group(1),
            "policy_key": pk and pk.group(1),
        }

    def _get_stream_data(self, **params):
        api_url = self.britecove.format(**params)
        res = http.get(api_url, headers={
            "Accept": "application/json;pk={policy_key}".format(**params)
        })
        return parse_json(res.text, schema=self.britecove_schema)

    def _get_streams(self):
        res = http.get(self.live_iframe)
        britecove_url = self._find_iframe(res)

        if britecove_url:
            self.logger.debug("Found britecove embed url: {0}", britecove_url)
            params = self._britecove_params(britecove_url)
            self.logger.debug("Got britecode params: {0}", params)
            stream_info = self._get_stream_data(**params)
            for source in stream_info.get("sources"):
                if source.get("src"):
                    for s in HLSStream.parse_variant_playlist(self.session, source.get("src")).items():
                        yield s
                else:
                    q = "{0}p".format(source.get("height"))
                    s = RTMPStream(self.session, {
                        "rtmp": source.get("app_name"),
                        "playpath": source.get("stream_name")
                    })
                    yield q, s
"geo_blocked": [], "notes": "", "live": True, "vod": True, "last_update": "2017-02-09", } _url_re = re.compile(r"https?://(?:www\.)?openrec.tv/(live|movie)/") _playlist_url_re = re.compile(r"data-(source)?file=\"(?P<url>[^\"]+)\"") _movie_data_re = re.compile(r'''<script type="application/ld\+json">(.*?)</script>''', re.DOTALL | re.M) _live_schema = validate.Schema( validate.transform(_playlist_url_re.findall), [ validate.union({ "isSource": validate.all(validate.get(0), validate.transform(lambda s: s == "source")), "url": validate.all(validate.get(1), validate.url(scheme="http", path=validate.endswith(".m3u8"))) }) ] ) _movie_schema = validate.Schema( validate.transform(_movie_data_re.search), validate.any( None, validate.all( validate.get(1), validate.transform(parse_json), validate.get("contentUrl") ) ) )
class Rtve(Plugin):
    secret_key = base64.b64decode("eWVMJmRhRDM=")
    content_id_re = re.compile(r'data-id\s*=\s*"(\d+)"')
    url_re = re.compile(r"""
        https?://(?:www\.)?rtve\.es/(?:directo|noticias|television|deportes|alacarta|drmn)/.*?/?
    """, re.VERBOSE)
    cdn_schema = validate.Schema(
        validate.transform(partial(parse_xml, invalid_char_entities=True)),
        validate.xml_findall(".//preset"),
        [
            validate.union({
                "quality": validate.all(validate.getattr("attrib"), validate.get("type")),
                "urls": validate.all(validate.xml_findall(".//url"), [validate.getattr("text")])
            })
        ]
    )
    subtitles_api = "http://www.rtve.es/api/videos/{id}/subtitulos.json"
    subtitles_schema = validate.Schema(
        {
            "page": {
                "items": [{
                    "src": validate.url(),
                    "lang": validate.text
                }]
            }
        },
        validate.get("page"),
        validate.get("items")
    )
    video_api = "http://www.rtve.es/api/videos/{id}.json"
    video_schema = validate.Schema(
        {
            "page": {
                "items": [{
                    "qualities": [{
                        "preset": validate.text,
                        "height": int
                    }]
                }]
            }
        },
        validate.get("page"),
        validate.get("items"),
        validate.get(0)
    )
    options = PluginOptions({"mux_subtitles": False})

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def __init__(self, url):
        Plugin.__init__(self, url)
        self.zclient = ZTNRClient(self.secret_key)
        http.headers = {"User-Agent": useragents.SAFARI_8}

    def _get_content_id(self):
        res = http.get(self.url)
        m = self.content_id_re.search(res.text)
        return m and int(m.group(1))

    def _get_subtitles(self, content_id):
        res = http.get(self.subtitles_api.format(id=content_id))
        return http.json(res, schema=self.subtitles_schema)

    def _get_quality_map(self, content_id):
        res = http.get(self.video_api.format(id=content_id))
        data = http.json(res, schema=self.video_schema)
        qmap = {}
        for item in data["qualities"]:
            qname = {
                "MED": "Media",
                "HIGH": "Alta",
                "ORIGINAL": "Original"
            }.get(item["preset"], item["preset"])
            qmap[qname] = u"{0}p".format(item["height"])
        return qmap

    def _get_streams(self):
        streams = []
        content_id = self._get_content_id()
        if content_id:
            self.logger.debug("Found content with id: {0}", content_id)
            stream_data = self.zclient.get_cdn_list(content_id, schema=self.cdn_schema)
            quality_map = None
            for stream in stream_data:
                for url in stream["urls"]:
                    if url.endswith("m3u8"):
                        try:
                            streams.extend(HLSStream.parse_variant_playlist(self.session, url).items())
                        except (IOError, OSError):
                            self.logger.debug("Failed to load m3u8 url: {0}", url)
                    elif ((url.endswith("mp4") or url.endswith("mov") or url.endswith("avi"))
                          and http.head(url, raise_for_status=False).status_code == 200):
                        if quality_map is None:
                            # only make the request when it is necessary
                            quality_map = self._get_quality_map(content_id)
                        # rename the HTTP sources to match the HLS sources
                        quality = quality_map.get(stream["quality"], stream["quality"])
                        streams.append((quality, HTTPStream(self.session, url)))

            subtitles = None
            if self.get_option("mux_subtitles"):
                subtitles = self._get_subtitles(content_id)
            if subtitles:
                substreams = {}
                for i, subtitle in enumerate(subtitles):
                    substreams[subtitle["lang"]] = HTTPStream(self.session, subtitle["src"])

                for q, s in streams:
                    yield q, MuxedStream(self.session, s, subtitles=substreams)
            else:
                for s in streams:
                    yield s
def test_url(self):
    url_ = "https://google.se/path"

    assert validate(url(), url_)
    assert validate(url(scheme="http"), url_)
    assert validate(url(path="/path"), url_)
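# A hedged companion example, not taken from the test suite: url() validates the
# individual components of the parsed URL, so a mismatched attribute makes
# validate() raise a ValueError. The test name below is an assumption for
# illustration only.
def test_url_failure_example(self):
    url_ = "https://google.se/path"

    with self.assertRaises(ValueError):
        validate(url(scheme="rtmp"), url_)
    with self.assertRaises(ValueError):
        validate(url(path="/other"), url_)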