def GetScheduleData(self, data):
    """ Stores the schedule data for later use without altering the data.

    Accepts data from the ProcessFolderList method, BEFORE the items are
    processed. The "schedules" value of the JSON response is kept on the
    channel object for later processing; no MediaItems are created here.

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the (unmodified) data and an empty list of MediaItems.
    """

    Logger.Info("Performing Pre-Processing")

    schedule_json = JsonHelper(data)
    self.scheduleData = schedule_json.GetValue("schedules")

    Logger.Debug("Pre-Processing finished")
    return data, []
def create_single_video_item(self, result_set):
    """ Creates a MediaItem of type 'video' from a regex/JSON result.

    If the channel already produced video items, this one is considered a
    duplicate and skipped. Title, description, URL, thumb and fanart are
    taken from the JSON data and the parent item.

    :param str result_set: The result_set of the self.episodeItemRegex

    :return: A new MediaItem of type 'video' or None when skipped.
    :rtype: MediaItem|None
    """

    # Duplicate guard: once video items exist, this single item would repeat one.
    if self.__hasAlreadyVideoItems:
        return None

    # Unescape the encoded single quotes before parsing the JSON.
    video_json = JsonHelper(result_set.replace('\\x27', "'"))

    video_item = MediaItem(
        video_json.get_value("name"),
        self.parentItem.url,
        type="video")
    video_item.description = HtmlHelper.to_text(video_json.get_value("description"))
    video_item.thumb = self.parentItem.thumb
    video_item.fanart = self.parentItem.fanart
    return video_item
def update_music_item(self, item):
    """ Updates an existing MediaItem with more data.

    Used to update none complete MediaItems (self.complete = False). This
    could include opening the item's URL to fetch more data and then process
    that data or retrieve it's real media-URL.

    The method should at least:
    * set at least one MediaItemPart with a single MediaStream.
    * set self.complete = True.

    :param MediaItem item: the original MediaItem that needs updating.

    :return: The original item with more data added to it's properties.
    :rtype: MediaItem
    """

    Logger.debug('Starting update_music_item for %s (%s)', item.name, self.channelName)

    # The item URL carries the request parameters after the "?".
    url, data = item.url.split("?")
    data = UriHandler.open(url, proxy=self.proxy, params=data,
                           additional_headers=item.HttpHeaders)
    Logger.trace(data)

    json_data = JsonHelper(data)
    url = json_data.get_value("url", fallback=None)
    if url:
        item.append_single_stream(url)
        # Fix: the framework checks the lowercase `complete` attribute (see the
        # contract above); `item.Complete` was a dead attribute, so the item was
        # never actually marked as complete.
        item.complete = True
    return item
def UpdateVideoItem(self, item):
    """ Updates the item with its video streams and subtitle.

    :param MediaItem item: the MediaItem to update.
    :return: the updated item; `complete` is set once a stream was added.
    """

    Logger.Debug('Starting UpdateVideoItem for %s (%s)', item.name, self.channelName)

    data = UriHandler.Open(item.url, proxy=self.proxy, additionalHeaders=item.HttpHeaders)
    json = JsonHelper(data)

    part = item.CreateNewEmptyMediaPart()
    # The subtitle is resolved via the NPO "mid" (media id) of this item.
    part.Subtitle = NpoStream.GetSubtitle(json.GetValue("mid"), proxy=self.proxy)

    for stream in json.GetValue("videoStreams"):
        # Streams starting with "odi" are skipped here (handled via the fallback below).
        if not stream["url"].startswith("odi"):
            # NOTE(review): bitrate looks like bps converted to kbps — `/ 1000` is
            # integer division on Python 2; confirm intent if this runs on Python 3.
            part.AppendMediaStream(stream["url"], stream["bitrate"] / 1000)
            item.complete = True

    if item.HasMediaItemParts():
        return item

    # Fallback: no direct streams were usable, resolve them via the NPO stream API.
    for s, b in NpoStream.GetStreamsFromNpo(None, json.GetValue("mid"), proxy=self.proxy):
        item.complete = True
        part.AppendMediaStream(s, b)

    return item
def UpdateLiveStream(self, item): Logger.Debug("Updating Live stream") # let's request a token token = self.__GetToken() # What is the channel name to play channel = self.channelCode if self.channelCode == "q2": channel = "2be" elif self.channelCode == "stievie": channel = item.metaData["channelId"] url = "https://stream-live.medialaan.io/stream-live/v1/channels/%s/broadcasts/current/video/?deviceId=%s" % ( channel, uuid.uuid4() # Could be a random int ) auth = {"Authorization": "apikey=%s&access_token=%s" % (self.__apiKey, token)} data = UriHandler.Open(url, proxy=self.proxy, noCache=True, additionalHeaders=auth) jsonData = JsonHelper(data) hls = jsonData.GetValue("response", "url", "hls") if not hls: return item part = item.CreateNewEmptyMediaPart() for s, b in M3u8.GetStreamsFromM3u8(hls, self.proxy): item.complete = True # s = self.GetVerifiableVideoUrl(s) part.AppendMediaStream(s, b) return item
def extract_slug_data(self, data):
    """ Extracts the correct Slugged Data for tabbed items.

    Only the accordion tab whose slug matches the "?tab=<slug>" suffix of the
    current URL is listed; its videos are turned into MediaItems.

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]
    """

    Logger.info("Extracting Slugged data during pre-processing")
    data, items = self.extract_json_data(data)

    slug_json = JsonHelper(data)
    accordion = slug_json.get_value("relatedVideoContent", "relatedVideosAccordion")
    for tab in accordion:
        # Skip every tab except the one the current URL points at.
        if not self.parentItem.url.endswith("?tab=%s" % (tab["slug"],)):
            continue
        for video in tab["videos"]:
            media_item = self.create_json_item(video)
            if media_item:
                items.append(media_item)

    return data, items
def UpdateVideoItem(self, item):
    """ Updates the item by decoding the base64 encoded stream info on the page.

    :param MediaItem item: the MediaItem to update.
    :return: the updated, completed item.
    """

    data = UriHandler.Open(item.url, proxy=self.proxy)
    # The stream info is base64 encoded JSON embedded in the page source.
    baseEncode = Regexer.DoRegex(self.mediaUrlRegex, data)[-1]
    jsonData = EncodingHelper().DecodeBase64(baseEncode)
    json = JsonHelper(jsonData, logger=Logger.Instance())
    Logger.Trace(json)

    # Example decoded payload:
    # "flv": "http://media.dumpert.nl/flv/e2a926ff_10307954_804223649588516_151552487_n.mp4.flv",
    # "tablet": "http://media.dumpert.nl/tablet/e2a926ff_10307954_804223649588516_151552487_n.mp4.mp4",
    # "mobile": "http://media.dumpert.nl/mobile/e2a926ff_10307954_804223649588516_151552487_n.mp4.mp4",

    # Reset any existing parts before adding the freshly found streams.
    item.MediaItemParts = []
    part = item.CreateNewEmptyMediaPart()
    streams = json.GetValue()
    for key in streams:
        # Bitrates are rough estimates per quality label.
        if key == "flv":
            part.AppendMediaStream(streams[key], 1000)
        elif key == "tablet":
            part.AppendMediaStream(streams[key], 800)
        elif key == "mobile":
            part.AppendMediaStream(streams[key], 450)
        else:
            Logger.Debug("Key '%s' was not used", key)

    item.complete = True
    Logger.Trace("VideoItem updated: %s", item)
    return item
def extract_page_data(self, data):
    """ Extracts the "data" value and appends a next-page item when more exists.

    Accepts data from the process_folder_list method, BEFORE the items are
    processed. The return values should always be instantiated in at least
    ("", []).

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]
    """

    items = []
    json_data = JsonHelper(data)
    data = json_data.get_value("data")
    Logger.trace(data)

    if json_data.get_value("loadMore", fallback=False):
        # The current page number is the last URL path segment.
        base_url, page = self.parentItem.url.rsplit("/", 1)
        next_page = int(page) + 1
        page_item = MediaItem("{0}".format(next_page + 1),
                              "{0}/{1}".format(base_url, next_page))
        page_item.type = "page"
        items.append(page_item)

    return data, items
def update_video_item(self, item):
    """ Accepts an item. It returns an updated item. Usually retrieves the MediaURL
    and the Thumb! It should return a completed item.
    """
    Logger.debug('Starting update_video_item for %s (%s)', item.name, self.channelName)

    # Historical example URLs for this channel:
    # rtmpt://vrt.flash.streampower.be/een//2011/07/1000_110723_getipt_neefs_wiels_Website_EEN.flv
    # http://www.een.be/sites/een.be/modules/custom/vrt_video/player/player_4.3.swf

    data = UriHandler.open(item.url, proxy=self.proxy)
    part = item.create_new_empty_media_part()
    if "mediazone.vrt.be" not in item.url:
        # Not a direct MediaZone API URL: extract the video id from the page
        # and fetch the MediaZone asset data for it instead.
        video_id = Regexer.do_regex('data-video=[\'"]([^"\']+)[\'"]', data)[0]
        url = "https://mediazone.vrt.be/api/v1/een/assets/%s" % (video_id, )
        data = UriHandler.open(url, proxy=self.proxy)

    json = JsonHelper(data)
    urls = json.get_value("targetUrls")
    for url_info in urls:
        Logger.trace(url_info)
        # Only the HLS targets are used; other stream types are skipped.
        if url_info["type"].lower() != "hls":
            continue

        hls_url = url_info["url"]
        for s, b in M3u8.get_streams_from_m3u8(hls_url, self.proxy):
            part.append_media_stream(s, b)
            item.complete = True

    return item
def renew_token(self, refresh_token):
    """ Sets a new access token on the User using the refresh token.

    The basic expire time of the refresh token is 30 days:

    http://docs.aws.amazon.com/cognito/latest/developerguide/amazon-cognito-user-pools-using-tokens-with-identity-providers.html

    :param str refresh_token: Token to use for refreshing the authorization token.

    :return: The new id token.
    """

    payload = {
        "AuthParameters": {
            "REFRESH_TOKEN": refresh_token
        },
        "ClientId": self.client_id,
        "AuthFlow": "REFRESH_TOKEN"
    }
    headers = {
        "X-Amz-Target": "AWSCognitoIdentityProviderService.InitiateAuth",
        "Content-Type": "application/x-amz-json-1.1"
    }

    response = UriHandler.open(self.url, proxy=self.__proxy,
                               params=JsonHelper.dump(payload),
                               additional_headers=headers)
    return JsonHelper(response).get_value("AuthenticationResult", "IdToken")
def pre_process_folder_list(self, data):
    """ Performs pre-process actions for data processing.

    For "episode.json" URLs only the "carousel" part of the JSON is relevant,
    so the data is reduced to that value. No MediaItems are created here.

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]
    """

    Logger.info("Performing Pre-Processing")
    items = []

    if "episode.json" in self.parentItem.url:
        Logger.debug("Fetching Carousel data")
        data = JsonHelper(data).get_value("carousel")

    Logger.debug("Pre-Processing finished")
    return data, items
def extract_day_items(self, data):
    """ Creates video items for all entries of a single day listing.

    Accepts data from the process_folder_list method, BEFORE the items are
    processed. The return values should always be instantiated in at least
    ("", []).

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]
    """

    items = []
    day_json = JsonHelper(data)
    for entry in day_json.get_value('items'):
        # The serie name is prepended so videos of different shows stay distinguishable.
        video = self.create_json_video_item(entry, prepend_serie=True)
        if video:
            items.append(video)

    return data, items
def extract_main_list_json(self, data):
    """ Extracts the main list JSON data from the HTML response.

    The Redux state embedded in the page is cut out of the HTML and parsed;
    the A-Z letter listing inside it is converted into episode MediaItems.

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]
    """

    data, items = self.add_others(data)

    # The JSON state is embedded between the Redux marker and the next script tag.
    start_string = "window.__REDUX_STATE__ = "
    start_data = data.index(start_string)
    end_data = data.index("</script><script async=")
    data = data[start_data + len(start_string):end_data]
    data = JsonHelper(data)

    letters = data.get_value(
        "reduxAsyncConnect", "page", "components", 1, "data", "items", 1, "data", "items")
    for letter_data in letters:
        letter_data = letter_data["data"]
        Logger.trace("Processing '%s'", letter_data["title"])
        for item in letter_data["items"]:
            episode = self.create_json_episode_item(item)
            # Fix: guard against None results, consistent with the other
            # create_* callers in this file; a None entry would break listing.
            if episode:
                items.append(episode)

    return data, items
def __get_online_version_from_bitbucket(self, include_alpha_beta=False):
    """ Retrieves the current online version.

    :param bool include_alpha_beta: should we include alpha/beta releases?

    :return: Returns the current online version or `None` of no version was found.
    :rtype: None|Version
    """

    data = self.__uriHandler.open(self.updateUrl, no_cache=True)
    json_data = JsonHelper(data)
    online_downloads = [d for d in json_data.get_value("values") if self.__is_valid_update(d)]
    if len(online_downloads) == 0:
        return None

    max_version = None
    for online_download in online_downloads:
        # Download names look like "<name>-<version>.<ext>"; skip names
        # without a version part.
        online_parts = online_download['name'].rsplit(".", 1)[0].split("-")
        if len(online_parts) < 2:
            continue

        # fix the problem that a ~ is preventing downloads on BitBucket
        online_version_data = online_parts[1].replace("alpha", "~alpha").replace("beta", "~beta")
        online_version = Version(online_version_data)

        if not include_alpha_beta and online_version.buildType is not None:
            self.__logger.trace("Ignoring %s", online_version)
            continue

        self.__logger.trace("Found possible version: %s", online_version)
        # Fix: guard the first iteration explicitly — an ordering comparison
        # against None raises a TypeError on Python 3.
        if max_version is None or online_version > max_version:
            max_version = online_version
    return max_version
def __get_online_version_from_github(self, include_alpha_beta=False):
    """ Retrieves the current online version.

    :param bool include_alpha_beta: should we include alpha/beta releases?

    :return: Returns the current online version or `None` of no version was found.
    :rtype: None|Version
    """

    data = self.__uriHandler.open(self.updateUrl, no_cache=True)
    json_data = JsonHelper(data)

    # Tags look like "v1.2.3-beta"; strip the "v" and use "~" as the
    # pre-release separator the Version class expects.
    version_tag = "tag_name"
    online_versions = [
        r[version_tag].lstrip("v").replace("-", "~")
        for r in json_data.get_value()
        if bool(r[version_tag]) and (not r["prerelease"] or include_alpha_beta)
    ]
    if not bool(online_versions):
        return None

    max_version = None
    for online_version_data in online_versions:
        online_version = Version(online_version_data)

        if not include_alpha_beta and online_version.buildType is not None:
            self.__logger.trace("Ignoring %s", online_version)
            continue

        self.__logger.trace("Found possible version: %s", online_version)
        # Fix: guard the first iteration explicitly — an ordering comparison
        # against None raises a TypeError on Python 3.
        if max_version is None or online_version > max_version:
            max_version = online_version
    return max_version
def load_channel_data(self, data):
    """ Adds the channel items to the listing.

    Fetches the SVT server time (falling back to the local clock) and then
    loads the channel page for that timestamp.

    :param str data: The retrieve data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]
    """

    items = []

    now = datetime.datetime.now()
    try:
        server_time = UriHandler.open("https://www.svtplay.se/api/server_time",
                                      proxy=self.proxy, no_cache=True)
        server_time_json = JsonHelper(server_time)
        server_time = server_time_json.get_value("time")
    except Exception:
        # Fix: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
        # only real errors should trigger the local-clock fallback.
        Logger.error("Error determining server time", exc_info=True)
        server_time = "%04d-%02d-%02dT%02d:%02d:%02d" % (
            now.year, now.month, now.day, now.hour, now.minute, now.second)

    data = UriHandler.open(
        "https://www.svtplay.se/api/channel_page?now=%s" % (server_time, ),
        proxy=self.proxy)
    return data, items
def update_json_video(self, item):
    """ Updates an existing MediaItem with more data.

    Used to update none complete MediaItems (self.complete = False). This
    could include opening the item's URL to fetch more data and then process
    that data or retrieve it's real media-URL.

    The method should at least:
    * set at least one MediaItemPart with a single MediaStream.
    * set self.complete = True.

    :param MediaItem item: the original MediaItem that needs updating.

    :return: The original item with more data added to it's properties.
    :rtype: MediaItem
    """

    Logger.debug('Starting update_video_item: %s', item.name)

    data = UriHandler.open(item.url, proxy=self.proxy, additional_headers=self.httpHeaders)
    json_data = JsonHelper(data)
    streams = json_data.get_value("formats")
    if not streams:
        return item

    # Bitrate per quality label; unknown labels fall back to 0.
    qualities = {"720p": 1600, "480p": 1200, "360p": 500, "other": 0}

    part = item.create_new_empty_media_part()
    urls = []
    for stream in streams:
        # Fix: dict views are not indexable on Python 3; materialise the
        # values first before taking the last one.
        url = list(stream["url"].values())[-1]
        if url in urls:
            # duplicate url, ignore
            continue
        urls.append(url)

        # actually process the url
        if not url.endswith(".m3u8"):
            part.append_media_stream(
                url=url,
                bitrate=qualities.get(stream.get("name", "other"), 0)
            )
            item.complete = True
        else:
            content_type, url = UriHandler.header(url, self.proxy)
            for s, b in M3u8.get_streams_from_m3u8(url, self.proxy):
                item.complete = True
                part.append_media_stream(s, b)
    return item
def list_some_videos(self, data):
    """ If there was a Lastest section in the data return those video files

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]
    """

    items = []
    if not self.__showSomeVideosInListing:
        return data, items

    json_data = JsonHelper(data)
    tabs = json_data.get_value("relatedVideoContent", "relatedVideosAccordion")
    tabs = [t for t in tabs if t['type'] not in self.__excludedTabs]
    Logger.debug("Found %s folders/tabs", len(tabs))

    if len(tabs) == 1:
        # Exactly one tab left: list its videos here and remember its type so
        # the folder listing can exclude it. Otherwise the default
        # "RELATED_VIDEO_TABS_LATEST" stays in effect.
        self.__listedRelatedTab = tabs[0]["type"]
        Logger.debug("Excluded tab '%s' which will be show as videos", self.__listedRelatedTab)

    for tab in tabs:
        if tab["type"] != self.__listedRelatedTab:
            continue
        for video_data in tab['videos']:
            items.append(self.create_json_item(video_data))

    return data, items
def update_video_api_item(self, item):
    """ Updates an existing MediaItem with more data.

    Fetches the API data for the item and delegates the actual stream and
    subtitle handling to __update_item_from_video_references.

    :param MediaItem item: the original MediaItem that needs updating.

    :return: The original item with more data added to it's properties.
    :rtype: MediaItem
    """

    Logger.debug('Starting UpdateChannelItem for %s (%s)', item.name, self.channelName)

    response = UriHandler.open(item.url, proxy=self.proxy)
    api_json = JsonHelper(response, logger=Logger.instance())
    video_references = api_json.get_value("videoReferences")
    subtitle_references = api_json.get_value("subtitleReferences")
    Logger.trace(video_references)

    return self.__update_item_from_video_references(
        item, video_references, subtitle_references)
def update_json_video_item(self, item):
    """ Updates an existing MediaItem with more data.

    Used to update none complete MediaItems (self.complete = False). This
    could include opening the item's URL to fetch more data and then process
    that data or retrieve it's real media-URL.

    The method should at least:
    * set at least one MediaItemPart with a single MediaStream.
    * set self.complete = True.

    :param MediaItem item: the original MediaItem that needs updating.

    :return: The original item with more data added to it's properties.
    :rtype: MediaItem
    """

    headers = {}
    # Optionally bind the request to a specific local IP (geo-restriction workaround).
    if self.localIP:
        headers.update(self.localIP)

    data = UriHandler.open(item.url, proxy=self.proxy, additional_headers=headers)
    video_data = JsonHelper(data)
    stream_data = video_data.get_value("mediaAssetsOnDemand")
    if not stream_data:
        return item

    use_adaptive = AddonSettings.use_adaptive_stream_add_on()
    # Only the first on-demand asset is used.
    stream_data = stream_data[0]
    part = item.create_new_empty_media_part()
    if "hlsUrl" in stream_data:
        hls_url = stream_data["hlsUrl"]
        if use_adaptive:
            # With the adaptive input-stream add-on a single stream entry suffices.
            stream = part.append_media_stream(hls_url, 0)
            M3u8.set_input_stream_addon_input(stream, self.proxy, headers=headers)
            item.complete = True
        else:
            # Otherwise expand the M3u8 playlist into individual bitrate streams.
            for s, b in M3u8.get_streams_from_m3u8(hls_url, self.proxy, headers=headers):
                item.complete = True
                part.append_media_stream(s, b)

    if "timedTextSubtitlesUrl" in stream_data and stream_data["timedTextSubtitlesUrl"]:
        # The subtitles are offered as TTML but downloaded as WebVTT.
        sub_url = stream_data["timedTextSubtitlesUrl"].replace(".ttml", ".vtt")
        sub_url = HtmlEntityHelper.url_decode(sub_url)
        # NOTE(review): `part.Subtitle` uses the old attribute casing while the
        # rest of this method uses the new snake_case API — confirm the
        # MediaItemPart attribute name.
        part.Subtitle = SubtitleHelper.download_subtitle(sub_url, format="webvtt")
    return item
def AddPageItems(self, data):
    """ Adds a 'next page' item to the main listing when more results exist.

    @param data: the Parsed Data
    @return: a tuple of the JsonHelper data and the generated items
    """

    Logger.Info("Performing Pre-Processing")
    items = []

    pageJson = JsonHelper(data)
    resultCount = pageJson.GetValue("totalResults")
    offset = pageJson.GetValue("from")
    pageSize = pageJson.GetValue("size")

    nextOffset = offset + pageSize
    if nextOffset < resultCount:
        morePages = LanguageHelper.GetLocalizedString(LanguageHelper.MorePages)
        baseUrl = self.parentItem.url.split('?')[0]
        url = "%s?size=%s&from=%s&sort=Nieuwste" % (baseUrl, pageSize, nextOffset)
        Logger.Debug("Adding next-page item from %s to %s", nextOffset, nextOffset + pageSize)

        nextPage = mediaitem.MediaItem(morePages, url)
        nextPage.icon = self.parentItem.icon
        nextPage.fanart = self.parentItem.fanart
        nextPage.thumb = self.parentItem.thumb
        nextPage.dontGroup = True
        items.append(nextPage)

    Logger.Debug("Pre-Processing finished")
    return pageJson, items
def update_video_item(self, item):
    """ Updates the item using the "data-media" JSON attribute on the page.

    :param MediaItem item: the MediaItem to update.

    :return: the updated, completed item.
    :rtype: MediaItem
    """

    data = UriHandler.open(item.url, proxy=self.proxy, additional_headers=item.HttpHeaders)

    media_info = Regexer.do_regex('data-media="([^"]+)"', data)[0]
    media_info = HtmlEntityHelper.convert_html_entities(media_info)
    media_info = JsonHelper(media_info)
    Logger.trace(media_info)

    # sources: high, web, mobile, url — mapped to estimated bitrates.
    bitrates = {"high": 2000, "web": 800, "mobile": 400}
    part = item.create_new_empty_media_part()
    media_sources = media_info.json.get("sources", {})
    for quality in media_sources:
        part.append_media_stream(media_sources[quality], bitrates.get(quality, 0))

    # geoLocRestriction
    item.isGeoLocked = media_info.get_value("geoLocRestriction", fallback="world") != "world"

    item.complete = True
    return item
def extract_json(self, data):
    """ Extracts the embedded JSON and converts it into a list of entries.

    The application/json script block is pulled from the page; its dictionary
    is turned into a list where each entry carries its key as "title".

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]
    """

    Logger.info("Performing Pre-Processing")
    items = []

    raw_json = Regexer.do_regex('type="application/json">([^<]+)<', data)
    if not raw_json:
        Logger.warning("No JSON data found.")
        return data, items

    json_helper = JsonHelper(raw_json[0])
    entries = []
    for title, entry in json_helper.json.items():
        entry["title"] = title
        entries.append(entry)

    # Replace the dictionary with the list of entries and return the helper.
    json_helper.json = entries
    return json_helper, items
def LoadChannelData(self, data):
    """ Adds the channel items to the listing.

    Fetches the SVT server time (falling back to the local clock) and then
    loads the channel page for that timestamp.

    @param data: The data to use.

    Returns a list of MediaItems that were retrieved.
    """

    items = []
    # data = UriHandler.Open("https://www.svtplay.se/api/channel_page", proxy=self.proxy, noCache=True)

    now = datetime.datetime.now()
    try:
        serverTime = UriHandler.Open("https://www.svtplay.se/api/server_time",
                                     proxy=self.proxy, noCache=True)
        serverTimeJson = JsonHelper(serverTime)
        serverTime = serverTimeJson.GetValue("time")
    except Exception:
        # Fix: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
        # only real errors should trigger the local-clock fallback.
        Logger.Error("Error determining server time", exc_info=True)
        serverTime = "%04d-%02d-%02dT%02d:%02d:%02d" % (
            now.year, now.month, now.day, now.hour, now.minute, now.second)

    data = UriHandler.Open(
        "https://www.svtplay.se/api/channel_page?now=%s" % (serverTime, ),
        proxy=self.proxy)
    return data, items
def UpdateLiveItem(self, item):
    """ Accepts an item. It returns an updated item. Usually retrieves the MediaURL
    and the Thumb! It should return a completed item.
    """
    Logger.Debug('Starting UpdateVideoItem for %s (%s)', item.name, self.channelName)

    data = UriHandler.Open(item.url, proxy=self.proxy)
    # The response has a prefix before the JSON payload: strip up to the first '{'.
    data = data[data.index('{'):]
    json = JsonHelper(data)
    sid = json.GetValue("sid")

    # NOTE(review): the "i=" query parameter is a hard-coded base64 blob with
    # player/session info; only the session id ("id=") is variable — confirm
    # this keeps working if the upstream player configuration changes.
    videoUrl = "http://edge2.tikilive.com:1935/html5_tikilive/34967/amlst:mainstream/playlist.m3u8" \
               "?i=YXBwTmFtZT1QbGF5ZXImY0lEPTM0OTY3JmNOYW1lPUFUViUyME5ldHdvcmtzJm9JRD0xMzY1NTUmb05hbW" \
               "U9YXR2bmV0d29ya3Mmc0lkPWZvODRpbDNlN3FzNjh1ZXQycWwyZWF2MDgxJnVJRD0wJnVOYW1lPUd1ZXN0MTNiNjk=&id=%s" \
               % (sid,)

    part = item.CreateNewEmptyMediaPart()
    for s, b in M3u8.GetStreamsFromM3u8(videoUrl, self.proxy):
        item.complete = True
        # s = self.GetVerifiableVideoUrl(s)
        part.AppendMediaStream(s, b)

    item.complete = True
    return item
def ListSomeVideos(self, data):
    """ If there was a Lastest section in the data return those video files

    :param data: the data that was loaded for the current item.
    :return: a tuple of the data and the generated MediaItems.
    """

    items = []
    if not self.__showSomeVideosInListing:
        return data, items

    jsonData = JsonHelper(data)
    sections = jsonData.GetValue("relatedVideoContent", "relatedVideosAccordion")
    # NOTE(review): `filter` returns a list on Python 2 only; the `len()` calls
    # below would fail on a Python 3 filter object — confirm this code path is
    # Python 2 only (cf. the list(filter(...)) variant elsewhere in the file).
    sections = filter(lambda s: s['type'] not in self.__excludedTabs, sections)
    Logger.Debug("Found %s folders/tabs", len(sections))

    if len(sections) == 1:
        # we should exclude that tab from the folders list and show the videos here
        self.__listedRelatedTab = sections[0]["type"]
        # otherwise the default "RELATED_VIDEO_TABS_LATEST" is used
        Logger.Debug("Excluded tab '%s' which will be show as videos", self.__listedRelatedTab)

    for section in sections:
        if not section["type"] == self.__listedRelatedTab:
            continue
        for videoData in section['videos']:
            items.append(self.CreateJsonItem(videoData))

    return data, items
def UpdateVideoItem(self, item):
    """ Accepts an item. It returns an updated item. Usually retrieves the MediaURL
    and the Thumb! It should return a completed item.
    """
    Logger.Debug('Starting UpdateVideoItem for %s (%s)', item.name, self.channelName)

    if not item.url.endswith("m3u8"):
        # The page embeds the player config JSON; extract the HLS source from it.
        data = UriHandler.Open(item.url, proxy=self.proxy)
        jsonData = Regexer.DoRegex(self.mediaUrlRegex, data)
        if not jsonData:
            Logger.Error("Cannot find JSON stream info.")
            return item

        json = JsonHelper(jsonData[0])
        Logger.Trace(json.json)
        stream = json.GetValue("source", "hls")
        Logger.Debug("Found HLS: %s", stream)
    else:
        # The item URL is already an M3u8 playlist.
        stream = item.url

    part = item.CreateNewEmptyMediaPart()
    for s, b in M3u8.GetStreamsFromM3u8(stream, self.proxy):
        item.complete = True
        # s = self.GetVerifiableVideoUrl(s)
        part.AppendMediaStream(s, b)

    # Example of the embedded player config this regex matches:
    # var playerConfig = {"id":"mediaplayer","width":"100%","height":"100%","autostart":"false","image":"http:\/\/www.ketnet.be\/sites\/default\/files\/thumb_5667ea22632bc.jpg","brand":"ketnet","source":{"hls":"http:\/\/vod.stream.vrt.be\/ketnet\/_definst_\/mp4:ketnet\/2015\/12\/Ben_ik_familie_van_R001_A0023_20151208_143112_864.mp4\/playlist.m3u8"},...}
    return item
def UpdateVideoApiItem(self, item):
    """ Updates an existing MediaItem with more data.

    Fetches the API data for the item and delegates the actual stream and
    subtitle handling to __UpdateItemFromVideoReferences.

    Arguments:
    item : MediaItem - the MediaItem that needs to be updated

    Returns:
    The original item with more data added to it's properties.
    """

    Logger.Debug('Starting UpdateChannelItem for %s (%s)', item.name, self.channelName)

    response = UriHandler.Open(item.url, proxy=self.proxy)
    apiJson = JsonHelper(response, logger=Logger.Instance())
    videoReferences = apiJson.GetValue("videoReferences")
    subtitleReferences = apiJson.GetValue("subtitleReferences")
    Logger.Trace(videoReferences)

    return self.__UpdateItemFromVideoReferences(item, videoReferences, subtitleReferences)
def make_episode_dictionary_array(self, data):
    """ Performs pre-process actions for data processing.

    Converts the "items" dictionary into episode MediaItems, skipping the
    non-programme "banners" and "curators" entries. The data itself is
    consumed here, so an empty string is returned as data.

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]
    """

    Logger.info("Performing Pre-Processing")
    items = []

    json_data = JsonHelper(data)
    dict_items = json_data.get_value("items", fallback=[])
    for item in dict_items:
        if item == "banners" or item == "curators":
            continue
        episode = self.create_episode_item(dict_items[item])
        # Fix: guard against None results, consistent with the other
        # create_* callers in this file; a None entry would break listing.
        if episode:
            items.append(episode)

    Logger.debug("Pre-Processing finished")
    data = ""
    return data, items
def UpdateVideoItem(self, item):
    """ Updates the item using the "data-media" JSON attribute on the page.

    Arguments:
    item : MediaItem - the MediaItem that needs to be updated

    Returns:
    The updated, completed item.
    """

    data = UriHandler.Open(item.url, proxy=self.proxy, additionalHeaders=item.HttpHeaders)

    mediaInfo = Regexer.DoRegex('data-media="([^"]+)"', data)[0]
    mediaInfo = HtmlEntityHelper.ConvertHTMLEntities(mediaInfo)
    mediaInfo = JsonHelper(mediaInfo)
    Logger.Trace(mediaInfo)

    # sources: high, web, mobile, url — mapped to estimated bitrates.
    bitrates = {"high": 2000, "web": 800, "mobile": 400}
    part = item.CreateNewEmptyMediaPart()
    mediaSources = mediaInfo.json.get("sources", {})
    for quality in mediaSources:
        part.AppendMediaStream(mediaSources[quality], bitrates.get(quality, 0))

    # geoLocRestriction
    item.isGeoLocked = mediaInfo.GetValue("geoLocRestriction", fallback="world") != "world"

    item.complete = True
    return item