def __init__(self, title, thumbnail_url, preview_youtube_url, studio_name, pegi_rating):
    MediaItem.__init__(self, title, thumbnail_url, preview_youtube_url)
    self.studio_name = studio_name
    if pegi_rating in Videogame.PEGI_RATINGS:
        self.pegi_rating_img_url = Videogame.PEGI_RATINGS[pegi_rating]
    else:
        # Default PEGI is 3
        self.pegi_rating_img_url = Videogame.PEGI_RATINGS[3]
def __ShowEmptyInformation(self, items, favs=False):
    """ Adds an empty item to a list or just shows a message.

    @param items: the list of items
    @type favs:   boolean indicating that we are dealing with favourites

    @rtype: boolean indicating success or not

    """

    if self.channelObject:
        Statistics.RegisterError(self.channelObject)

    if favs:
        title = LanguageHelper.GetLocalizedString(LanguageHelper.NoFavsId)
    else:
        title = LanguageHelper.GetLocalizedString(LanguageHelper.ErrorNoEpisodes)

    behaviour = AddonSettings.GetEmptyListBehaviour()
    Logger.Debug("Showing empty info for mode (favs=%s): [%s]", favs, behaviour)

    if behaviour == "error":
        # show error
        ok = False
    elif behaviour == "dummy" and not favs:
        # We should add a dummy item, but not for favourites
        emptyListItem = MediaItem("- %s -" % (title.strip("."), ), "", type='video')
        emptyListItem.icon = self.channelObject.icon
        emptyListItem.thumb = self.channelObject.noImage
        emptyListItem.fanart = self.channelObject.fanart
        emptyListItem.dontGroup = True
        emptyListItem.description = "This listing was left empty intentionally."
        emptyListItem.complete = True
        # add funny stream here?
        # part = emptyListItem.CreateNewEmptyMediaPart()
        # for s, b in YouTube.GetStreamsFromYouTube("", self.channelObject.proxy):
        #     part.AppendMediaStream(s, b)

        # if we add one, set OK to True
        ok = True
        items.append(emptyListItem)
    else:
        ok = True

    XbmcWrapper.ShowNotification(LanguageHelper.GetLocalizedString(LanguageHelper.ErrorId),
                                 title,
                                 XbmcWrapper.Error, 2500)
    return ok
def add_live_streams_and_recent(self, data):
    """ Adds the live streams for RTL-Z.

    Accepts data from the process_folder_list method, BEFORE the items are processed.
    Allows setting of parameters (like title etc) for the channel. Inside this method
    the <data> could be changed and additional items can be created.

    The return values should always be instantiated in at least ("", []).

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]

    """

    items = []

    # let's add the RTL-Z live stream
    rtlz_live = MediaItem("RTL Z Live Stream", "")
    rtlz_live.icon = self.icon
    rtlz_live.thumb = self.noImage
    rtlz_live.complete = True
    rtlz_live.isLive = True
    rtlz_live.dontGroup = True

    stream_item = MediaItem(
        "RTL Z: Live Stream",
        "http://www.rtl.nl/(config=RTLXLV2,channel=rtlxl,progid=rtlz,zone=inlineplayer.rtl.nl/rtlz,ord=0)/system/video/wvx/components/financien/rtlz/miMedia/livestream/rtlz_livestream.xml/1500.wvx"
    )
    stream_item.icon = self.icon
    stream_item.thumb = self.noImage
    stream_item.complete = True
    stream_item.type = "video"
    stream_item.dontGroup = True
    stream_item.append_single_stream("http://mss6.rtl7.nl/rtlzbroad", 1200)
    stream_item.append_single_stream("http://mss26.rtl7.nl/rtlzbroad", 1200)
    stream_item.append_single_stream("http://mss4.rtl7.nl/rtlzbroad", 1200)
    stream_item.append_single_stream("http://mss5.rtl7.nl/rtlzbroad", 1200)
    stream_item.append_single_stream("http://mss3.rtl7.nl/rtlzbroad", 1200)
    rtlz_live.items.append(stream_item)
    items.append(rtlz_live)

    # Add the recent items and return them together with the live stream folder
    data, recent_items = self.add_recent_items(data)
    return data, items + recent_items
def create_video_item(self, result_set): """ Creates a MediaItem of type 'video' using the result_set from the regex. This method creates a new MediaItem from the Regular Expression or Json results <result_set>. The method should be implemented by derived classes and are specific to the channel. If the item is completely processed an no further data needs to be fetched the self.complete property should be set to True. If not set to True, the self.update_video_item method is called if the item is focussed or selected for playback. :param list[str]|dict result_set: The result_set of the self.episodeItemRegex :return: A new MediaItem of type 'video' or 'audio' (despite the method's name). :rtype: MediaItem|None """ Logger.trace('starting FormatVideoItem for %s', self.channelName) # Logger.Trace(result_set) # the vmanProgramId (like 1019976) leads to http://anytime.tv4.se/webtv/metafileFlash.smil?p=1019976&bw=1000&emulate=true&sl=true program_id = result_set["id"] # Logger.Debug("ProgId = %s", programId) url = "https://playback-api.b17g.net/media/%s?service=tv4&device=browser&protocol=hls" % ( program_id, ) name = result_set["title"] item = MediaItem(name, url) item.description = result_set["description"] if item.description is None: item.description = item.name # premium_expire_date_time=2099-12-31T00:00:00+01:00 date = result_set["broadcast_date_time"] (date_part, time_part) = date.split("T") (year, month, day) = date_part.split("-") (hour, minutes, rest1, zone) = time_part.split(":") item.set_date(year, month, day, hour, minutes, 00) broadcast_date = datetime.datetime(int(year), int(month), int(day), int(hour), int(minutes)) thumb_url = result_set.get("image", result_set.get("program_image")) # some images need to come via a proxy: if thumb_url and "://img.b17g.net/" in thumb_url: item.thumb = "https://imageproxy.b17g.services/?format=jpg&shape=cut" \ "&quality=90&resize=520x293&source={}"\ .format(HtmlEntityHelper.url_encode(thumb_url)) else: item.thumb = thumb_url availability = result_set["availability"] # noinspection PyTypeChecker free_period = availability["availability_group_free"] # noinspection PyTypeChecker premium_period = availability["availability_group_premium"] now = datetime.datetime.now() if False and not premium_period == "0": # always premium free_expired = now - datetime.timedelta(days=99 * 365) elif free_period == "30+" or free_period is None: free_expired = broadcast_date + datetime.timedelta(days=99 * 365) else: free_expired = broadcast_date + datetime.timedelta( days=int(free_period)) Logger.trace( "Premium info for: %s\nPremium state: %s\nFree State: %s\nBroadcast %s vs Expired %s", name, premium_period, free_period, broadcast_date, free_expired) if now > free_expired: item.isPaid = True item.type = "video" item.complete = False item.icon = self.icon item.isGeoLocked = result_set["is_geo_restricted"] item.isDrmProtected = result_set["is_drm_protected"] item.isLive = result_set.get("is_live", False) if item.isLive: item.url = "{0}&is_live=true".format(item.url) return item
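# A quick, self-contained illustration of the broadcast_date_time handling in the
# create_video_item() above. The timestamp and helper name below are made up for the
# example; only the string shape matters.
def _split_broadcast_date_example(date="2019-03-07T21:30:00+01:00"):
    date_part, time_part = date.split("T")              # "2019-03-07", "21:30:00+01:00"
    year, month, day = date_part.split("-")             # "2019", "03", "07"
    hour, minutes, rest1, zone = time_part.split(":")   # "21", "30", "00+01", "00"
    # set_date() receives these as strings, while the datetime comparison in the
    # method uses int(...) conversions.
    return year, month, day, hour, minutes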
def create_video_item_json(self, result_set): """ Creates a MediaItem of type 'video' using the result_set from the regex. This method creates a new MediaItem from the Regular Expression or Json results <result_set>. The method should be implemented by derived classes and are specific to the channel. If the item is completely processed an no further data needs to be fetched the self.complete property should be set to True. If not set to True, the self.update_video_item method is called if the item is focussed or selected for playback. :param dict result_set: The result_set of the self.episodeItemRegex :return: A new MediaItem of type 'video' or 'audio' (despite the method's name). :rtype: MediaItem|None """ Logger.trace(result_set) # get the title original_title = result_set.get("original_title") local_title = result_set.get("local_title") # Logger.Trace("%s - %s", originalTitle, localTitle) if original_title == "": title = local_title else: title = original_title # get the other meta data play_lists = result_set.get("local_playlists", []) video_mgid = None for play_list in play_lists: language = play_list["language_code"] if language == self.language: Logger.trace("Found '%s' playlist, using this one.", language) video_mgid = play_list["id"] break elif language == "en": Logger.trace("Found '%s' instead of '%s' playlist", language, self.language) video_mgid = play_list["id"] if video_mgid is None: Logger.error("No video MGID found for: %s", title) return None url = "http://api.mtvnn.com/v2/mrss.xml?uri=mgid:sensei:video:mtvnn.com:local_playlist-%s" % ( video_mgid, ) thumb = result_set.get("riptide_image_id") thumb = "http://images.mtvnn.com/%s/original" % (thumb, ) description = result_set.get("local_long_description") date = result_set.get("published_from") date = date[0:10].split("-") item = MediaItem(title, url) item.thumb = thumb item.description = description item.icon = self.icon item.type = 'video' item.set_date(date[0], date[1], date[2]) item.complete = False return item
def process_folder_list(self, item=None):  # NOSONAR
    """ Processes the selected item and gets its child items using the available dataparsers.

    Accepts an <item> and returns a list of MediaItems with at least name & url set.

    The following actions are done:
    * determining the correct parsers to use
    * calling a pre-processor
    * parsing the data with the parsers
    * calling the creators for item creation

    If the item is None, we assume that we are dealing with the first call for this
    channel and the mainlist uri is used.

    :param MediaItem|None item: The parent item.

    :return: A list of MediaItems that form the children of the <item>.
    :rtype: list[MediaItem]

    """

    items = []
    self.parentItem = item

    if item is None:
        Logger.info("process_folder_list :: No item was specified. Assuming it was the main channel list")
        url = self.mainListUri
    elif len(item.items) > 0:
        return item.items
    else:
        url = item.url

    # Determine the handlers and process
    data_parsers = self.__get_data_parsers(url)
    # Exclude the updaters only
    data_parsers = [p for p in data_parsers if not p.is_video_updater_only()]

    if [p for p in data_parsers if p.LogOnRequired]:
        Logger.info("One or more dataparsers require logging in.")
        self.loggedOn = self.log_on()

    # now set the headers here and not earlier in case they might have been updated by the logon
    if item is not None and item.HttpHeaders:
        headers = item.HttpHeaders
    else:
        headers = self.httpHeaders

    # Let's retrieve the required data. Main URLs
    if url.startswith("http:") or url.startswith("https:") or url.startswith("file:"):
        # Disable cache on live folders
        no_cache = item is not None and not item.is_playable() and item.isLive
        if no_cache:
            Logger.debug("Disabling cache for '%s'", item)
        data = UriHandler.open(url, proxy=self.proxy, additional_headers=headers, no_cache=no_cache)
    # Searching a site using search_site()
    elif url == "searchSite" or url == "#searchSite":
        Logger.debug("Starting to search")
        return self.search_site()
    # Labels instead of URLs
    elif url.startswith("#"):
        data = ""
    # Others
    else:
        Logger.debug("Unknown URL format. Setting data to ''")
        data = ""

    # first check if there is a generic pre-processor
    pre_procs = [p for p in data_parsers if p.is_generic_pre_processor()]
    num_pre_procs = len(pre_procs)
    Logger.trace("Processing %s Generic Pre-Processors DataParsers", num_pre_procs)
    if num_pre_procs > 1:
        # warn for strange results if more than 1 generic pre-processor is present.
        Logger.warning("More than one Generic Pre-Processor is found (%s). They are being processed in the "
                       "order that Python likes, which might result in unexpected results.", num_pre_procs)

    for data_parser in pre_procs:
        # remove it from the list
        data_parsers.remove(data_parser)
        # and process it
        Logger.debug("Processing %s", data_parser)
        (data, pre_items) = data_parser.PreProcessor(data)
        items += pre_items

        if isinstance(data, JsonHelper):
            Logger.debug("Generic preprocessor resulted in JsonHelper data")

    # Then the other handlers
    Logger.trace("Processing %s Normal DataParsers", len(data_parsers))
    handler_json = None
    for data_parser in data_parsers:
        Logger.debug("Processing %s", data_parser)

        # Check for preprocessors
        if data_parser.PreProcessor:
            Logger.debug("Processing DataParser.PreProcessor")
            (handler_data, pre_items) = data_parser.PreProcessor(data)
            items += pre_items
        else:
            handler_data = data

        Logger.debug("Processing DataParser.Parser")
        if data_parser.Parser is None or (data_parser.Parser == "" and not data_parser.IsJson):
            if data_parser.Creator:
                Logger.warning("No <parser> found for %s. Skipping.", data_parser.Creator)
            continue

        if data_parser.IsJson:
            if handler_json is None:
                # Cache the json requests to improve performance
                Logger.trace("Caching JSON results for Dataparsing")
                if isinstance(handler_data, JsonHelper):
                    handler_json = handler_data
                else:
                    handler_json = JsonHelper(handler_data, Logger.instance())

            Logger.trace(data_parser.Parser)
            parser_results = handler_json.get_value(fallback=[], *data_parser.Parser)

            if not isinstance(parser_results, (tuple, list)):
                # if there is just one match, return that as a list
                parser_results = [parser_results]
        else:
            if isinstance(handler_data, JsonHelper):
                raise ValueError("Cannot perform Regex Parser on JsonHelper.")
            else:
                parser_results = Regexer.do_regex(data_parser.Parser, handler_data)

        Logger.debug("Processing DataParser.Creator for %s items", len(parser_results))
        for parser_result in parser_results:
            handler_result = data_parser.Creator(parser_result)
            if handler_result is not None:
                if isinstance(handler_result, list):
                    items += handler_result
                else:
                    items.append(handler_result)

    # should we exclude DRM/GEO?
    hide_geo_locked = AddonSettings.hide_geo_locked_items_for_location(self.language)
    hide_drm_protected = AddonSettings.hide_drm_items()
    hide_premium = AddonSettings.hide_premium_items()
    hide_folders = AddonSettings.hide_restricted_folders()
    type_to_exclude = None
    if not hide_folders:
        type_to_exclude = "folder"

    old_count = len(items)
    if hide_drm_protected:
        Logger.debug("Hiding DRM items")
        items = [i for i in items if not i.isDrmProtected or i.type == type_to_exclude]
    if hide_geo_locked:
        Logger.debug("Hiding GEO Locked items due to GEO region: %s", self.language)
        items = [i for i in items if not i.isGeoLocked or i.type == type_to_exclude]
    if hide_premium:
        Logger.debug("Hiding Premium items")
        items = [i for i in items if not i.isPaid or i.type == type_to_exclude]

    # Local import for performance
    from cloaker import Cloaker
    cloaker = Cloaker(self, AddonSettings.store(LOCAL), logger=Logger.instance())
    if not AddonSettings.show_cloaked_items():
        Logger.debug("Hiding Cloaked items")
        items = [i for i in items if not cloaker.is_cloaked(i.url)]
    else:
        cloaked_items = [i for i in items if cloaker.is_cloaked(i.url)]
        for c in cloaked_items:
            c.isCloaked = True

    if len(items) != old_count:
        Logger.info("Hidden %s items due to DRM/GEO/Premium/cloak filter (Hide Folders=%s)",
                    old_count - len(items), hide_folders)

    # Check for grouping or not
    limit = AddonSettings.get_list_limit()
    folder_items = [i for i in items if i.type.lower() == "folder"]

    # we should also de-duplicate before calculating
    folder_items = list(set(folder_items))
    folders = len(folder_items)

    if 0 < limit < folders:
        # let's filter them by alphabet if the number is exceeded
        Logger.debug("Creating Groups for list exceeding '%s' folder items. Total folders found '%s'.",
                     limit, folders)
        other = LanguageHelper.get_localized_string(LanguageHelper.OtherChars)
        title_format = LanguageHelper.get_localized_string(LanguageHelper.StartWith)
        result = dict()
        non_grouped = []
        # Should we remove prefixes just as Kodi does?
        # prefixes = ("de", "het", "the", "een", "a", "an")

        for sub_item in items:
            if sub_item.dontGroup or sub_item.type != "folder":
                non_grouped.append(sub_item)
                continue

            char = sub_item.name[0].upper()
            # Should we de-prefix?
            # for p in prefixes:
            #     if sub_item.name.lower().startswith(p + " "):
            #         char = sub_item.name[len(p) + 1][0].upper()

            if char.isdigit():
                char = "0-9"
            elif not char.isalpha():
                char = other

            if char not in result:
                Logger.trace("Creating Grouped item from: %s", sub_item)
                if char == other:
                    item = MediaItem(title_format.replace("'", "") % (char, ), "")
                else:
                    item = MediaItem(title_format % (char.upper(), ), "")
                item.thumb = self.noImage
                item.complete = True
                # item.set_date(2100 + ord(char[0]), 1, 1, text='')
                result[char] = item
            else:
                item = result[char]
            item.items.append(sub_item)

        items = non_grouped + list(result.values())

    unique_results = sorted(set(items), key=items.index)
    Logger.trace("Found '%d' items of which '%d' are unique.", len(items), len(unique_results))
    return unique_results
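# The DataParser.PreProcessor callables that process_folder_list() invokes above are
# expected to return a (data, items) tuple, instantiated in at least ("", []) as the
# pre-process docstrings in this file put it. A minimal, hypothetical sketch of such a
# pre-processor follows; the method name, title and URL are illustrative only and not
# part of any actual channel.
def _example_pre_processor(self, data):
    items = []
    promo = MediaItem("\a.: Example Promo :.", "https://example.com/promo")
    promo.dontGroup = True
    promo.complete = True
    items.append(promo)
    # return the (possibly modified) data together with the extra items
    return data, items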
def create_generic_item(self, result_set, program_type): """ Creates a MediaItem of type 'video' or 'folder' using the result_set from the regex and a basic set of values. This method creates a new MediaItem from the Regular Expression or Json results <result_set>. The method should be implemented by derived classes and are specific to the channel. If the item is completely processed an no further data needs to be fetched the self.complete property should be set to True. If not set to True, the self.update_video_item method is called if the item is focussed or selected for playback. :param list[str]|dict result_set: The result_set of the self.episodeItemRegex :return: A new MediaItem of type 'video' or 'audio' (despite the method's name). :rtype: MediaItem|None """ title = result_set["title"] if not result_set.get("hasOndemandRights", True): Logger.debug("Item '%s' has no on-demand rights", title) return None item_id = result_set["id"] if program_type == "programme": url = "https://psapi.nrk.no/programs/{}?apiKey={}".format( item_id, self.__api_key) item = MediaItem(title, url) item.type = 'video' else: use_old_series_api = False if use_old_series_api: url = "https://psapi.nrk.no/series/{}?apiKey={}".format( item_id, self.__api_key) else: url = "https://psapi.nrk.no/tv/catalog/series/{}?apiKey={}".format( item_id, self.__api_key) item = MediaItem(title, url) item.type = 'folder' item.icon = self.icon item.isGeoLocked = result_set.get( "isGeoBlocked", result_set.get("usageRights", {}).get("isGeoBlocked", False)) description = result_set.get("description") if description and description.lower() != "no description": item.description = description if "image" not in result_set or "webImages" not in result_set["image"]: return item # noinspection PyTypeChecker item.thumb = self.__get_image(result_set["image"]["webImages"], "pixelWidth", "imageUrl") # see if there is a date? self.__set_date(result_set, item) return item
def add_live_streams(self, data): """ Performs pre-process actions for data processing. Accepts an data from the process_folder_list method, BEFORE the items are processed. Allows setting of parameters (like title etc) for the channel. Inside this method the <data> could be changed and additional items can be created. The return values should always be instantiated in at least ("", []). :param str data: The retrieve data that was loaded for the current item and URL. :return: A tuple of the data and a list of MediaItems that were generated. :rtype: tuple[str|JsonHelper,list[MediaItem]] """ items = [] if self.parentItem is None: live_item = MediaItem( "\a.: Live TV :.", "https://d5ms27yy6exnf.cloudfront.net/live/omroepflevoland/tv/index.m3u8" ) live_item.icon = self.icon live_item.thumb = self.noImage live_item.type = 'video' live_item.dontGroup = True now = datetime.datetime.now() live_item.set_date(now.year, now.month, now.day, now.hour, now.minute, now.second) items.append(live_item) live_item = MediaItem( "\a.: Live Radio :.", "https://d5ms27yy6exnf.cloudfront.net/live/omroepflevoland/radio/index.m3u8" ) live_item.icon = self.icon live_item.thumb = self.noImage live_item.type = 'video' live_item.dontGroup = True now = datetime.datetime.now() live_item.set_date(now.year, now.month, now.day, now.hour, now.minute, now.second) items.append(live_item) # add "More" more = LanguageHelper.get_localized_string(LanguageHelper.MorePages) current_url = self.parentItem.url if self.parentItem is not None else self.mainListUri url, page = current_url.rsplit("=", 1) url = "{}={}".format(url, int(page) + 1) item = MediaItem(more, url) item.thumb = self.noImage item.icon = self.icon item.fanart = self.fanart item.complete = True items.append(item) return data, items
def create_folder_item(self, result_set): """ Creates a MediaItem of type 'folder' using the result_set from the regex. This method creates a new MediaItem from the Regular Expression or Json results <result_set>. The method should be implemented by derived classes and are specific to the channel. :param list[str]|dict[str,str] result_set: The result_set of the self.episodeItemRegex :return: A new MediaItem of type 'folder'. :rtype: MediaItem|None """ if len(result_set) > 3 and result_set[3] != "": Logger.debug("Sub category folder found.") url = parse.urljoin( self.baseUrl, HtmlEntityHelper.convert_html_entities(result_set[3])) name = "\a.: %s :." % (result_set[4], ) item = MediaItem(name, url) item.thumb = self.noImage item.complete = True item.type = "folder" return item url = parse.urljoin( self.baseUrl, HtmlEntityHelper.convert_html_entities(result_set[0])) name = HtmlEntityHelper.convert_html_entities(result_set[1]) helper = HtmlHelper(result_set[2]) description = helper.get_tag_content("div", {'class': 'description'}) item = MediaItem(name, "%s/RSS" % (url, )) item.thumb = self.noImage item.type = 'folder' item.description = description.strip() date = helper.get_tag_content("div", {'class': 'date'}) if date == "": date = helper.get_tag_content("span", {'class': 'lastPublishedDate'}) if not date == "": date_parts = Regexer.do_regex(r"(\w+) (\d+)[^<]+, (\d+)", date) if len(date_parts) > 0: date_parts = date_parts[0] month_part = date_parts[0].lower() day_part = date_parts[1] year_part = date_parts[2] try: month = DateHelper.get_month_from_name(month_part, "en") item.set_date(year_part, month, day_part) except: Logger.error("Error matching month: %s", month_part, exc_info=True) item.complete = True return item
def add_others(self, data): """ Performs pre-process actions for data processing. Accepts an data from the process_folder_list method, BEFORE the items are processed. Allows setting of parameters (like title etc) for the channel. Inside this method the <data> could be changed and additional items can be created. The return values should always be instantiated in at least ("", []). :param str data: The retrieve data that was loaded for the current item and URL. :return: A tuple of the data and a list of MediaItems that were generated. :rtype: tuple[str|JsonHelper,list[MediaItem]] """ Logger.info("Performing Pre-Processing") items = [] others = MediaItem( "\b.: Populair :.", "https://api.kijk.nl/v2/default/sections/popular_PopularVODs?offset=0" ) items.append(others) days = MediaItem("\b.: Deze week :.", "#lastweek") items.append(days) search = MediaItem("\b.: Zoeken :.", "searchSite") search.complete = True search.icon = self.icon search.thumb = self.noImage search.dontGroup = True search.HttpHeaders = {"X-Requested-With": "XMLHttpRequest"} items.append(search) if self.channelCode == "veronica": live = LanguageHelper.get_localized_string( LanguageHelper.LiveStreamTitleId) live_radio = MediaItem("Radio Veronica {}".format(live), "") live_radio.type = "video" live_radio.icon = self.icon live_radio.thumb = self.noImage live_radio.dontGroup = True part = live_radio.create_new_empty_media_part() live_stream = "https://talparadiohls-i.akamaihd.net/hls/live/585615/VR-Veronica-1/playlist.m3u8" if AddonSettings.use_adaptive_stream_add_on(with_encryption=False, channel=self): stream = part.append_media_stream(live_stream, 0) M3u8.set_input_stream_addon_input(stream, self.proxy) live_radio.complete = True else: for s, b in M3u8.get_streams_from_m3u8(live_stream, self.proxy): live_radio.complete = True part.append_media_stream(s, b) items.append(live_radio) Logger.debug("Pre-Processing finished") return data, items
def create_recent_video_item(self, result_set):
    """ Creates a MediaItem of type 'video' using the result_set from the regex.

    This method creates a new MediaItem from the Regular Expression or Json
    results <result_set>. The method should be implemented by derived classes
    and is specific to the channel.

    If the item is completely processed and no further data needs to be fetched
    the self.complete property should be set to True. If not set to True, the
    self.update_video_item method is called if the item is focused or selected
    for playback.

    :param list[str]|dict result_set: The result_set of the self.episodeItemRegex

    :return: A new MediaItem of type 'video' or 'audio' (despite the method's name).
    :rtype: MediaItem|None

    """

    Logger.trace(result_set)

    show_title = result_set["abstract_name"]
    episode_title = result_set["title"]
    title = "{} - {}".format(show_title, episode_title)

    description = result_set.get("synopsis")
    uuid = result_set["uuid"]
    url = "http://www.rtl.nl/system/s4m/xldata/ux/%s?context=rtlxl&" \
          "d=pc&fmt=adaptive&version=3" % (uuid, )
    # The JSON urls do not yet work
    # url = "http://www.rtl.nl/system/s4m/vfd/version=1/d=pc/output=json/" \
    #       "fun=abstract/uuid=%s/fmt=smooth" % (uuid,)

    item = MediaItem(title.title(), url)
    item.type = "video"
    item.description = description
    item.thumb = "%s%s" % (self.posterBase, uuid, )

    audience = result_set.get("audience")
    Logger.debug("Found audience: %s", audience)
    item.isGeoLocked = audience == "ALLEEN_NL"
    # We can play the DRM stuff
    # item.isDrmProtected = audience == "DRM"

    station = result_set.get("station", None)
    if station:
        item.name = "{} ({})".format(item.name, station)
        icon = self.largeIconSet.get(station.lower(), None)
        if icon:
            Logger.trace("Setting icon to: %s", icon)
            item.icon = icon

    # 2018-12-05T19:30:00.000Z
    date_time = result_set.get("dateTime", None)
    if date_time:
        date_time = DateHelper.get_date_from_string(date_time[:-5], "%Y-%m-%dT%H:%M:%S")
        # The time is in UTC, while the show airs at UTC+1
        date_time = datetime.datetime(*date_time[:6]) + datetime.timedelta(hours=1)
        item.name = "{:02d}:{:02d}: {}".format(date_time.hour, date_time.minute, item.name)
        item.set_date(date_time.year, date_time.month, date_time.day,
                      date_time.hour, date_time.minute, date_time.second)

    return item
def create_video_item(self, result_set):
    """ Creates a MediaItem of type 'video' using the result_set from the regex.

    This method creates a new MediaItem from the Regular Expression or Json
    results <result_set>. The method should be implemented by derived classes
    and is specific to the channel.

    If the item is completely processed and no further data needs to be fetched
    the self.complete property should be set to True. If not set to True, the
    self.update_video_item method is called if the item is focused or selected
    for playback.

    :param list[str]|dict[str,dict[str,str]] result_set: The result_set of the self.episodeItemRegex

    :return: A new MediaItem of type 'video' or 'audio' (despite the method's name).
    :rtype: MediaItem|None

    """

    Logger.trace(result_set)

    drm_locked = False
    geo_blocked = result_set["is_geo_blocked"]
    title = result_set["title"]

    if ("_links" not in result_set or
            "stream" not in result_set["_links"] or
            "href" not in result_set["_links"]["stream"]):
        Logger.warning("No streams found for %s", title)
        return None

    # the description
    description = result_set["description"].strip()  # The long version
    summary = result_set["summary"].strip()  # The short version
    # Logger.Trace("Comparing:\nDesc: %s\nSumm:%s", description, summary)
    if not description.startswith(summary):
        # if the description does not already start with the summary, prepend the summary
        description = "%s\n\n%s" % (summary, description)

    video_type = result_set["type"]
    if not video_type == "program":
        title = "%s (%s)" % (title, video_type.title())

    elif result_set["format_position"]["is_episodic"]:  # and resultSet["format_position"]["episode"] != "0":
        # make sure we show the episode and season
        # season = int(resultSet["format_position"]["season"])
        episode = int(result_set["format_position"]["episode"] or "0")
        webisode = result_set.get("webisode", False)

        # if the name had the episode in it, translate it
        if episode > 0 and not webisode:
            description = "%s\n\n%s" % (title, description)
            title = "%s - %s %s %s %s" % (result_set["format_title"],
                                          self.seasonLabel,
                                          result_set["format_position"]["season"],
                                          self.episodeLabel,
                                          result_set["format_position"]["episode"])
        else:
            Logger.debug("Found episode number '0' for '%s', "
                         "using name instead of episode number", title)

    url = result_set["_links"]["stream"]["href"]
    item = MediaItem(title, url)

    date_info = None
    date_format = "%Y-%m-%dT%H:%M:%S"
    if "broadcasts" in result_set and len(result_set["broadcasts"]) > 0:
        date_info = result_set["broadcasts"][0]["air_at"]
        Logger.trace("Date set from 'air_at'")

        if "playable_from" in result_set["broadcasts"][0]:
            start_date = result_set["broadcasts"][0]["playable_from"]
            playable_from = DateHelper.get_date_from_string(start_date[0:-6], date_format)
            playable_from = datetime.datetime(*playable_from[0:6])
            if playable_from > datetime.datetime.now():
                drm_locked = True

    elif "publish_at" in result_set:
        date_info = result_set["publish_at"]
        Logger.trace("Date set from 'publish_at'")

    if date_info is not None:
        # publish_at=2007-09-02T21:55:00+00:00
        info = date_info.split("T")
        date_info = info[0]
        time_info = info[1]
        date_info = date_info.split("-")
        time_info = time_info.split(":")
        item.set_date(date_info[0], date_info[1], date_info[2], time_info[0], time_info[1], 0)

    item.type = "video"
    item.complete = False
    item.icon = self.icon
    item.isGeoLocked = geo_blocked
    item.isDrmProtected = drm_locked

    thumb_data = result_set['_links'].get('image', None)
    if thumb_data is not None:
        # Older version
        # item.thumbUrl = thumb_data['href'].replace("{size}", "thumb")
        item.thumb = self.__get_thumb_image(thumb_data['href'])

    item.description = description

    srt = result_set.get("sami_path")
    if not srt:
        srt = result_set.get("subtitles_webvtt")
    if srt:
        Logger.debug("Storing SRT/WebVTT path: %s", srt)
        part = item.create_new_empty_media_part()
        part.Subtitle = srt
    return item
def create_video_item(self, result_set): """ Creates a MediaItem of type 'video' using the result_set from the regex. This method creates a new MediaItem from the Regular Expression or Json results <result_set>. The method should be implemented by derived classes and are specific to the channel. If the item is completely processed an no further data needs to be fetched the self.complete property should be set to True. If not set to True, the self.update_video_item method is called if the item is focussed or selected for playback. :param list[str]|dict result_set: The result_set of the self.episodeItemRegex :return: A new MediaItem of type 'video' or 'audio' (despite the method's name). :rtype: MediaItem|None """ Logger.trace(result_set) episode_key = result_set["episode_key"] if episode_key: episode_data = self.episodes.get(episode_key, None) if not episode_data: Logger.warning("Could not find episodes data for key: %s", episode_key) return None Logger.debug("Found Episode Data: %s", episode_data) else: Logger.debug("No Episode Data Found") episode_data = None title = result_set["title"] description = None if episode_data: if title: title = "%s - %s" % (episode_data["name"], title) else: title = episode_data["name"] description = episode_data.get("synopsis", None) # tarifs have datetimes # noinspection PyStatementEffect # """ # "ddr_timeframes": [{ # "start": 1382119200, # "stop": 1382378399, # "tariff": 149 # }, # { # "start": 1382378400, # "tariff": 0 # }], # # """ tariffs = result_set.get("ddr_timeframes") premium_item = False if tariffs: Logger.trace(tariffs) for tariff in tariffs: # type: dict if tariff["tariff"] > 0: start = tariff.get("start", 0) end = tariff.get("stop", 2147483647) start = DateHelper.get_date_from_posix(start) end = DateHelper.get_date_from_posix(end) now = datetime.datetime.now() if start < now < end: premium_item = True Logger.debug( "Found a tariff for this episode: %s - %s: %s", start, end, tariff["tariff"]) break uuid = result_set["uuid"] url = "http://www.rtl.nl/system/s4m/xldata/ux/%s?context=rtlxl&d=pc&fmt=adaptive&version=3" % ( uuid, ) # The JSON urls do not yet work # url = "http://www.rtl.nl/system/s4m/vfd/version=1/d=pc/output=json/fun=abstract/uuid=%s/fmt=smooth" % (uuid,) item = MediaItem(title.title(), url) item.type = "video" item.isPaid = premium_item item.description = description item.thumb = "%s%s" % ( self.posterBase, uuid, ) station = result_set.get("station", None) if station: icon = self.largeIconSet.get(station.lower(), None) if icon: Logger.trace("Setting icon to: %s", icon) item.icon = icon date_time = result_set.get("display_date", None) if date_time: date_time = DateHelper.get_date_from_posix(int(date_time)) item.set_date(date_time.year, date_time.month, date_time.day, date_time.hour, date_time.minute, date_time.second) return item
def pre_process_folder_list(self, data):
    """ Performs pre-process actions for data processing.

    Accepts data from the process_folder_list method, BEFORE the items are processed.
    Allows setting of parameters (like title etc) for the channel. Inside this method
    the <data> could be changed and additional items can be created.

    The return values should always be instantiated in at least ("", []).

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]

    """

    items = []

    # We need to keep the JSON data, in order to refer to it from the create methods.
    self.currentJson = JsonHelper(data, Logger.instance())

    # Extract season (called abstracts) information
    self.abstracts = dict()  # the seasons
    Logger.debug("Storing abstract information")
    for abstract in self.currentJson.get_value("abstracts"):
        self.abstracts[abstract["key"]] = abstract

    # If we have episodes available, list them
    self.episodes = dict()
    if "episodes" in self.currentJson.get_value():
        Logger.debug("Storing episode information")
        for episode in self.currentJson.get_value("episodes"):
            self.episodes[episode["key"]] = episode

    # extract some meta data
    self.posterBase = self.currentJson.get_value("meta", "poster_base_url")
    self.thumbBase = self.currentJson.get_value("meta", "thumb_base_url")

    # And create page items
    items_on_page = int(self.currentJson.get_value("meta", "nr_of_videos_onpage"))
    total_items = int(self.currentJson.get_value("meta", "nr_of_videos_total"))
    current_page = self.currentJson.get_value("meta", "pg")
    if current_page == "all":
        current_page = 1
    else:
        current_page = int(current_page)
    Logger.debug("Found a total of %s items (%s items per page), we are on page %s",
                 total_items, items_on_page, current_page)

    # But don't show them if no episodes were found
    if self.episodes:
        if items_on_page < 50:
            Logger.debug("No more pages to show.")
        else:
            next_page = current_page + 1
            url = self.parentItem.url[:self.parentItem.url.rindex("=")]
            url = "%s=%s" % (url, next_page)
            Logger.trace(url)

            page_item = MediaItem(str(next_page), url)
            page_item.type = "page"
            page_item.complete = True
            items.append(page_item)

    return data, items
def add_recent_items(self, data): """ Builds the "Recent" folder for this channel. :param str data: The retrieve data that was loaded for the current item and URL. :return: A tuple of the data and a list of MediaItems that were generated. :rtype: tuple[str|JsonHelper,list[MediaItem]] """ items = [] recent = MediaItem("\a .: Recent :.", "") recent.type = "folder" recent.complete = True recent.dontGroup = True items.append(recent) today = datetime.datetime.now() days = [ "Maandag", "Dinsdag", "Woensdag", "Donderdag", "Vrijdag", "Zaterdag", "Zondag" ] for i in range(0, 7, 1): air_date = today - datetime.timedelta(i) Logger.trace("Adding item for: %s", air_date) # Determine a nice display date day = days[air_date.weekday()] if i == 0: day = "Vandaag" elif i == 1: day = "Gisteren" elif i == 2: day = "Eergisteren" title = "%04d-%02d-%02d - %s" % (air_date.year, air_date.month, air_date.day, day) # url = "https://www.npostart.nl/media/series?page=1&dateFrom=%04d-%02d-%02d&tileMapping=normal&tileType=teaser&pageType=catalogue" % \ url = "https://xlapi.rtl.nl/version=1/fun=gemist/model=svod/bcdate=" \ "{0:04d}{1:02d}{2:02d}/".format(air_date.year, air_date.month, air_date.day) extra = MediaItem(title, url) extra.complete = True extra.icon = self.icon extra.thumb = self.noImage extra.dontGroup = True extra.set_date(air_date.year, air_date.month, air_date.day, text="") recent.items.append(extra) news = MediaItem("\a .: Zoeken :.", "#searchSite") news.type = "folder" news.complete = True news.dontGroup = True items.append(news) return data, items
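# Small illustration of how add_recent_items() above derives the bcdate part of the
# "gemist" URL from a date. The helper name and the date are made up for the example.
def _format_bcdate_example():
    import datetime
    air_date = datetime.datetime(2019, 3, 7)
    return "{0:04d}{1:02d}{2:02d}".format(air_date.year, air_date.month, air_date.day)  # "20190307"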
def add_categories(self, data): """ Performs pre-process actions for data processing. Accepts an data from the process_folder_list method, BEFORE the items are processed. Allows setting of parameters (like title etc) for the channel. Inside this method the <data> could be changed and additional items can be created. The return values should always be instantiated in at least ("", []). :param str data: The retrieve data that was loaded for the current item and URL. :return: A tuple of the data and a list of MediaItems that were generated. :rtype: tuple[str|JsonHelper,list[MediaItem]] """ Logger.info("Performing Pre-Processing") items = [] cat = MediaItem( "\b.: Categorieën :.", "http://m.schooltv.nl/api/v1/categorieen.json?size=100") cat.thumb = self.noImage cat.icon = self.icon cat.fanart = self.fanart cat.complete = True cat.dontGroup = True items.append(cat) tips = MediaItem( "\b.: Tips :.", "http://m.schooltv.nl/api/v1/programmas/tips.json?size=100") tips.thumb = self.noImage tips.icon = self.icon tips.fanart = self.fanart tips.complete = True tips.dontGroup = True items.append(tips) data = JsonHelper(data) ages = MediaItem("\b.: Leeftijden :.", "") ages.thumb = self.noImage ages.icon = self.icon ages.fanart = self.fanart ages.complete = True ages.dontGroup = True for age in ("0-4", "5-6", "7-8", "9-12", "13-15", "16-18"): age_item = MediaItem( "%s Jaar" % (age, ), "http://m.schooltv.nl/api/v1/leeftijdscategorieen/%s/afleveringen.json?" "size=%s&sort=Nieuwste" % (age, self.__PageSize)) age_item.thumb = self.noImage age_item.icon = self.icon age_item.fanart = self.fanart age_item.complete = True age_item.dontGroup = True ages.items.append(age_item) # We should list programs instead of videos, so just prefill them here. for program in data.get_value(): if age in program['ageGroups']: age_item.items.append(self.create_episode_item(program)) items.append(ages) Logger.debug("Pre-Processing finished") return data, items
def add_live_items_and_genres(self, data): """ Adds the Live items, Channels and Last Episodes to the listing. :param str data: The retrieve data that was loaded for the current item and URL. :return: A tuple of the data and a list of MediaItems that were generated. :rtype: tuple[str|JsonHelper,list[MediaItem]] """ items = [] extra_items = { "Kanaler": "#kanaler", "Livesändningar": "https://www.svtplay.se/live?sida=1", "Sök": "searchSite", "Senaste program": "https://www.svtplay.se/senaste?sida=1", "Sista chansen": "https://www.svtplay.se/sista-chansen?sida=1", "Populära": "https://www.svtplay.se/populara?sida=1", } # https://www.svtplay.se/ajax/dokumentar/titlar?filterAccessibility=&filterRights= category_items = { "Drama": ( "https://www.svtplay.se/genre/drama", "https://www.svtstatic.se/play/play5/images/categories/posters/drama-d75cd2da2eecde36b3d60fad6b92ad42.jpg" ), "Dokumentär": ( "https://www.svtplay.se/genre/dokumentar", "https://www.svtstatic.se/play/play5/images/categories/posters/dokumentar-00599af62aa8009dbc13577eff894b8e.jpg" ), "Humor": ( "https://www.svtplay.se/genre/humor", "https://www.svtstatic.se/play/play5/images/categories/posters/humor-abc329317eedf789d2cca76151213188.jpg" ), "Livsstil": ( "https://www.svtplay.se/genre/livsstil", "https://www.svtstatic.se/play/play5/images/categories/posters/livsstil-2d9cd77d86c086fb8908ce4905b488b7.jpg" ), "Underhållning": ( "https://www.svtplay.se/genre/underhallning", "https://www.svtstatic.se/play/play5/images/categories/posters/underhallning-a60da5125e715d74500a200bd4416841.jpg" ), "Kultur": ( "https://www.svtplay.se/genre/kultur", "https://www.svtstatic.se/play/play5/images/categories/posters/kultur-93dca50ed1d6f25d316ac1621393851a.jpg" ), "Samhälle & Fakta": ( "https://www.svtplay.se/genre/samhalle-och-fakta", "https://www.svtstatic.se/play/play5/images/categories/posters/samhalle-och-fakta-3750657f72529a572f3698e01452f348.jpg" ), "Film": ( "https://www.svtplay.se/genre/film", "https://www.svtstatic.se/image-cms/svtse/1436202866/svtplay/article2952281.svt/ALTERNATES/large/film1280-jpg" ), "Barn": ( "https://www.svtplay.se/genre/barn", "https://www.svtstatic.se/play/play5/images/categories/posters/barn-c17302a6f7a9a458e0043b58bbe8ab79.jpg" ), "Nyheter": ( "https://www.svtplay.se/genre/nyheter", "https://www.svtstatic.se/play/play6/images/categories/posters/nyheter.e67ff1b5770152af4690ad188546f9e9.jpg" ), "Sport": ( "https://www.svtplay.se/genre/sport", "https://www.svtstatic.se/play/play6/images/categories/posters/sport.98b65f6627e4addbc4177542035ea504.jpg" ) } for title, url in extra_items.items(): new_item = MediaItem("\a.: %s :." % (title, ), url) new_item.complete = True new_item.thumb = self.noImage new_item.dontGroup = True new_item.set_date(2099, 1, 1, text="") items.append(new_item) new_item = MediaItem("\a.: Kategorier :.", "https://www.svtplay.se/genre") new_item.complete = True new_item.thumb = self.noImage new_item.dontGroup = True new_item.set_date(2099, 1, 1, text="") for title, (url, thumb) in category_items.items(): cat_item = MediaItem(title, url) cat_item.complete = True cat_item.thumb = thumb or self.noImage cat_item.dontGroup = True # cat_item.set_date(2099, 1, 1, text="") new_item.items.append(cat_item) items.append(new_item) new_item = MediaItem("\a.: Genrer/Taggar :.", "https://www.svtplay.se/genre") new_item.complete = True new_item.thumb = self.noImage new_item.dontGroup = True new_item.set_date(2099, 1, 1, text="") items.append(new_item) return data, items
def create_video_item(self, result_set): """ Creates a MediaItem of type 'video' using the result_set from the regex. This method creates a new MediaItem from the Regular Expression or Json results <result_set>. The method should be implemented by derived classes and are specific to the channel. If the item is completely processed an no further data needs to be fetched the self.complete property should be set to True. If not set to True, the self.update_video_item method is called if the item is focussed or selected for playback. :param dict[str,str|dict] result_set: The result_set of the self.episodeItemRegex :return: A new MediaItem of type 'video' or 'audio' (despite the method's name). :rtype: MediaItem|None """ Logger.trace(result_set) title = result_set["title"] if title is None: Logger.warning("Found item with all <null> items. Skipping") return None if "subtitle" in result_set and result_set['subtitle'].lower( ) not in title.lower(): title = "%(title)s - %(subtitle)s" % result_set url = "http://m.schooltv.nl/api/v1/afleveringen/%(mid)s.json" % result_set item = MediaItem(title, url) item.description = result_set.get("description", "") age_groups = result_set.get('ageGroups', ['Onbekend']) item.description = "%s\n\nLeeftijden: %s" % (item.description, ", ".join(age_groups)) item.thumb = result_set.get("image", "") item.icon = self.icon item.type = 'video' item.fanart = self.fanart item.complete = False item.set_info_label("duration", result_set['duration']) if "publicationDate" in result_set: broadcast_date = DateHelper.get_date_from_posix( int(result_set['publicationDate'])) item.set_date(broadcast_date.year, broadcast_date.month, broadcast_date.day, broadcast_date.hour, broadcast_date.minute, broadcast_date.second) return item
def create_json_item(self, result_set): # NOSONAR """ Creates a MediaItem of type 'video' using the result_set from the regex. This method creates a new MediaItem from the Regular Expression or Json results <result_set>. The method should be implemented by derived classes and are specific to the channel. If the item is completely processed an no further data needs to be fetched the self.complete property should be set to True. If not set to True, the self.update_video_item method is called if the item is focussed or selected for playback. :param list[str]|dict result_set: The result_set of the self.episodeItemRegex :return: A new MediaItem of type 'video' or 'audio' (despite the method's name). :rtype: MediaItem|None """ Logger.trace(result_set) # determine the title program_title = result_set.get("programTitle", "") or "" show_title = result_set.get("title", "") or "" if show_title == "" and program_title != "": title = program_title elif show_title != "" and program_title == "": title = show_title elif program_title == "" and show_title == "": Logger.warning("Could not find title for item: %s", result_set) return None elif show_title != "" and show_title != program_title: title = "%s - %s" % (program_title, show_title) else: # they are the same title = show_title # NOSONAR if "live" in result_set and result_set["live"]: title = "%s (·Live·)" % (title, ) item_type = result_set.get("contentType") if "contentUrl" in result_set: url = result_set["contentUrl"] else: url = result_set["url"] broad_cast_date = result_set.get("broadcastDate", None) if item_type in ("videoEpisod", "videoKlipp", "singel"): if not url.startswith("/video/") and not url.startswith("/klipp/"): Logger.warning("Found video item without a /video/ or /klipp/ url.") return None item_type = "video" if "programVersionId" in result_set: url = "https://www.svt.se/videoplayer-api/video/%s" % (result_set["programVersionId"],) else: url = "%s%s" % (self.baseUrl, url) else: item_type = "folder" url = "%s%s" % (self.baseUrl, url) item = MediaItem(title, url) item.icon = self.icon item.type = item_type item.isGeoLocked = result_set.get("onlyAvailableInSweden", False) item.description = result_set.get("description", "") if "season" in result_set and "episodeNumber" in result_set and result_set["episodeNumber"]: season = int(result_set["season"]) episode = int(result_set["episodeNumber"]) if season > 0 and episode > 0: item.name = "s%02de%02d - %s" % (season, episode, item.name) item.set_season_info(season, episode) thumb = self.noImage if self.parentItem: thumb = self.parentItem.thumb for image_key in ("image", "imageMedium", "thumbnailMedium", "thumbnail", "poster"): if image_key in result_set and result_set[image_key] is not None: thumb = result_set[image_key] break item.thumb = self.__get_thumb(thumb or self.noImage) if broad_cast_date is not None: if "+" in broad_cast_date: broad_cast_date = broad_cast_date.rsplit("+")[0] time_stamp = DateHelper.get_date_from_string(broad_cast_date, "%Y-%m-%dT%H:%M:%S") item.set_date(*time_stamp[0:6]) # Set the expire date expire_date = result_set.get("expireDate") if expire_date is not None: expire_date = expire_date.split("+")[0].replace("T", " ") item.description = \ "{}\n\n{}: {}".format(item.description or "", self.__expires_text, expire_date) length = result_set.get("materialLength", 0) if length > 0: item.set_info_label(MediaItem.LabelDuration, length) return item
def load_programs(self, data):
    """ Performs pre-process actions for data processing.

    Accepts data from the process_folder_list method, BEFORE the items are processed.
    Allows setting of parameters (like title etc) for the channel. Inside this method
    the <data> could be changed and additional items can be created.

    The return values should always be instantiated in at least ("", []).

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]

    """

    items = []

    # fetch all pages
    p = 1
    url_format = "https://{0}/content/shows?" \
                 "include=images" \
                 "&page%5Bsize%5D=100&page%5Bnumber%5D={{0}}".format(self.baseUrlApi)
    # "include=images%2CprimaryChannel" \

    url = url_format.format(p)
    data = UriHandler.open(url, proxy=self.proxy)
    json = JsonHelper(data)
    pages = json.get_value("meta", "totalPages")
    programs = json.get_value("data") or []

    # extract the images
    self.__update_image_lookup(json)

    for p in range(2, pages + 1, 1):
        url = url_format.format(p)
        Logger.debug("Loading: %s", url)

        data = UriHandler.open(url, proxy=self.proxy)
        json = JsonHelper(data)
        programs += json.get_value("data") or []

        # extract the images
        self.__update_image_lookup(json)

    Logger.debug("Found a total of %s items over %s pages", len(programs), pages)

    for p in programs:
        item = self.create_program_item(p)
        if item is not None:
            items.append(item)

    if self.recentUrl:
        recent_text = LanguageHelper.get_localized_string(LanguageHelper.Recent)
        recent = MediaItem("\b.: {} :.".format(recent_text), self.recentUrl)
        recent.dontGroup = True
        recent.fanart = self.fanart
        items.append(recent)

    # live items
    if self.liveUrl:
        live = MediaItem("\b.: Live :.", self.liveUrl)
        live.type = "video"
        live.dontGroup = True
        live.isGeoLocked = True
        live.isLive = True
        live.fanart = self.fanart
        items.append(live)

    search = MediaItem("\a.: Sök :.", "searchSite")
    search.type = "folder"
    search.dontGroup = True
    search.fanart = self.fanart
    items.append(search)

    return data, items
def create_channel_item(self, channel): """ Creates a MediaItem of type 'video' for a live channel using the result_set from the regex. This method creates a new MediaItem from the Regular Expression or Json results <result_set>. The method should be implemented by derived classes and are specific to the channel. If the item is completely processed an no further data needs to be fetched the self.complete property should be set to True. If not set to True, the self.update_video_item method is called if the item is focussed or selected for playback. :param list[str]|dict channel: The result_set of the self.episodeItemRegex :return: A new MediaItem of type 'video' or 'audio' (despite the method's name). :rtype: MediaItem|None """ Logger.trace(channel) title = channel["programmeTitle"] episode = channel.get("episodeTitle", None) thumb = self.noImage channel_id = channel["channel"].lower() if channel_id == "svtk": channel_title = "Kunskapskanalen" channel_id = "kunskapskanalen" elif channel_id == "svtb": channel_title = "Barnkanalen" channel_id = "barnkanalen" else: channel_title = channel["channel"] description = channel.get("longDescription") date_format = "%Y-%m-%dT%H:%M:%S" start_time = DateHelper.get_date_from_string(channel["publishingTime"][:19], date_format) end_time = DateHelper.get_date_from_string(channel["publishingEndTime"][:19], date_format) if episode: title = "%s: %s - %s (%02d:%02d - %02d:%02d)" \ % (channel_title, title, episode, start_time.tm_hour, start_time.tm_min, end_time.tm_hour, end_time.tm_min) else: title = "%s: %s (%02d:%02d - %02d:%02d)" \ % (channel_title, title, start_time.tm_hour, start_time.tm_min, end_time.tm_hour, end_time.tm_min) channel_item = MediaItem( title, "https://www.svt.se/videoplayer-api/video/ch-%s" % (channel_id.lower(), ) ) channel_item.type = "video" channel_item.description = description channel_item.isLive = True channel_item.isGeoLocked = True channel_item.thumb = thumb if "titlePageThumbnailIds" in channel and channel["titlePageThumbnailIds"]: channel_item.thumb = "https://www.svtstatic.se/image/wide/650/%s.jpg" % (channel["titlePageThumbnailIds"][0], ) return channel_item
def create_instalment_video_item(self, result_set): """ Creates a MediaItem of type 'video' using the result_set from the regex. This method creates a new MediaItem from the Regular Expression or Json results <result_set>. The method should be implemented by derived classes and are specific to the channel. If the item is completely processed an no further data needs to be fetched the self.complete property should be set to True. If not set to True, the self.update_video_item method is called if the item is focussed or selected for playback. :param list[str]|dict[str,str|dict] result_set: The result_set of the self.episodeItemRegex :return: A new MediaItem of type 'video' or 'audio' (despite the method's name). :rtype: MediaItem|None """ title = result_set["titles"]["title"] sub_title = result_set["titles"]["subtitle"] # noinspection PyTypeChecker if result_set.get("availability", {}).get("status", "available") != "available": Logger.debug("Found '%s' with a non-available status", title) return None url = "https://psapi.nrk.no/programs/{}?apiKey={}".format( result_set["prfId"], self.__api_key) item = MediaItem(title, url) item.type = 'video' item.thumb = self.__get_image(result_set["image"], "width", "url") item.fanart = self.parentItem.fanart # noinspection PyTypeChecker item.isGeoLocked = result_set.get("usageRights", {}).get( "geoBlock", {}).get("isGeoBlocked", False) if sub_title and sub_title.strip(): item.description = sub_title if "firstTransmissionDateDisplayValue" in result_set: Logger.trace("Using 'firstTransmissionDateDisplayValue' for date") day, month, year = result_set[ "firstTransmissionDateDisplayValue"].split(".") item.set_date(year, month, day) elif "usageRights" in result_set and "from" in result_set[ "usageRights"] and result_set["usageRights"][ "from"] is not None: Logger.trace("Using 'usageRights.from.date' for date") # noinspection PyTypeChecker date_value = result_set["usageRights"]["from"]["date"].split( "+")[0] time_stamp = DateHelper.get_date_from_string( date_value, date_format="%Y-%m-%dT%H:%M:%S") item.set_date(*time_stamp[0:6]) return item
def add_live_stream(self, data): """ Performs pre-process actions for data processing. Accepts an data from the process_folder_list method, BEFORE the items are processed. Allows setting of parameters (like title etc) for the channel. Inside this method the <data> could be changed and additional items can be created. The return values should always be instantiated in at least ("", []). :param str data: The retrieve data that was loaded for the current item and URL. :return: A tuple of the data and a list of MediaItems that were generated. :rtype: tuple[str|JsonHelper,list[MediaItem]] """ items = [] item = MediaItem("\a.: TWiT.TV Live :.", "http://live.twit.tv/") item.thumb = self.noImage item.icon = self.icon item.complete = True playback_item = MediaItem("Play Live", "http://live.twit.tv/") playback_item.type = "playlist" playback_item.thumb = self.noImage playback_item.icon = self.icon playback_item.isLive = True playback_part = playback_item.create_new_empty_media_part() # noinspection PyStatementEffect """ BitGravity There are two streams available from BitGravity; a 512 kbps low-bandwidth stream and a 1 Mbps high-bandwidth stream. UStream This is the default stream. The UStream stream is a variable stream that maxes at 2.2 Mbps and adjusts down based on your bandwidth. Justin.tv The Justin.tv stream is a 2.2 mbps high-bandwidth stream that will adjust to lower bandwidth and resolutions. Flosoft.biz The Flosoft.biz stream is a 5 resolution/bitrate HLS stream, intended for our app developers. Please see Flosoft Developer Section. This stream is hosted by TWiT through Flosoft.biz """ # http://wiki.twit.tv/wiki/TWiT_Live#Direct_links_to_TWiT_Live_Video_Streams media_urls = { # Justin TV # "2000": "http://usher.justin.tv/stream/multi_playlist/twit.m3u8", # Flosoft (http://wiki.twit.tv/wiki/Developer_Guide#Flosoft.biz) "264": "http://hls.cdn.flosoft.biz/flosoft/mp4:twitStream_240/playlist.m3u8", "512": "http://hls.cdn.flosoft.biz/flosoft/mp4:twitStream_360/playlist.m3u8", "1024": "http://hls.cdn.flosoft.biz/flosoft/mp4:twitStream_480/playlist.m3u8", "1475": "http://hls.cdn.flosoft.biz/flosoft/mp4:twitStream_540/playlist.m3u8", "1778": "http://hls.cdn.flosoft.biz/flosoft/mp4:twitStream_720/playlist.m3u8", # UStream "1524": "http://iphone-streaming.ustream.tv/ustreamVideo/1524/streams/live/playlist.m3u8", # BitGravity # "512": "http://209.131.99.99/twit/live/low", # "1024": "http://209.131.99.99/twit/live/high", #"512": "http://64.185.191.180/cdn-live-s1/_definst_/twit/live/low/playlist.m3u8", #"1024": "http://64.185.191.180/cdn-live-s1/_definst_/twit/live/high/playlist.m3u8", } for bitrate in media_urls: playback_part.append_media_stream(media_urls[bitrate], bitrate) Logger.debug("Streams: %s", playback_part) playback_item.complete = True item.items.append(playback_item) Logger.debug("Appended: %s", playback_item) items.append(item) return data, items
def create_video_item(self, result_set): """ Creates a MediaItem of type 'video' using the result_set from the regex. This method creates a new MediaItem from the Regular Expression or Json results <result_set>. The method should be implemented by derived classes and are specific to the channel. If the item is completely processed an no further data needs to be fetched the self.complete property should be set to True. If not set to True, the self.update_video_item method is called if the item is focussed or selected for playback. :param dict[str,dict|None] result_set: The result_set of the self.episodeItemRegex :return: A new MediaItem of type 'video' or 'audio' (despite the method's name). :rtype: MediaItem|None """ Logger.trace(result_set) title = result_set["title"] if "subTitle" in result_set: title = "%s - %s" % (title, result_set["subTitle"]) mgid = result_set["id"].split(":")[-1] url = "http://feeds.mtvnservices.com/od/feed/intl-mrss-player-feed" \ "?mgid=mgid:arc:episode:mtvplay.com:%s" \ "&ep=%s" \ "&episodeType=segmented" \ "&imageEp=android.playplex.mtv.%s" \ "&arcEp=android.playplex.mtv.%s" \ % (mgid, self.__backgroundServiceEp, self.__region.lower(), self.__region.lower()) item = MediaItem(title, url) item.type = "video" item.icon = self.icon item.description = result_set.get("description", None) item.thumb = self.parentItem.thumb item.fanart = self.parentItem.fanart item.isGeoLocked = True images = result_set.get("images", []) if images: # mgid:file:gsp:scenic:/international/mtv.nl/playplex/dutch-ridiculousness/Dutch_Ridiculousness_Landscape.png # http://playplex.mtvnimages.com/uri/mgid:file:gsp:scenic:/international/mtv.nl/playplex/dutch-ridiculousness/Dutch_Ridiculousness_Landscape.png for image in images: if image["width"] > 500: pass # no fanart here else: item.thumb = "http://playplex.mtvnimages.com/uri/%(url)s" % image date = result_set.get("originalAirDate", None) if not date: date = result_set.get("originalPublishDate", None) if date: time_stamp = date["timestamp"] date_time = DateHelper.get_date_from_posix(time_stamp) item.set_date(date_time.year, date_time.month, date_time.day, date_time.hour, date_time.minute, date_time.second) return item
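# The MGID handling in the create_video_item() above keeps only the last
# colon-separated part of the id. A tiny illustration with a made-up id:
def _extract_mgid_example(full_id="mgid:arc:episode:mtvplay.com:abc-123"):
    return full_id.split(":")[-1]  # "abc-123"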
def __init__(self, title, thumbnail_url, preview_youtube_url, actors, release_date, imdb_url):
    MediaItem.__init__(self, title, thumbnail_url, preview_youtube_url)
    self.actors = actors
    self.release_date = release_date
    self.imdb_url = imdb_url
def add_live_channels_and_folders(self, data):
    """ Performs pre-process actions for data processing.

    Accepts data from the process_folder_list method, BEFORE the items are
    processed. Allows setting of parameters (like title etc.) for the channel.
    Inside this method the <data> could be changed and additional items can
    be created.

    The return values should always be instantiated in at least ("", []).

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]

    """

    Logger.info("Generating Live channels")
    items = []

    live_channels = [
        {"name": "BBC 1 HD", "code": "bbc_one_hd", "image": "bbc1large.png"},
        {"name": "BBC 2 HD", "code": "bbc_two_hd", "image": "bbc2large.png"},
        {"name": "BBC 3 HD", "code": "bbc_three_hd", "image": "bbc3large.png"},
        {"name": "BBC 4 HD", "code": "bbc_four_hd", "image": "bbc4large.png"},
        {"name": "CBBC", "code": "cbbc_hd", "image": "cbbclarge.png"},
        {"name": "CBeebies", "code": "cbeebies_hd", "image": "cbeebieslarge.png"},
        {"name": "BBC News Channel", "code": "bbc_news24", "image": "bbcnewslarge.png"},
        {"name": "BBC Parliament", "code": "bbc_parliament", "image": "bbcparliamentlarge.png"},
        {"name": "Alba", "code": "bbc_alba", "image": "bbcalbalarge.png"},
        {"name": "S4C", "code": "s4cpbs", "image": "bbchdlarge.png"},
        {"name": "BBC One London", "code": "bbc_one_london", "image": "bbchdlarge.png"},
        {"name": "BBC One Scotland", "code": "bbc_one_scotland_hd", "image": "bbchdlarge.png"},
        {"name": "BBC One Northern Ireland", "code": "bbc_one_northern_ireland_hd", "image": "bbchdlarge.png"},
        {"name": "BBC One Wales", "code": "bbc_one_wales_hd", "image": "bbchdlarge.png"},
        {"name": "BBC Two Scotland", "code": "bbc_two_scotland", "image": "bbchdlarge.png"},
        {"name": "BBC Two Northern Ireland", "code": "bbc_two_northern_ireland_digital", "image": "bbchdlarge.png"},
        {"name": "BBC Two Wales", "code": "bbc_two_wales_digital", "image": "bbchdlarge.png"},
    ]

    live = MediaItem("Live Channels", "")
    live.dontGroup = True
    live.type = "folder"
    items.append(live)

    for channel in live_channels:
        url = "http://a.files.bbci.co.uk/media/live/manifesto/audio_video/simulcast/hds/uk/pc/ak/%(code)s.f4m" % channel
        item = MediaItem(channel["name"], url)
        item.isGeoLocked = True
        item.isLive = True
        item.type = "video"
        item.complete = False
        item.thumb = self.get_image_location(channel["image"])
        live.items.append(item)

    extra = MediaItem("Shows (A-Z)", "#alphalisting")
    extra.complete = True
    extra.icon = self.icon
    extra.thumb = self.noImage
    extra.description = "Alphabetical show listing of BBC shows"
    extra.dontGroup = True
    extra.set_date(2200, 1, 1, text="")
    items.append(extra)

    return data, items
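# Quick sanity check of the manifest URL built in the loop above: the %(code)s
# placeholder is filled from the channel dictionary. Uses one entry verbatim
# from the live_channels list; purely illustrative.
channel = {"name": "BBC News Channel", "code": "bbc_news24", "image": "bbcnewslarge.png"}
url = "http://a.files.bbci.co.uk/media/live/manifesto/audio_video/simulcast/hds/uk/pc/ak/%(code)s.f4m" % channel
print(url)
# -> http://a.files.bbci.co.uk/media/live/manifesto/audio_video/simulcast/hds/uk/pc/ak/bbc_news24.f4m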
def StievieCreateEpgItems(self, epg):
    Logger.Trace(epg)
    Logger.Debug("Processing EPG for channel %s", epg["id"])

    items = []
    summerTime = time.localtime().tm_isdst
    now = datetime.datetime.now()

    for resultSet in epg["items"]:
        # if not resultSet["parentSeriesOID"]:
        #     continue

        # Does not always work
        # videoId = resultSet["epgId"].replace("-", "_")
        # url = "https://vod.medialaan.io/vod/v2/videos/%s_Stievie_free" % (videoId, )
        videoId = resultSet["programOID"]
        url = "https://vod.medialaan.io/vod/v2/videos?episodeIds=%s&limit=10&offset=0&sort=broadcastDate&sortDirection=asc" % (videoId, )

        title = resultSet["title"]
        if resultSet["episode"] and resultSet["season"]:
            title = "%s - s%02de%02d" % (title, resultSet["season"], resultSet["episode"])

        if "startTime" in resultSet and resultSet["startTime"]:
            dateTime = resultSet["startTime"]
            dateValue = DateHelper.GetDateFromString(dateTime, dateFormat="%Y-%m-%dT%H:%M:%S.000Z")
            # Convert to a Belgian posix time stamp
            dateValue2 = time.mktime(dateValue) + (1 + summerTime) * 60 * 60
            # Convert the posix value back to a time stamp
            startTime = DateHelper.GetDateFromPosix(dateValue2)
            title = "%02d:%02d - %s" % (startTime.hour, startTime.minute, title)

            # Check for items in their black-out period
            if "blackout" in resultSet and resultSet["blackout"]["enabled"]:
                blackoutDuration = resultSet["blackout"]["duration"]
                blackoutStart = startTime + datetime.timedelta(seconds=blackoutDuration)
                if blackoutStart < now:
                    Logger.Debug("Found item in black-out period: %s (started at %s)", title, blackoutStart)
                    continue
        # else:
        #     startTime = self.parentItem.metaData["airDate"]

        item = MediaItem(title, url)
        item.type = "video"
        item.isGeoLocked = resultSet["geoblock"]
        item.description = resultSet["shortDescription"]
        # item.SetDate(startTime.year, startTime.month, startTime.day)

        if "images" in resultSet and resultSet["images"] and "styles" in resultSet["images"][0]:
            images = resultSet["images"][0]["styles"]
            # if "1520x855" in images:
            #     item.fanart = images["1520x855"]
            if "400x225" in images:
                item.thumb = images["400x225"]

        items.append(item)

    return items
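# Standalone sketch of the start-time handling above (standard library only).
# The EPG value is made up and the DateHelper calls are approximated with
# time.strptime / datetime.fromtimestamp; the resulting local time depends on
# the machine's timezone, just as in the method above.
import time
from datetime import datetime

summer_time = time.localtime().tm_isdst  # 1 during DST, 0 otherwise

start = "2017-04-25T18:30:00.000Z"  # hypothetical resultSet["startTime"]
parsed = time.strptime(start, "%Y-%m-%dT%H:%M:%S.000Z")

# Mirror the (1 + summerTime) * 60 * 60 correction towards Belgian time
posix = time.mktime(parsed) + (1 + summer_time) * 60 * 60
start_time = datetime.fromtimestamp(posix)
print("%02d:%02d" % (start_time.hour, start_time.minute))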
def add_categories_and_specials(self, data):
    """ Performs pre-process actions for data processing.

    Accepts data from the process_folder_list method, BEFORE the items are
    processed. Allows setting of parameters (like title etc.) for the channel.
    Inside this method the <data> could be changed and additional items can
    be created.

    The return values should always be instantiated in at least ("", []).

    :param str data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]

    """

    Logger.info("Performing Pre-Processing")
    items = []

    extras = {
        LanguageHelper.get_localized_string(LanguageHelper.Search): ("searchSite", None, False),
        LanguageHelper.get_localized_string(LanguageHelper.TvShows): (
            "http://webapi.tv4play.se/play/programs?is_active=true&platform=tablet"
            "&per_page=1000&fl=nid,name,program_image,is_premium,updated_at,channel&start=0",
            None, False)
    }

    # TV4 specific items
    if self.channelCode == "tv4se":
        extras.update({
            LanguageHelper.get_localized_string(LanguageHelper.Categories): (
                "http://webapi.tv4play.se/play/categories.json", None, False),
            LanguageHelper.get_localized_string(LanguageHelper.MostViewedEpisodes): (
                "http://webapi.tv4play.se/play/video_assets/most_viewed?type=episode"
                "&platform=tablet&is_live=false&per_page=%s&start=0" % (self.maxPageSize, ),
                None, False),
        })

    # if self.loggedOn:
    #     extras.update({
    #         "\a.: Favoriter :.": (
    #             "http://www.tv4play.se/program/favourites",
    #             None, True
    #         ),
    #     })

    today = datetime.datetime.now()
    days = [
        LanguageHelper.get_localized_string(LanguageHelper.Monday),
        LanguageHelper.get_localized_string(LanguageHelper.Tuesday),
        LanguageHelper.get_localized_string(LanguageHelper.Wednesday),
        LanguageHelper.get_localized_string(LanguageHelper.Thursday),
        LanguageHelper.get_localized_string(LanguageHelper.Friday),
        LanguageHelper.get_localized_string(LanguageHelper.Saturday),
        LanguageHelper.get_localized_string(LanguageHelper.Sunday)
    ]
    for i in range(0, 7, 1):
        start_date = today - datetime.timedelta(i)
        end_date = start_date + datetime.timedelta(1)
        day = days[start_date.weekday()]
        if i == 0:
            day = LanguageHelper.get_localized_string(LanguageHelper.Today)
        elif i == 1:
            day = LanguageHelper.get_localized_string(LanguageHelper.Yesterday)

        Logger.trace("Adding item for: %s - %s", start_date, end_date)

        # Old URL:
        # url = "http://webapi.tv4play.se/play/video_assets?exclude_node_nids=" \
        #       "nyheterna,v%C3%A4der,ekonomi,lotto,sporten,nyheterna-blekinge,nyheterna-bor%C3%A5s," \
        #       "nyheterna-dalarna,nyheterna-g%C3%A4vle,nyheterna-g%C3%B6teborg,nyheterna-halland," \
        #       "nyheterna-helsingborg,nyheterna-j%C3%B6nk%C3%B6ping,nyheterna-kalmar,nyheterna-link%C3%B6ping," \
        #       "nyheterna-lule%C3%A5,nyheterna-malm%C3%B6,nyheterna-norrk%C3%B6ping,nyheterna-skaraborg," \
        #       "nyheterna-skellefte%C3%A5,nyheterna-stockholm,nyheterna-sundsvall,nyheterna-ume%C3%A5," \
        #       "nyheterna-uppsala,nyheterna-v%C3%A4rmland,nyheterna-v%C3%A4st,nyheterna-v%C3%A4ster%C3%A5s," \
        #       "nyheterna-v%C3%A4xj%C3%B6,nyheterna-%C3%B6rebro,nyheterna-%C3%B6stersund,tv4-tolken," \
        #       "fotbollskanalen-europa" \
        #       "&platform=tablet&per_page=32&is_live=false&product_groups=2&type=episode&per_page=100"
        url = "http://webapi.tv4play.se/play/video_assets?exclude_node_nids=" \
              "&platform=tablet&per_page=32&is_live=false&product_groups=2&type=episode&per_page=100"
        url = "%s&broadcast_from=%s&broadcast_to=%s&" % (
            url, start_date.strftime("%Y%m%d"), end_date.strftime("%Y%m%d"))
        extras[day] = (url, start_date, False)

    extras[LanguageHelper.get_localized_string(LanguageHelper.CurrentlyPlayingEpisodes)] = (
        "http://webapi.tv4play.se/play/video_assets?exclude_node_nids=&platform=tablet&"
        "per_page=32&is_live=true&product_groups=2&type=episode&per_page=100",
        None, False)

    for name in extras:
        title = name
        url, date, is_live = extras[name]
        item = MediaItem(title, url)
        item.dontGroup = True
        item.complete = True
        item.thumb = self.noImage
        item.HttpHeaders = self.httpHeaders
        item.isLive = is_live

        if date is not None:
            item.set_date(date.year, date.month, date.day, 0, 0, 0,
                          text=date.strftime("%Y-%m-%d"))
        else:
            item.set_date(1901, 1, 1, 0, 0, 0, text="")
        items.append(item)

    if not self.channelCode == "tv4se":
        return data, items

    # Add Live TV
    # live = MediaItem("\a.: Live-TV :.",
    #                  "http://tv4events1-lh.akamaihd.net/i/EXTRAEVENT5_1@324055/master.m3u8",
    #                  type="video")
    # live.dontGroup = True
    # # live.isDrmProtected = True
    # live.isGeoLocked = True
    # live.isLive = True
    # items.append(live)

    Logger.debug("Pre-Processing finished")
    return data, items
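# Illustration of the per-day broadcast window built in the loop above. The
# date is fixed so the output is predictable; the base URL is the same one the
# method uses. Purely a sketch of the URL arithmetic.
import datetime

base_url = "http://webapi.tv4play.se/play/video_assets?exclude_node_nids=" \
           "&platform=tablet&per_page=32&is_live=false&product_groups=2&type=episode&per_page=100"

today = datetime.datetime(2017, 5, 4)
start_date = today - datetime.timedelta(2)
end_date = start_date + datetime.timedelta(1)
url = "%s&broadcast_from=%s&broadcast_to=%s&" % (
    base_url, start_date.strftime("%Y%m%d"), end_date.strftime("%Y%m%d"))
print(url)
# -> ...&broadcast_from=20170502&broadcast_to=20170503&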
def AddLiveChannel(self, data):
    Logger.Info("Performing Pre-Processing")
    # if self.channelCode != "vtm":
    #     return data, []

    username = AddonSettings.GetSetting("mediaan_username")
    if not username:
        return data, []

    items = []

    if self.channelCode == "vtm":
        item = MediaItem("Live VTM", "#livestream")
    else:
        item = MediaItem("Live Q2", "#livestream")
    item.type = "video"
    item.isLive = True
    item.fanart = self.fanart
    item.thumb = self.noImage
    now = datetime.datetime.now()
    item.SetDate(now.year, now.month, now.day, now.hour, now.minute, now.second)
    items.append(item)

    if self.channelCode == "vtm":
        recent = MediaItem("\a.: Recent :.", "https://vtm.be/video/volledige-afleveringen/id")
        recent.fanart = self.fanart
        recent.thumb = self.noImage
        recent.dontGroup = True
        items.append(recent)

    Logger.Debug("Pre-Processing finished")
    return data, items
def pre_process_folder_list(self, data):
    """ Performs pre-process actions for data processing.

    Accepts data from the process_folder_list method, BEFORE the items are
    processed. Allows setting of parameters (like title etc.) for the channel.
    Inside this method the <data> could be changed and additional items can
    be created.

    The return values should always be instantiated in at least ("", []).

    :param str|unicode data: The retrieved data that was loaded for the current item and URL.

    :return: A tuple of the data and a list of MediaItems that were generated.
    :rtype: tuple[str|JsonHelper,list[MediaItem]]

    """

    Logger.info("Performing Pre-Processing")
    items = []

    # Add a clip folder only on the first page and only if it is not already a clip page
    if "type=clip" not in self.parentItem.url \
            and "&page=1&" in self.parentItem.url \
            and "node_nids=" in self.parentItem.url:
        # get the category ID
        cat_start = self.parentItem.url.rfind("node_nids=")
        cat_id = self.parentItem.url[cat_start + 10:]
        Logger.debug("Currently doing CatId: '%s'", cat_id)

        url = "http://webapi.tv4play.se/play/video_assets?platform=tablet&per_page=%s&" \
              "type=clip&page=1&node_nids=%s&start=0" % (self.maxPageSize, cat_id,)
        clips_title = LanguageHelper.get_localized_string(LanguageHelper.Clips)
        clips = MediaItem(clips_title, url)
        clips.icon = self.icon
        clips.thumb = self.noImage
        clips.complete = True
        items.append(clips)

    # find the max number of items ("total_hits":2724)
    total_items = int(Regexer.do_regex(r'total_hits\W+(\d+)', data)[-1])
    Logger.debug("Found total of %s items. Only showing %s.", total_items, self.maxPageSize)
    if total_items > self.maxPageSize and "&page=1&" in self.parentItem.url:
        # create a group item
        more_title = LanguageHelper.get_localized_string(LanguageHelper.MorePages)
        more = MediaItem(more_title, "")
        more.icon = self.icon
        more.thumb = self.noImage
        more.complete = True
        items.append(more)

        # what is the total number of pages?
        current_page = 1
        total_pages = int(math.ceil(1.0 * total_items / self.maxPageSize))

        current_url = self.parentItem.url
        needle = "&page="
        while current_page < total_pages:
            # move to the next page
            current_page += 1

            url = current_url.replace("%s1" % (needle, ), "%s%s" % (needle, current_page))
            Logger.debug("Adding next page: %s\n%s", current_page, url)
            page = MediaItem(str(current_page), url)
            page.icon = self.icon
            page.thumb = self.noImage
            page.type = "page"
            page.complete = True

            if total_pages == 2:
                items = [page]
                break
            else:
                more.items.append(page)

    Logger.debug("Pre-Processing finished")
    return data, items
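# Worked example of the paging math above, using the sample count from the
# "total_hits":2724 comment and an assumed page size of 100 (self.maxPageSize
# is channel configuration, so the value here is an assumption).
import math

total_items = 2724
max_page_size = 100

total_pages = int(math.ceil(1.0 * total_items / max_page_size))
extra_pages = list(range(2, total_pages + 1))  # the "page" sub-items created above
print(total_pages, len(extra_pages))  # -> 28 27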
def StievieChannelMenu(self, data):
    items = []
    live = MediaItem("Live %s" % (self.parentItem.name, ), "#livestream")
    live.isLive = True
    live.type = "video"
    live.description = self.parentItem.description
    live.metaData = self.parentItem.metaData
    live.thumb = self.parentItem.thumb
    items.append(live)

    if not self.__dashStreamsSupported:
        # Only list the channel content if DASH is supported
        return data, items

    # https://epg.medialaan.io/epg/v2/schedule?date=2017-04-25&channels%5B%5D=vtm&channels%5B%5D=2be&channels%5B%5D=vitaya&channels%5B%5D=caz&channels%5B%5D=kzoom&channels%5B%5D=kadet&channels%5B%5D=qmusic
    # https://epg.medialaan.io/epg/v2/schedule?date=2017-04-25&channels[]=vtm&channels[]=2be&channels[]=vitaya&channels[]=caz&channels[]=kzoom&channels[]=kadet&channels[]=qmusic
    # https://epg.medialaan.io/epg/v2/schedule?date=2017-05-04&channels[]=vtm&channels[]=2be&channels[]=vitaya&channels[]=caz&channels[]=kzoom&channels[]=kadet&channels[]=qmusic
    channelId = self.parentItem.metaData["channelId"]
    channels = (channelId, )
    query = "&channels%%5B%%5D=%s" % ("&channels%5B%5D=".join(channels), )

    today = datetime.datetime.now()
    # Dutch day names (Monday through Sunday) used for the displayed titles
    days = ["Maandag", "Dinsdag", "Woensdag", "Donderdag", "Vrijdag", "Zaterdag", "Zondag"]
    for i in range(0, 7, 1):
        airDate = today - datetime.timedelta(i)
        Logger.Trace("Adding item for: %s", airDate)

        day = days[airDate.weekday()]
        if i == 0:
            day = "Vandaag"
        elif i == 1:
            day = "Gisteren"
        elif i == 2:
            day = "Eergisteren"
        title = "%04d-%02d-%02d - %s" % (airDate.year, airDate.month, airDate.day, day)
        url = "https://epg.medialaan.io/epg/v2/schedule?date=%d-%02d-%02d%s" % (
            airDate.year, airDate.month, airDate.day, query)

        extra = MediaItem(title, url)
        extra.complete = True
        extra.icon = self.icon
        extra.thumb = self.noImage
        extra.dontGroup = True
        extra.SetDate(airDate.year, airDate.month, airDate.day, text="")
        extra.metaData["airDate"] = airDate
        items.append(extra)

    return data, items
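# Sketch of the EPG schedule URL built above for a single day and channel. The
# channel id is a placeholder for self.parentItem.metaData["channelId"]; the
# channels[] encoding matches the commented example URLs in the method.
import datetime

channel_id = "vtm"  # hypothetical channel id
channels = (channel_id, )
query = "&channels%%5B%%5D=%s" % ("&channels%5B%5D=".join(channels), )

air_date = datetime.datetime(2017, 4, 25)
url = "https://epg.medialaan.io/epg/v2/schedule?date=%d-%02d-%02d%s" % (
    air_date.year, air_date.month, air_date.day, query)
print(url)
# -> https://epg.medialaan.io/epg/v2/schedule?date=2017-04-25&channels%5B%5D=vtm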