def OnActionFromContextMenu(self, action):
    """Performs the action from a custom contextmenu

    Arguments:
    action : String - The name of the channel method to call. The method is
                      invoked with the currently pickled MediaItem as its
                      single argument.

    The MediaItem is de-pickled from the keyword parameters. If it is not yet
    complete, the channel's ProcessVideoItem is called first to complete it.

    """

    Logger.Debug("Performing Custom Contextmenu command: %s", action)

    item = Pickler.DePickleMediaItem(self.params[self.keywordPickle])
    if not item.complete:
        Logger.Debug("The contextmenu action requires a completed item. Updating %s", item)
        item = self.channelObject.ProcessVideoItem(item)

        if not item.complete:
            Logger.Warning("UpdateVideoItem returned an item that had item.complete = False:\n%s", item)

    # invoke the channel method via getattr instead of building and exec-ing
    # a code string: this avoids dynamic code execution (unsafe, hard to
    # debug) while behaving identically for valid method names. A missing
    # method raises AttributeError, which is caught and logged below just
    # like the old exec failure was.
    Logger.Debug("Calling '%s'", action)
    try:
        returnItem = getattr(self.channelObject, action)(item)  # noqa: F841 - parity with old exec result
    except:
        Logger.Error("OnActionFromContextMenu :: Cannot execute '%s'.", action, exc_info=True)
        return
def AddClips(self, data):
    """Pre-processor that appends a single 'Clips' folder item to the listing.

    Arguments:
    data : string - the data retrieved for the current (parent) item; either
                    json (starts with '{') or html.

    Returns:
    A tuple of the (possibly replaced) data and a list of generated MediaItems.

    """

    Logger.Info("Adding Clips Pre-Processing")
    items = []

    # if the main list was retrieve using json, are the current data is json, just determine
    # the clip URL
    clipUrl = None
    if data.lstrip().startswith("{"):
        if self.parentItem.url.endswith("type=program"):
            # http://playapi.mtgx.tv/v3/videos?format=6723&order=-airdate&type=program
            # http://playapi.mtgx.tv/v3/videos?format=6723&order=-updated&type=clip" % (dataId,)
            clipUrl = self.parentItem.url.replace("type=program", "type=clip")
    else:
        # html data: now we determine the ID and load the json data
        dataId = Regexer.DoRegex('data-format-id="(\d+)"', data)[-1]
        Logger.Debug("Found FormatId = %s", dataId)
        # replace the html data with the json program listing for this format
        programUrl = "http://playapi.mtgx.tv/v3/videos?format=%s&order=-airdate&type=program" % (dataId, )
        data = UriHandler.Open(programUrl, proxy=self.proxy)
        clipUrl = "http://playapi.mtgx.tv/v3/videos?format=%s&order=-updated&type=clip" % (dataId, )

    if clipUrl is not None:
        clipTitle = LanguageHelper.GetLocalizedString(LanguageHelper.Clips)
        clipItem = mediaitem.MediaItem("\a.: %s :." % (clipTitle, ), clipUrl)
        clipItem.thumb = self.noImage
        items.append(clipItem)

    Logger.Debug("Pre-Processing finished")
    return data, items
def GetProxyForChannel(channelInfo):
    """ returns the proxy for a specific channel

    Arguments:
    channelInfo : ChannelInfo - The channel to get proxy info for

    Returns a ProxyInfo object, or None when no proxy group is configured
    for the channel.

    """

    proxies = AddonSettings.GetProxyGroupIds(asCountryCodes=True)

    proxyId = AddonSettings.GetProxyIdForChannel(channelInfo)
    # index 0 means "no proxy selected" in the settings list
    if proxyId == 0:
        Logger.Debug("No proxy configured for %s", channelInfo)
        return None

    prefix = proxies[proxyId]

    # read the proxy group settings; port falls back to 0 and type to "http"
    # when the corresponding settings are empty
    server = AddonSettings.__GetSetting("%s_proxy_server" % (prefix, ))
    port = int(AddonSettings.__GetSetting("%s_proxy_port" % (prefix, )) or 0)
    proxyType = AddonSettings.__GetSetting("%s_proxy_type" % (prefix, )) or "http"
    username = AddonSettings.__GetSetting("%s_proxy_username" % (prefix, ))
    password = AddonSettings.__GetSetting("%s_proxy_password" % (prefix, ))

    pInfo = ProxyInfo(server, port, scheme=proxyType.lower(), username=username, password=password)
    Logger.Debug("Found proxy for channel %s:\n%s", channelInfo, pInfo)
    return pInfo
def CreatePageItem(self, resultSet):
    """Creates a MediaItem of type 'page' using the resultSet from the regex.

    Arguments:
    resultSet : tuple(string) - the resultSet of the self.pageNavigationRegex

    Returns:
    A new MediaItem of type 'page'

    This method creates a new MediaItem from the Regular Expression or Json
    results <resultSet>. The method should be implemented by derived classes
    and are specific to the channel.

    """

    Logger.Debug("Starting CreatePageItem")
    Logger.Trace(resultSet)

    pageUrl = resultSet["href"]
    # the page number is whatever follows the last '=' in the url
    pageNumber = pageUrl.rsplit("=", 1)[-1]

    pageItem = mediaitem.MediaItem(pageNumber, pageUrl)
    pageItem.type = "page"

    Logger.Debug("Created '%s' for url %s", pageItem.name, pageItem.url)
    return pageItem
def CreatePageItem(self, resultSet):
    """Creates a MediaItem of type 'page' using the resultSet from the regex.

    Arguments:
    resultSet : tuple(string) - the resultSet of the self.pageNavigationRegex

    Returns:
    A new MediaItem of type 'page'

    This method creates a new MediaItem from the Regular Expression or Json
    results <resultSet>. The method should be implemented by derived classes
    and are specific to the channel.

    """

    Logger.Debug("Starting CreatePageItem")

    # concatenate all regex groups into a single (relative) url
    total = ''

    for result in resultSet:
        total = "%s%s" % (total, result)

    total = htmlentityhelper.HtmlEntityHelper.StripAmp(total)

    if not self.pageNavigationRegexIndex == '':
        # the indicated group holds the page label; the joined groups the url
        item = mediaitem.MediaItem(resultSet[self.pageNavigationRegexIndex], urlparse.urljoin(self.baseUrl, total))
    else:
        # no index configured: create an empty placeholder page item
        item = mediaitem.MediaItem("0", "")

    item.type = "page"
    item.fanart = self.fanart
    item.HttpHeaders = self.httpHeaders
    Logger.Debug("Created '%s' for url %s", item.name, item.url)
    return item
def UpdateVideoItemLive(self, item):
    """Updates an existing MediaItem with more data.

    Arguments:
    item : MediaItem - the MediaItem that needs to be updated

    Returns:
    The original item with more data added to it's properties. Used to update
    none complete MediaItems (self.complete = False). This could include opening
    the item's URL to fetch more data and then process that data or retrieve
    it's real media-URL.

    The method should at least:
    * cache the thumbnail to disk (use self.noImage if no thumb is available).
    * set at least one MediaItemPart with a single MediaStream.
    * set self.complete = True.

    if the returned item does not have a MediaItemPart then the self.complete flag
    will automatically be set back to False.

    """

    Logger.Debug('Starting UpdateVideoItem: %s', item.name)

    # start with a clean list of parts
    item.MediaItemParts = []
    part = item.CreateNewEmptyMediaPart()

    # the NPO backend expects a referer header pointing at the live base url
    referer = {"referer": self.baseUrlLive}

    streams = NpoStream.GetLiveStreamsFromNpo(item.url, Config.cacheDir, proxy=self.proxy, headers=referer)
    if streams:
        # the item url itself resolved directly to live streams
        Logger.Debug("Found live stream urls from item url")
        for s, b in streams:
            item.complete = True
            part.AppendMediaStream(s, b)
    else:
        # we need to determine radio or live tv
        Logger.Debug("Fetching live stream data from item url")
        htmlData = UriHandler.Open(item.url, proxy=self.proxy)

        mp3Urls = Regexer.DoRegex("""data-streams='{"url":"([^"]+)","codec":"[^"]+"}'""", htmlData)
        if len(mp3Urls) > 0:
            # radio: a direct MP3 stream is embedded in the page
            Logger.Debug("Found MP3 URL")
            part.AppendMediaStream(mp3Urls[0], 192)
        else:
            # live tv: find the metadata url and fetch streams from there
            jsonUrl = item.url
            if not item.url.startswith("http://e.omroep.nl/metadata/"):
                Logger.Debug("Finding the actual metadata url from %s", item.url)
                jsonUrls = Regexer.DoRegex('<div class="video-player-container"[^>]+data-prid="([^"]+)"', htmlData)
                jsonUrl = None
                # NOTE(review): only the last matched prid survives this loop
                for url in jsonUrls:
                    jsonUrl = "http://e.omroep.nl/metadata/%s" % (url,)

            for s, b in NpoStream.GetLiveStreamsFromNpo(jsonUrl, Config.cacheDir, proxy=self.proxy, headers=referer):
                item.complete = True
                part.AppendMediaStream(s, b)

    item.complete = True
    # Logger.Trace(item)
    return item
def CorrectFileName(self, filename):
    """Corrects a filename to prevent XFAT issues and other folder issues

    Arguments:
    filename : string - the original filename

    Returns:
    a filename that is save for the the XFAT and other file systems.

    """

    original = filename

    # filter out the chars that are not allowed
    filename = re.sub(self.inValidCharacters, "", filename)

    # and check for length on Xbox
    if self.maxFileNameLength and len(filename) > self.maxFileNameLength:
        Logger.Debug("Making sure the file lenght does not exceed the maximum allowed on Xbox")
        (base, ext) = os.path.splitext(filename)
        # the base name may use whatever room is left after the extension
        baseLength = self.maxFileNameLength - len(ext)

        # regex = "^.{1,%s}" % (baseLength)
        # base = re.compile(regex).findall(base)[-1]
        if len(base) > baseLength:
            # NOTE(review): truncates to baseLength - 1 chars, so the result
            # ends up one char shorter than strictly required — presumably
            # intentional safety margin; confirm before changing.
            base = base[0:baseLength - 1]
        filename = "%s%s" % (base, ext)

    Logger.Debug("Corrected from '%s' to '%s'", original, filename)
    return filename
def NoNickJr(self, data):
    """Performs pre-process actions for data processing/

    Arguments:
    data : string - the retrieve data that was loaded for the current
                    item and URL.

    Returns:
    A tuple of the data (with any Nick Jr content stripped) and a list of
    MediaItems that were generated.

    """

    Logger.Info("Performing Pre-Processing")
    items = []

    # everything from the Nick Jr header onwards should be dropped
    nickJrStart = data.find("<h2 class='row-title'>Nick Jr")
    Logger.Debug("Pre-Processing finished")

    if nickJrStart <= 0:
        return data, items

    Logger.Debug("Nick Jr content found starting at %d", nickJrStart)
    return data[:nickJrStart], items
def GetLocalIPHeaderForChannel(channelInfo):
    """ returns the local IP for a specific channel

    Arguments:
    channelInfo : ChannelInfo - The channel to get proxy info for

    Returns a {"X-Forwarded-For": <ip>} header dict, or None when no Local IP
    is configured for the channel or the selected country.

    """

    countries = AddonSettings.GetAvailableCountries(asCountryCodes=True)
    settingId = AddonSettings.__LOCAL_IP_SETTING_PATTERN % (channelInfo.guid, )
    countryId = int(AddonSettings.GetSetting(settingId) or 0)
    # index 0 means "no country selected" in the settings list
    if countryId == 0:
        Logger.Debug("No Local IP configured for %s", channelInfo)
        return None

    prefix = countries[countryId]
    Logger.Debug("Country settings '%s' configured for Local IP for %s", prefix, channelInfo)

    server = AddonSettings.GetSetting("%s_local_ip" % (prefix, ))
    if not server:
        Logger.Debug("No Local IP found for country '%s'", prefix)
        return None

    Logger.Debug("Found Local IP for channel %s:\nLocal IP: %s", channelInfo, server)
    return {"X-Forwarded-For": server}
def PreProcessFolderList(self, data):
    """Performs pre-process actions for data processing/

    Arguments:
    data : string - the retrieve data that was loaded for the current
                    item and URL.

    Returns:
    A tuple of the data and a list of MediaItems that were generated.

    For carousel urls the raw data is replaced by the 'carousel' value from
    the json response.

    """

    Logger.Info("Performing Pre-Processing")
    items = []

    isCarousel = "episode.json" in self.parentItem.url
    if isCarousel:
        Logger.Debug("Fetching Carousel data")
        jsonData = JsonHelper(data)
        data = jsonData.GetValue("carousel")

    Logger.Debug("Pre-Processing finished")
    return data, items
def AddPageItems(self, data):
    """ Adds page items to the main listing

    @param data: the Parsed Data
    @return: a tuple of data and items

    NOTE(review): the data part of the returned tuple is the JsonHelper
    object, not the raw string — presumably later parsers accept that;
    confirm before changing.

    """

    Logger.Info("Performing Pre-Processing")
    items = []
    json = JsonHelper(data)
    totalResults = json.GetValue("totalResults")
    fromValue = json.GetValue("from")
    sizeValue = json.GetValue("size")

    # add a "more pages" item while there are results left to page through
    if fromValue + sizeValue < totalResults:
        morePages = LanguageHelper.GetLocalizedString(LanguageHelper.MorePages)
        url = self.parentItem.url.split('?')[0]
        url = "%s?size=%s&from=%s&sort=Nieuwste" % (url, sizeValue, fromValue + sizeValue)
        Logger.Debug("Adding next-page item from %s to %s", fromValue + sizeValue, fromValue + sizeValue + sizeValue)

        nextPage = mediaitem.MediaItem(morePages, url)
        nextPage.icon = self.parentItem.icon
        nextPage.fanart = self.parentItem.fanart
        nextPage.thumb = self.parentItem.thumb
        nextPage.dontGroup = True
        items.append(nextPage)

    Logger.Debug("Pre-Processing finished")
    return json, items
def ListSomeVideos(self, data):
    """ If there was a Lastest section in the data return those video files

    Arguments:
    data : string - the retrieved (json) data for the current item.

    Returns:
    A tuple of the data and a list of generated video MediaItems.

    """

    items = []
    if not self.__showSomeVideosInListing:
        return data, items

    jsonData = JsonHelper(data)
    sections = jsonData.GetValue("relatedVideoContent", "relatedVideosAccordion")
    # drop the tabs that were explicitly excluded
    sections = filter(lambda s: s['type'] not in self.__excludedTabs, sections)
    Logger.Debug("Found %s folders/tabs", len(sections))

    if len(sections) == 1:
        # we should exclude that tab from the folders list and show the videos here
        self.__listedRelatedTab = sections[0]["type"]
        # otherwise the default "RELATED_VIDEO_TABS_LATEST" is used
        Logger.Debug("Excluded tab '%s' which will be show as videos", self.__listedRelatedTab)

    # list the videos of the tab that was selected for inline listing
    for section in sections:
        if not section["type"] == self.__listedRelatedTab:
            continue
        for videoData in section['videos']:
            items.append(self.CreateJsonItem(videoData))

    return data, items
def UpdateVideoItem(self, item):
    """ Accepts an item. It returns an updated item. Usually retrieves the
    MediaURL and the Thumb! It should return a completed item.

    """

    Logger.Debug('Starting UpdateVideoItem for %s (%s)', item.name, self.channelName)

    if not item.url.endswith("m3u8"):
        # the page embeds a player config json (see sample below) from which
        # we take the HLS master playlist url
        data = UriHandler.Open(item.url, proxy=self.proxy)
        jsonData = Regexer.DoRegex(self.mediaUrlRegex, data)
        if not jsonData:
            Logger.Error("Cannot find JSON stream info.")
            return item

        json = JsonHelper(jsonData[0])
        Logger.Trace(json.json)
        stream = json.GetValue("source", "hls")
        Logger.Debug("Found HLS: %s", stream)
    else:
        # the item url is already an m3u8 playlist
        stream = item.url

    part = item.CreateNewEmptyMediaPart()
    for s, b in M3u8.GetStreamsFromM3u8(stream, self.proxy):
        item.complete = True
        # s = self.GetVerifiableVideoUrl(s)
        part.AppendMediaStream(s, b)

    # Sample of the embedded player config the regex above matches:
    # var playerConfig = {"id":"mediaplayer", ..., "source":{"hls":"http:\/\/vod.stream.vrt.be\/ketnet\/..._.mp4\/playlist.m3u8"}, ...}
    return item
def AddSearchAndGenres(self, data):
    """Performs pre-process actions for data processing, in this case adding a search

    Arguments:
    data : string - the retrieve data that was loaded for the current
                    item and URL.

    Returns:
    A tuple of the data and a list of MediaItems that were generated.

    Adds a Search item and a Genres folder (filled with genre sub-items parsed
    from the page) to the main listing. When the parent item carries a genre
    in its metaData, only that genre is parsed and no extra items are added.

    """

    Logger.Info("Performing Pre-Processing")
    items = []

    if self.parentItem is not None and "genre" in self.parentItem.metaData:
        self.__genre = self.parentItem.metaData["genre"]
        Logger.Debug("Parsing a specific genre: %s", self.__genre)
        return data, items

    searchItem = mediaitem.MediaItem("\a.: Sök :.", "searchSite")
    searchItem.complete = True
    searchItem.thumb = self.noImage
    searchItem.dontGroup = True
    searchItem.fanart = self.fanart
    # searchItem.SetDate(2099, 1, 1, text="")
    # -> No items have dates, so adding this will force a date sort in Retrospect
    items.append(searchItem)

    genresItem = mediaitem.MediaItem("\a.: Genrer :.", "")
    genresItem.complete = True
    genresItem.thumb = self.noImage
    genresItem.dontGroup = True
    genresItem.fanart = self.fanart
    items.append(genresItem)

    # find the actual genres
    genreRegex = '<li[^>]+genre[^>]*><button[^>]+data-value="(?<genre>[^"]+)"[^>]*>(?<title>[^>]+)</button></li>'
    genreRegex = Regexer.FromExpresso(genreRegex)
    genres = Regexer.DoRegex(genreRegex, data)
    for genre in genres:
        if genre["genre"] == "all":
            # "all" is the listing itself, not a genre filter
            continue
        genreItem = mediaitem.MediaItem(genre["title"], self.mainListUri)
        genreItem.complete = True
        genreItem.thumb = self.noImage
        genreItem.fanart = self.fanart
        # the genre is passed along via metaData (see the early return above)
        genreItem.metaData = {"genre": genre["genre"]}
        genresItem.items.append(genreItem)

    Logger.Debug("Pre-Processing finished")
    return data, items
def __UpdateItemFromVideoReferences(self, item, videos, subtitles=None):
    """Fills the item's MediaItemPart with streams from the video references.

    Arguments:
    item   : MediaItem - the item to update; its existing parts are reset.
    videos : list of dicts with at least 'url' and a 'format' or 'playerType'.

    Keyword Arguments:
    subtitles : list of dicts with 'format' and 'url', or None.

    Returns the item with item.complete set to True.

    """

    item.MediaItemParts = []
    part = item.CreateNewEmptyMediaPart()
    if self.localIP:
        part.HttpHeaders.update(self.localIP)

    for video in videos:
        # 'format' is preferred; 'playerType' is the fallback field
        videoFormat = video.get("format", "")
        if not videoFormat:
            videoFormat = video.get("playerType", "")
        videoFormat = videoFormat.lower()

        if "dash" in videoFormat or "hds" in videoFormat:
            Logger.Debug("Skipping video format: %s", videoFormat)
            continue
        Logger.Debug("Found video item for format: %s", videoFormat)

        url = video['url']
        if len(filter(lambda s: s.Url == url, part.MediaStreams)) > 0:
            Logger.Debug("Skippping duplicate Stream url: %s", url)
            continue

        if "m3u8" in url:
            altIndex = url.find("m3u8?")
            # altIndex = videoUrl.find("~uri")
            if altIndex > 0:
                # strip the query string from the m3u8 url
                url = url[0:altIndex + 4]

            for s, b in M3u8.GetStreamsFromM3u8(url, proxy=self.proxy, headers=part.HttpHeaders):
                part.AppendMediaStream(s, b)

        elif video["url"].startswith("rtmp"):
            # just replace some data in the URL
            # NOTE(review): video[1] on a dict looks suspicious — presumably a
            # bitrate was intended; confirm before relying on the rtmp branch.
            part.AppendMediaStream(self.GetVerifiableVideoUrl(video["url"]).replace("_definst_", "?slist="), video[1])
        else:
            part.AppendMediaStream(url, 0)

    if subtitles:
        Logger.Info("Found subtitles to play")
        for sub in subtitles:
            subFormat = sub["format"].lower()
            url = sub["url"]
            if subFormat == "websrt":
                subUrl = url
            # elif subFormat == "webvtt":
            #     Logger.Info("Found M3u8 subtitle, replacing with WSRT")
            #     start, name, index = sub[-1].rsplit("/", 2)
            #     subUrl = "%s/%s/%s.wsrt" % (start, name, name)
            else:
                # look for more
                continue

            part.Subtitle = subtitlehelper.SubtitleHelper.DownloadSubtitle(subUrl, format="srt", proxy=self.proxy)
            # stop when finding one
            break

    item.complete = True
    return item
def StievieCreateEpgItems(self, epg):
    """Creates video MediaItems from a single channel's EPG listing.

    Arguments:
    epg : dict - the EPG data for one channel, with 'id' and 'items' keys.

    Returns a list of MediaItems. Items that are inside their black-out
    period are skipped.

    """

    Logger.Trace(epg)
    Logger.Debug("Processing EPG for channel %s", epg["id"])

    items = []
    summerTime = time.localtime().tm_isdst
    now = datetime.datetime.now()

    for resultSet in epg["items"]:
        # if not resultSet["parentSeriesOID"]:
        #     continue

        # Does not always work
        # videoId = resultSet["epgId"].replace("-", "_")
        # url = "https://vod.medialaan.io/vod/v2/videos/%s_Stievie_free" % (videoId, )
        videoId = resultSet["programOID"]
        url = "https://vod.medialaan.io/vod/v2/videos?episodeIds=%s&limit=10&offset=0&sort=broadcastDate&sortDirection=asc" % (videoId, )
        title = resultSet["title"]
        if resultSet["episode"] and resultSet["season"]:
            title = "%s - s%02de%02d" % (title, resultSet["season"], resultSet["episode"])

        if "startTime" in resultSet and resultSet["startTime"]:
            dateTime = resultSet["startTime"]
            dateValue = DateHelper.GetDateFromString(dateTime, dateFormat="%Y-%m-%dT%H:%M:%S.000Z")
            # Convert to Belgium posix time stamp (UTC+1 plus DST offset)
            dateValue2 = time.mktime(dateValue) + (1 + summerTime) * 60 * 60
            # Conver the posix to a time stamp
            startTime = DateHelper.GetDateFromPosix(dateValue2)
            title = "%02d:%02d - %s" % (startTime.hour, startTime.minute, title)

            # Check for items in their black-out period
            if "blackout" in resultSet and resultSet["blackout"]["enabled"]:
                blackoutDuration = resultSet["blackout"]["duration"]
                blackoutStart = startTime + datetime.timedelta(seconds=blackoutDuration)
                if blackoutStart < now:
                    Logger.Debug("Found item in Black-out period: %s (started at %s)", title, blackoutStart)
                    continue

        # else:
        #     startTime = self.parentItem.metaData["airDate"]

        item = MediaItem(title, url)
        item.type = "video"
        item.isGeoLocked = resultSet["geoblock"]
        item.description = resultSet["shortDescription"]
        # item.SetDate(startTime.year, startTime.month, startTime.day)

        if "images" in resultSet and resultSet["images"] and "styles" in resultSet["images"][0]:
            images = resultSet["images"][0]["styles"]
            # if "1520x855" in images:
            #     item.fanart = images["1520x855"]
            if "400x225" in images:
                item.thumb = images["400x225"]

        items.append(item)

    return items
def GetStreamsFromM3u8(url, proxy=None, headers=None, appendQueryString=False):
    """ Parsers standard M3U8 lists and returns a list of tuples with streams
    and bitrates that can be used by other methods.

    @param headers:           (dict) Possible HTTP Headers
    @param proxy:             (Proxy) The proxy to use for opening
    @param url:               (String) The url to download
    @param appendQueryString: (boolean) should the existing query string be appended?

    Can be used like this:

        part = item.CreateNewEmptyMediaPart()
        for s, b in M3u8.GetStreamsFromM3u8(m3u8Url, self.proxy):
            item.complete = True
            # s = self.GetVerifiableVideoUrl(s)
            part.AppendMediaStream(s, b)

    """

    streams = []

    data = UriHandler.Open(url, proxy, additionalHeaders=headers)
    Logger.Trace(data)

    qs = None
    if appendQueryString and "?" in url:
        base, qs = url.split("?", 1)
        Logger.Info("Going to append QS: %s", qs)

    Logger.Debug("Processing M3U8 Streams: %s", url)

    # the first group drops the last 3 digits of the BANDWIDTH value, so the
    # reported bitrate is in kbps instead of bps
    needle = "BANDWIDTH=(\d+)\d{3}[^\n]*\W+([^\n]+.m3u8[^\n\r]*)"
    needles = Regexer.DoRegex(needle, data)

    baseUrlLogged = False
    baseUrl = url[:url.rindex("/")]
    for n in needles:
        # see if we need to append a server path
        Logger.Trace(n)

        if "://" not in n[1]:
            # relative playlist entry: prefix it with the master playlist's base
            if not baseUrlLogged:
                Logger.Debug("Using baseUrl %s for M3u8", baseUrl)
                baseUrlLogged = True
            stream = "%s/%s" % (baseUrl, n[1])
        else:
            if not baseUrlLogged:
                Logger.Debug("Full url found in M3u8")
                baseUrlLogged = True
            stream = n[1]
        bitrate = n[0]

        # carry the original query string over to the sub-playlist urls
        if qs is not None and stream.endswith("?null="):
            stream = stream.replace("?null=", "?%s" % (qs, ))
        elif qs is not None:
            stream = "%s?%s" % (stream, qs)

        streams.append((stream, bitrate))

    Logger.Debug("Found %s substreams in M3U8", len(streams))
    return streams
def DoRegex(regex, data):
    """Performs a regular expression

    Arguments:
    regex : string - the regex to perform on the data.
    data  : string - the data to perform the regex on.

    Returns:
    A list of matches that came from the regex.findall method. Returns an
    empty list when matching fails with an error.

    Performs a regular expression findall on the <data> and returns the results
    that came from the method.

    From the sre.py library: If one or more groups are present in the pattern,
    return a list of groups; this will be a list of tuples if the pattern has
    more than one group. Empty matches are included in the result.

    When <regex> is a list/tuple of patterns, every pattern is matched and each
    result is prefixed with the index of the pattern that produced it.

    """

    try:
        if not isinstance(regex, (tuple, list)):
            # single pattern: dispatch on named-group vs plain regex
            if "?P<" in regex:
                return Regexer.__DoDictionaryRegex(regex, data)
            else:
                return Regexer.__DoRegex(regex, data)

        Logger.Debug("Performing multi-regex find on '%s'", regex)
        results = []
        count = 0
        for r in regex:
            if "?P<" in r:
                regexResults = Regexer.__DoDictionaryRegex(r, data)
                # add to the results with a count in front of the results
                results += map(lambda x: (count, x), regexResults)
            else:
                regexResults = Regexer.__DoRegex(r, data)
                if len(regexResults) > 0:
                    if isinstance(regexResults[0], (tuple, list)):
                        # if a tuple/list was returned, prepend it with the count
                        results += map(lambda x: (count, ) + x, regexResults)
                    else:
                        # create a tuple with the results
                        results += map(lambda x: (count, x), regexResults)
            # increase count
            count += 1
        Logger.Debug("Returning %s results", len(results))
        return results
    except Exception:
        # was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the intended best-effort behavior
        Logger.Critical('error regexing', exc_info=True)
        return []
def http_response(self, request, response):  # @UnusedVariables
    """urllib2 handler hook that transparently decompresses responses.

    Arguments:
    request  : the urllib2 request (unused).
    response : the urllib2 response to possibly decompress.

    Returns the original response when no content-encoding header is present,
    otherwise a new addinfourl wrapping the decompressed (or, on failure, the
    raw) data with the content-encoding header updated accordingly.

    """

    Logger.Trace("Processing HTTP response for possible decompression")
    # Logger.Trace("%s\n%s", response.url, response.info())

    oldResponse = response

    # do the decompression
    contentEncoding = response.headers.get("content-encoding")
    if contentEncoding:
        responseEncoding = contentEncoding
        # the body can only be read once, so read it here for all branches
        data = response.read()
        try:
            if "gzip" in contentEncoding:
                Logger.Debug("Decompressing '%s' response", contentEncoding)
                # the GzipFileReader expect a StringIO object
                gzipStream = StringIO(data)
                fileStream = gzip.GzipFile(fileobj=gzipStream)
                responseEncoding = "none"
            elif "deflate" in contentEncoding:
                Logger.Debug("Decompressing '%s' response", contentEncoding)
                fileStream = StringIO(zlib.decompress(data))
                responseEncoding = "none"
            elif contentEncoding == "none":
                Logger.Debug("Nothing to decompress. Content-encoding: '%s'", contentEncoding)
                # we have already used the response.read() so we need to create
                # a new filestream with the original data in it.
                fileStream = StringIO(data)
            else:
                Logger.Warning("Unknown Content-Encoding: '%s'", contentEncoding)
                # we have already used the response.read() so we need to create
                # a new filestream with the original data in it.
                fileStream = StringIO(data)
        except:
            Logger.Error("Cannot Decompress this response", exc_info=True)
            # we have already used the response.read() so we need to create
            # a new filestream with the original data in it.
            fileStream = StringIO(data)

        # rebuild a response object around the (decompressed) stream
        response = urllib2.addinfourl(fileStream, oldResponse.headers, oldResponse.url, oldResponse.code)
        response.msg = oldResponse.msg
        # Update the content-encoding header
        response.headers["content-encoding"] = responseEncoding
        return response
    else:
        Logger.Debug("No Content-Encoding header found")
        return oldResponse
def ChangePin(self, applicationKey=None):
    # type: (str) -> bool
    """ Stores an existing ApplicationKey using a new PIN

    @param applicationKey: an existing ApplicationKey that will be stored. If
                           none specified, the existing ApplicationKey of the
                           Vault will be used.
    @return: indication of success

    """

    Logger.Info("Updating the ApplicationKey with a new PIN")

    if self.__newKeyGeneratedInConstructor:
        Logger.Info("A key was just generated, no need to change PINs.")
        return True

    if applicationKey is None:
        Logger.Debug("Using the ApplicationKey from the vault.")
        applicationKey = Vault.__Key
    else:
        Logger.Debug("Using the ApplicationKey from the input parameter.")

    if not applicationKey:
        raise ValueError("No ApplicationKey specified.")

    # Now we get a new PIN and (re)encrypt
    pin = XbmcWrapper.ShowKeyBoard(
        heading=LanguageHelper.GetLocalizedString(LanguageHelper.VaultNewPin),
        hidden=True)
    if not pin:
        XbmcWrapper.ShowNotification(
            "",
            LanguageHelper.GetLocalizedString(LanguageHelper.VaultNoPin),
            XbmcWrapper.Error)
        return False

    # ask the PIN twice to guard against typos
    pin2 = XbmcWrapper.ShowKeyBoard(
        heading=LanguageHelper.GetLocalizedString(LanguageHelper.VaultRepeatPin),
        hidden=True)
    if pin != pin2:
        Logger.Critical("Mismatch in PINs")
        XbmcWrapper.ShowNotification(
            "",
            LanguageHelper.GetLocalizedString(LanguageHelper.VaultPinsDontMatch),
            XbmcWrapper.Error)
        return False

    # the setting id is prefixed so decryption can validate the result
    encryptedKey = "%s=%s" % (self.__APPLICATION_KEY_SETTING, applicationKey)

    # let's generate a pin using the scrypt password-based key derivation
    pinKey = self.__GetPBK(pin)
    encryptedKey = self.__Encrypt(encryptedKey, pinKey)
    AddonSettings.SetSetting(Vault.__APPLICATION_KEY_SETTING, encryptedKey)

    Logger.Info("Successfully updated the Retrospect PIN")
    return True
def ShowFavourites(self, channel, replaceExisting=False):
    """ Show the favourites

    Arguments:
    channel : Channel - The channel to show favourites for.
              Might be None to show all.

    Keyword Arguments:
    replaceExisting : boolean - if True it will replace the current list

    """

    Logger.Debug("Plugin::ShowFavourites")

    if channel is None:
        Logger.Info("Showing all favourites")
    else:
        Logger.Info("Showing favourites for: %s", channel)

    stopWatch = stopwatch.StopWatch("Plugin Favourites timer", Logger.Instance())

    try:
        ok = True
        f = Favourites(Config.favouriteDir)
        favs = f.List(channel)

        # get (actionUrl, pickle) tuples
        # favs = map(lambda (a, p): (a, Pickler.DePickleMediaItem(p)), favs)
        if len(favs) == 0:
            ok = self.__ShowEmptyInformation(favs, favs=True)

        stopWatch.Lap("Items retrieved")

        # create the XBMC items
        xbmcItems = map(lambda item: self.__ConvertMainlistItemToXbmcItem(channel, item[1], True, item[0]), favs)
        stopWatch.Lap("%s items for Kodi generated" % (len(xbmcItems),))

        # add them to XBMC
        ok = ok and xbmcplugin.addDirectoryItems(self.handle, xbmcItems, len(xbmcItems))
        # add sort handle, but don't use any dates as they make no sense for favourites
        self.__AddSortMethodToHandle(self.handle)

        # set the content
        xbmcplugin.setContent(handle=self.handle, content=self.contentType)

        # make sure we do not cache this one to disc!
        xbmcplugin.endOfDirectory(self.handle, succeeded=ok, updateListing=replaceExisting, cacheToDisc=False)
        stopWatch.Lap("items send to Kodi")

        Logger.Debug("Plugin::Favourites completed. Returned %s item(s)", len(favs))
        stopWatch.Stop()
    except:
        # best-effort: show a notification and close the directory on failure
        XbmcWrapper.ShowNotification(LanguageHelper.GetLocalizedString(LanguageHelper.ErrorId),
                                     LanguageHelper.GetLocalizedString(LanguageHelper.ErrorList),
                                     XbmcWrapper.Error, 4000)
        Logger.Error("Plugin::Error parsing favourites", exc_info=True)
        xbmcplugin.endOfDirectory(self.handle, False)
def GetChannel(self, className, channelCode):
    """ Fetches a single channel for a given className and channelCode

    If updated channels are found, the those channels are indexed and
    the channel index is rebuild.

    @param className:   the chn_<name> class name
    @param channelCode: a possible channel code within the channel set
    @return: a ChannelInfo object

    """

    channelSet = self.__channelIndex[self.__CHANNEL_INDEX_CHANNEL_KEY].get(className, None)
    if channelSet is None:
        Logger.Error("Could not find info for channelClass '%s'.", className)
        return None

    channelSetInfoPath = channelSet[self.__CHANNEL_INDEX_CHANNEL_INFO_KEY]
    channelSetVersion = channelSet[self.__CHANNEL_INDEX_CHANNEL_VERSION_KEY]
    if not os.path.isfile(channelSetInfoPath) and not self.__reindexed:
        # missing file and we have not re-indexed yet: rebuild and retry once
        Logger.Warning("Missing channelSet file: %s.", channelSetInfoPath)
        self.__RebuildIndex()
        return self.GetChannel(className, channelCode)

    channelInfos = ChannelInfo.FromJson(channelSetInfoPath, channelSetVersion)
    # select the channel matching the given code (None matches code-less channels)
    if channelCode is None:
        channelInfos = filter(lambda ci: ci.channelCode is None, channelInfos)
    else:
        channelInfos = filter(lambda ci: ci.channelCode == channelCode, channelInfos)

    if len(channelInfos) != 1:
        Logger.Error("Found none or more than 1 matches for '%s' and '%s' in the channel index.",
                     className, channelCode or "None")
        return None
    else:
        Logger.Debug("Found single channel in the channel index: %s.", channelInfos[0])

    if self.__IsChannelSetUpdated(channelInfos[0]):
        # let's see if the index has already been updated this section, of not, do it and
        # restart the ChannelRetrieval.
        if not self.__reindexed:
            # rebuild and restart
            Logger.Warning("Re-index channel index due to channelSet update: %s.", channelSetInfoPath)
            self.__RebuildIndex()
        else:
            Logger.Warning("Found updated channelSet: %s.", channelSetInfoPath)

        # new we should init all channels by loading them all, just to be shure that all is ok
        Logger.Debug("Going to fetching all channels to init them all.")
        self.GetChannels()
        return self.GetChannel(className, channelCode)

    return channelInfos[0].GetChannel()
def __FetchActualStream(idaData, proxy):
    """Extracts the 'stream' url from the IDA json data and resolves it.

    Arguments:
    idaData : string - the json data containing a 'stream' value.
    proxy   : Proxy  - the proxy to use for the HEAD request.

    Returns the final (redirected) m3u8 url.

    """

    streamJson = JsonHelper(idaData, Logger.Instance())
    streamUrl = streamJson.GetValue('stream')
    Logger.Debug("Fetching redirected stream for: %s", streamUrl)

    # the m3u8 URL answers with an HTTP 302 redirect; a header-only request
    # resolves it to the final location without downloading the playlist
    unusedHeadData, streamUrl = UriHandler.Header(streamUrl, proxy=proxy)  # : @UnusedVariables
    Logger.Debug("Found redirected stream: %s", streamUrl)

    return streamUrl
def __UpdateFromUrl(self, url, zipName):
    """ Update a channel from an URL

    @param url:     The url to download
    @param zipName: The name to give the download

    Downloads the zip, extracts it into the 'deploy' folder (replacing any
    existing folder of the same name), removes the zip and notifies the user
    that a restart is needed.

    """

    Logger.Info("Going to update from %s", url)

    # wrapper = XbmcDialogProgressWrapper("Updating XOT", url)
    # destFilename = UriHandler.Download(url, zipName, Config.cacheDir, wrapper.ProgressUpdate)
    destFilename = UriHandler.Download(url, zipName, Config.cacheDir, self.__RetrieveProgressDummy)
    Logger.Debug("Download succeeded: %s", destFilename)

    # we extract to the deploy folder, so with the first start of XOT, the new channel is deployed
    deployDir = os.path.abspath(os.path.join(Config.rootDir, "deploy"))
    zipFile = zipfile.ZipFile(destFilename)

    # now extract
    first = True
    Logger.Debug("Extracting %s to %s", destFilename, deployDir)
    for name in zipFile.namelist():
        if first:
            # remove a pre-existing target folder before the first entry is written
            folder = os.path.split(name)[0]
            if os.path.exists(os.path.join(deployDir, folder)):
                shutil.rmtree(os.path.join(deployDir, folder))
            first = False

        if not name.endswith("/") and not name.endswith("\\"):
            # only files need writing; directories are created on demand
            fileName = os.path.join(deployDir, name)
            path = os.path.dirname(fileName)
            if not os.path.exists(path):
                os.makedirs(path)
            Logger.Debug("Extracting %s", fileName)
            outfile = open(fileName, 'wb')
            outfile.write(zipFile.read(name))
            outfile.close()

    zipFile.close()
    os.remove(destFilename)
    Logger.Info("Update completed and zip file (%s) removed", destFilename)

    message = LanguageHelper.GetLocalizedString(LanguageHelper.UpdateCompleteId, splitOnPipes=False) % (zipName.replace(".zip", ""), )
    message = message.split("|")
    XbmcWrapper.ShowNotification(LanguageHelper.GetLocalizedString(LanguageHelper.RestartId),
                                 message, displayTime=5000, logger=Logger.Instance())
def PreProcessFolderList(self, data):
    """Performs pre-process actions for data processing/

    Arguments:
    data : string - the retrieve data that was loaded for the current
                    item and URL.

    Returns:
    A tuple of the data and a list of MediaItems that were generated.

    Strips the "related videos" section from the data and, for live urls,
    keeps only the broadcast section. Adds a "Klipp" item when the page
    offers a clips tab.

    """

    Logger.Info("Performing Pre-Processing")
    items = []

    if "live=1" in self.parentItem.url:
        # don't add folders, this should no longer be the case as we use AJAX pages now.
        start = data.find('<div class="svtUnit svtNth-1">')
        end = data.find('<div class="playBoxContainer playBroadcastItemLast">')
        Logger.Debug("Stripping folders for live items")
        return data[start:end], items

    # if "=klipp" in self.parentItem.url:
    #     self.pageNavigationRegex = self.pageNavigationRegexBase % ("klipp", )
    # elif "tab=news" in self.parentItem.url:
    #     self.pageNavigationRegex = self.pageNavigationRegexBase % ("news", )
    # else:
    #     self.pageNavigationRegex = self.pageNavigationRegexBase % ("program", )
    # Logger.Debug("PageNav Regex set to: %s", self.pageNavigationRegex)

    # drop the "videos in same category" section and everything after it
    end = data.find('<div id="playJs-videos-in-same-category" ')
    Logger.Debug("Stripping from position: %s", end)
    data = data[:end]

    if '<a href="?tab=klipp"' in data and self.parentItem.name != self.__klippName:
        klippItem = mediaitem.MediaItem(self.__klippName, self.parentItem.url.replace("tab=program", self.__klippUrlIndicator))
        klippItem.icon = self.icon
        klippItem.thumb = self.parentItem.thumb
        klippItem.complete = True
        items.append(klippItem)

    Logger.Debug("Pre-Processing finished")
    return data, items
def UpdateVideoItemLive(self, item):
    """Updates an existing MediaItem with more data.

    Arguments:
    item : MediaItem - the MediaItem that needs to be updated

    Returns:
    The original item with more data added to it's properties. Used to update
    none complete MediaItems (self.complete = False). This could include opening
    the item's URL to fetch more data and then process that data or retrieve
    it's real media-URL.

    The method should at least:
    * cache the thumbnail to disk (use self.noImage if no thumb is available).
    * set at least one MediaItemPart with a single MediaStream.
    * set self.complete = True.

    if the returned item does not have a MediaItemPart then the self.complete flag
    will automatically be set back to False.

    """

    Logger.Debug('Starting UpdateVideoItem: %s', item.name)

    # start from a clean slate: drop any parts a previous update may have added
    item.MediaItemParts = []
    part = item.CreateNewEmptyMediaPart()

    # we need to determine radio or live tv
    Logger.Debug("Fetching live stream data from item url: %s", item.url)
    htmlData = UriHandler.Open(item.url, proxy=self.proxy)

    # radio: the page embeds a data-streams attribute with a direct MP3/stream url
    mp3Urls = Regexer.DoRegex("""data-streams='{"url":"([^"]+)","codec":"[^"]+"}'""", htmlData)
    if len(mp3Urls) > 0:
        Logger.Debug("Found MP3 URL")
        # 192 is the assumed bitrate for the radio stream — not read from the page
        part.AppendMediaStream(mp3Urls[0], 192)
    else:
        # live tv: we need the media/episode id to resolve the actual stream
        Logger.Debug("Finding the actual metadata url from %s", item.url)
        # NPO3 normal stream had wrong subs
        if "npo-3" in item.url and False:
            # branch deliberately disabled via `and False`; kept for easy
            # re-enabling should NPO3 swap the streams again
            # NPO3 has apparently switched the normal and hearing impaired streams?
            jsonUrls = Regexer.DoRegex('<div class="video-player-container"[^>]+data-alt-prid="([^"]+)"', htmlData)
        else:
            jsonUrls = Regexer.DoRegex('<npo-player media-id="([^"]+)"', htmlData)

        # first matching id wins: return immediately from the loop's first pass
        for episodeId in jsonUrls:
            return self.__UpdateVideoItem(item, episodeId)

        # no media-id found at all: give up without marking the item complete
        Logger.Warning("Cannot update live item: %s", item)
        return item

    # only the MP3 (radio) path reaches this point; tv paths returned above
    item.complete = True
    return item
def GetRegister():
    """Returns the currently active channel register singleton.

    Used for backward compatibility with Xbox.

    """

    if not ChannelImporter.__channelImporter:
        Logger.Debug("Creating a new ChannelImporter")
        # NOTE(review): the instance is not assigned here — presumably the
        # ChannelImporter constructor registers itself on the class attribute;
        # confirm __init__ sets __channelImporter, otherwise None is returned.
        ChannelImporter()

    register = ChannelImporter.__channelImporter
    Logger.Debug("Fetching an existing channelImporter: %s", register)
    return register
def GetRegister():
    """ Returns the currently active ChannelIndex singleton.

    Creates the indexer on first use; used for backward compatibility with
    Xbox.

    """

    indexer = ChannelIndex.__channelIndexer
    if indexer:
        Logger.Debug("Fetching an existing %s.", indexer)
    else:
        Logger.Debug("Creating a new ChannelIndex-er.")
        indexer = ChannelIndex()
        ChannelIndex.__channelIndexer = indexer

    return indexer
def GetXBMCPlayListItem(self, parent, bitrate=None, name=None, updateItemUrls=False):
    """Returns an XBMC ListItem that can be played or added to an XBMC PlayList.

    Arguments:
    parent : MediaItem - the parent MediaItem

    Keyword Arguments:
    bitrate        : [opt] integer - preferred bitrate used to select the
                     stream; when omitted the maximum bitrate from the add-on
                     settings is used.
    name           : [opt] string  - if set, overrides the original name of
                     the MediaItem.
    updateItemUrls : [opt] boolean - if set, the item's "path" property is
                     pointed at the actual stream URL.

    Returns:
    A tuple with (stream url, XBMC PlayListItem). The XBMC PlayListItem can be
    used to add to an XBMC Playlist and the stream url can be set as the
    stream for the PlayListItem using xbmc.PlayList.add().

    """

    if name:
        Logger.Debug("Creating XBMC ListItem '%s'", name)
    else:
        Logger.Debug("Creating XBMC ListItem '%s'", self.Name)

    item = parent.GetXBMCItem(name=name)

    # fall back to the configured maximum bitrate when none was requested
    if not bitrate:
        bitrate = AddonSettings.GetMaxStreamBitrate()

    # copy the part's properties onto the list item
    for prop in self.Properties:
        Logger.Trace("Adding property: %s", prop)
        item.setProperty(prop[0], prop[1])

    # pick the stream that best matches the requested bitrate
    stream = self.GetMediaStreamForBitrate(bitrate)

    if updateItemUrls:
        Logger.Info("Updating xbmc playlist-item path: %s", stream.Url)
        item.setProperty("path", stream.Url)

    return stream, item
def GetXBMCPlayListItem(self, parent, bitrate, updateItemUrls=False):
    """Returns an XBMC ListItem that can be played or added to an XBMC PlayList.

    Arguments:
    parent  : MediaItem - the parent MediaItem
    bitrate : integer   - the preferred bitrate used to select the stream.
              Required: a missing/falsy bitrate raises a ValueError.

    Keyword Arguments:
    updateItemUrls : [opt] boolean - if set, the item's "path" property is
                     pointed at the actual stream URL.

    Returns:
    A tuple with (stream url, XBMC PlayListItem). The XBMC PlayListItem can be
    used to add to an XBMC Playlist and the stream url can be set as the
    stream for the PlayListItem using xbmc.PlayList.add().

    Raises:
    ValueError when no bitrate was specified.

    """

    if not self.Name:
        # fall back to the parent's own name when the part has none
        Logger.Debug("Creating XBMC ListItem '%s'", parent.name)
        item = parent.GetXBMCItem()
    else:
        Logger.Debug("Creating XBMC ListItem '%s'", self.Name)
        item = parent.GetXBMCItem(name=self.Name)

    if not bitrate:
        raise ValueError("Bitrate not specified")

    # copy the part's properties onto the list item
    for prop in self.Properties:
        Logger.Trace("Adding property: %s", prop)
        item.setProperty(prop[0], prop[1])

    # pick the stream that best matches the requested bitrate and copy its
    # stream-level properties as well
    stream = self.GetMediaStreamForBitrate(bitrate)
    for prop in stream.Properties:
        Logger.Trace("Adding stream property: %s", prop)
        item.setProperty(prop[0], prop[1])

    if updateItemUrls:
        Logger.Info("Updating xbmc playlist-item path: %s", stream.Url)
        item.setProperty("path", stream.Url)

    return stream, item