Code Example #1
    def UpdateVideoItem(self, item):
        Logger.Debug('Starting UpdateVideoItem for %s (%s)', item.name, self.channelName)

        # we need to fetch the actual url as it might differ for single video items
        data, secureUrl = UriHandler.Header(item.url, proxy=self.proxy)

        secureUrl = secureUrl.rstrip("/")
        secureUrl = "%s.mssecurevideo.json" % (secureUrl, )
        data = UriHandler.Open(secureUrl, proxy=self.proxy, additionalHeaders=item.HttpHeaders)
        secureData = JsonHelper(data, logger=Logger.Instance())
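        # assumption: the .mssecurevideo.json payload has a single top-level entry
        # whose "videoid" value is the MediaZone asset id (mzid) used below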
        mzid = secureData.GetValue(secureData.json.keys()[0], "videoid")
        assetUrl = "https://mediazone.vrt.be/api/v1/vrtvideo/assets/%s" % (mzid, )
        data = UriHandler.Open(assetUrl, proxy=self.proxy)
        assetData = JsonHelper(data, logger=Logger.Instance())

        for streamData in assetData.GetValue("targetUrls"):
            if streamData["type"] != "HLS":
                continue

            part = item.CreateNewEmptyMediaPart()
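            # with mapAudio=True the M3u8 helper yields (stream-url, bitrate, audio-group) tuples;
            # when a separate audio rendition is present its suffix is spliced into the video url below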
            for s, b, a in M3u8.GetStreamsFromM3u8(streamData["url"], self.proxy, mapAudio=True):
                item.complete = True
                if a:
                    audioPart = a.rsplit("-", 1)[-1]
                    audioPart = "-%s" % (audioPart, )
                    s = s.replace(".m3u8", audioPart)
                # s = self.GetVerifiableVideoUrl(s)
                part.AppendMediaStream(s, b)
        return item
Code Example #2
    def ParseMainList(self, returnData=False):
        """Parses the mainlist of the channel and returns a list of MediaItems

        This method creates a list of MediaItems that represent all the different
        programs that are available in the online source. The list is used to fill
        the ProgWindow.

        Keyword parameters:
        returnData : [opt] boolean - If set to true, it will return the retrieved
                                     data as well

        Returns a list of MediaItems that were retrieved.

        """

        items = []
        if len(self.mainListItems) > 1:
            if returnData:
                return self.mainListItems, ""
            else:
                return self.mainListItems

        data = UriHandler.Open(self.mainListUri, proxy=self.proxy, additionalHeaders=self.httpHeaders)
        Logger.Trace("Retrieved %s chars as mainlist data", len(data))

        # first process folder items.
        watch = stopwatch.StopWatch('Mainlist', Logger.Instance())

        episodeItems = []
        if not self.episodeItemRegex == "" and self.episodeItemRegex is not None:
            Logger.Trace("Using Regexer for episodes")
            episodeItems = Regexer.DoRegex(self.episodeItemRegex, data)
            watch.Lap("Mainlist Regex complete")

        elif self.episodeItemJson is not None:
            Logger.Trace("Using JsonHelper for episodes")
            json = JsonHelper(data, Logger.Instance())
            episodeItems = json.GetValue(*self.episodeItemJson)
            watch.Lap("Mainlist Json complete")

        Logger.Debug('Starting CreateEpisodeItem for %s items', len(episodeItems))
        for episodeItem in episodeItems:
            Logger.Trace('Starting CreateEpisodeItem for %s', self.channelName)
            tmpItem = self.CreateEpisodeItem(episodeItem)
            # catch the return of None
            if tmpItem:
                items.append(tmpItem)

        # Filter out the duplicates using the HASH power of a set
        items = list(set(items))

        watch.Lap("MediaItem creation complete")
        self.mainListItems = items

        if returnData:
            return items, data
        else:
            return items
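For context, a hedged sketch of the channel configuration that ParseMainList() consumes (attribute names are taken from the code above; the URL and regex values are made up):

        # illustrative only: set in a derived channel's initialiser
        self.mainListUri = "https://example.com/programmes"  # hypothetical overview page
        self.episodeItemRegex = '<a href="([^"]+)">([^<]+)</a>'  # or "" when JSON parsing is used
        self.episodeItemJson = None  # alternatively a key path passed to JsonHelper.GetValue(*...)
        # ParseMainList() then calls self.CreateEpisodeItem(...) once per match and de-duplicates the result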
Code Example #3
    def UpdateVideoItem(self, item):
        """
        Updates the item
        """

        data = UriHandler.Open(item.url, proxy=self.proxy)

        baseEncode = Regexer.DoRegex(self.mediaUrlRegex, data)[-1]
        jsonData = EncodingHelper().DecodeBase64(baseEncode)
        json = JsonHelper(jsonData, logger=Logger.Instance())
        Logger.Trace(json)

        # "flv": "http://media.dumpert.nl/flv/e2a926ff_10307954_804223649588516_151552487_n.mp4.flv",
        # "tablet": "http://media.dumpert.nl/tablet/e2a926ff_10307954_804223649588516_151552487_n.mp4.mp4",
        # "mobile": "http://media.dumpert.nl/mobile/e2a926ff_10307954_804223649588516_151552487_n.mp4.mp4",

        item.MediaItemParts = []
        part = item.CreateNewEmptyMediaPart()
        streams = json.GetValue()
        for key in streams:
            if key == "flv":
                part.AppendMediaStream(streams[key], 1000)
            elif key == "tablet":
                part.AppendMediaStream(streams[key], 800)
            elif key == "mobile":
                part.AppendMediaStream(streams[key], 450)
            else:
                Logger.Debug("Key '%s' was not used", key)

        item.complete = True
        Logger.Trace("VideoItem updated: %s", item)
        return item
Code Example #4
    def UpdateVideoApiItem(self, item):
        """ Updates an existing MediaItem with more data.

        Arguments:
        item : MediaItem - the MediaItem that needs to be updated

        Returns:
        The original item with more data added to its properties.

        Used to update incomplete MediaItems (self.complete = False). This
        could include opening the item's URL to fetch more data and then processing
        that data or retrieving its real media-URL.

        The method should at least:
        * cache the thumbnail to disk (use self.noImage if no thumb is available).
        * set at least one MediaItemPart with a single MediaStream.
        * set self.complete = True.

        If the returned item does not have a MediaItemPart then the self.complete flag
        will automatically be set back to False.

        """
        Logger.Debug('Starting UpdateChannelItem for %s (%s)', item.name, self.channelName)

        data = UriHandler.Open(item.url, proxy=self.proxy)

        json = JsonHelper(data, logger=Logger.Instance())
        videos = json.GetValue("videoReferences")
        subtitles = json.GetValue("subtitleReferences")
        Logger.Trace(videos)
        return self.__UpdateItemFromVideoReferences(item, videos, subtitles)
Code Example #5
def run_libc_tests() -> List[LibCTest]:
    """ run the libc tests and get result summary 

    run all unit tests based on rules configured in configuration file. All test results 
    will be logged. At the end there will be summary for failed/success/excluded/run tests.
    """
    candidates, selected_tests = get_libc_test_candidates()
    records = []
    # TODO: optimize for parallel execution
    failures = []
    for name, path in candidates.items():
        records.append(f"{name}\t:{path}")
        libc_test = LibCTest(name, path)
        libc_test.run_test()
        if not libc_test.success:
            failures.append(libc_test)

    message = []
    message.append("LibC Tests Running Summary:")
    message.append(f"Testing Mode - INCLUDE")
    message.append(
        f"Success/Fail: {len(candidates)-len(failures)}/{len(failures)} out of {len(candidates)} Tests."
    )
    message.append("\nList of LibC Tests Run:\n{}".format("\n".join(
        sorted(records))))
    if failures:
        message.append("\nList of Failed LibC Tests:\n{}".format("\n".join([
            f"{ut.name}\t:{ut.path}"
            for ut in sorted(failures, key=lambda t: t.name)
        ])))

    logger = Logger.Instance()
    logger.log_test_summary('\n'.join(message))

    return failures
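A hedged usage sketch for the runner above (the surrounding module and exit handling are assumptions, not part of the original code):

# hypothetical caller; run_libc_tests() returns the list of failing LibCTest objects
failures = run_libc_tests()
if failures:
    raise SystemExit(f"{len(failures)} libc tests failed")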
Code Example #6
    def UpdateVideoItem(self, item):
        """Updates an existing MediaItem with more data.

        Arguments:
        item : MediaItem - the MediaItem that needs to be updated

        Returns:
        The original item with more data added to its properties.

        Used to update incomplete MediaItems (self.complete = False). This
        could include opening the item's URL to fetch more data and then processing
        that data or retrieving its real media-URL.

        The method should at least:
        * cache the thumbnail to disk (use self.noImage if no thumb is available).
        * set at least one MediaItemPart with a single MediaStream.
        * set self.complete = True.

        If the returned item does not have a MediaItemPart then the self.complete flag
        will automatically be set back to False.

        """

        Logger.Debug('Starting UpdateVideoItem for %s (%s)', item.name,
                     self.channelName)

        videoId = item.url[item.url.rfind("/") + 1:]

        url = "http://embed.kijk.nl/?width=868&height=491&video=%s" % (
            videoId, )
        referer = "http://www.kijk.nl/video/%s" % (videoId, )

        # now the mediaurl is derived. First we try WMV
        data = UriHandler.Open(url, proxy=self.proxy, referer=referer)
        Logger.Trace(self.mediaUrlRegex)
        objectData = Regexer.DoRegex(self.mediaUrlRegex, data)[0]
        Logger.Trace(objectData)

        # seed = "61773bc7479ab4e69a5214f17fd4afd21fe1987a"
        # seed = "0a2b91ec0fdb48c5dd5239d3e796d6f543974c33"
        seed = "0b0234fa8e2435244cdb1603d224bb8a129de5c1"
        amfHelper = BrightCove(Logger.Instance(), objectData[0], objectData[1],
                               url,
                               seed)  # , proxy=ProxyInfo("localhost", 8888)
        item.description = amfHelper.GetDescription()

        part = item.CreateNewEmptyMediaPart()
        for stream, bitrate in amfHelper.GetStreamInfo():
            if "m3u8" in stream:
                for s, b in M3u8.GetStreamsFromM3u8(stream, self.proxy):
                    item.complete = True
                    # s = self.GetVerifiableVideoUrl(s)
                    part.AppendMediaStream(s, b)
            part.AppendMediaStream(stream.replace("&mp4:", ""), bitrate)

        item.complete = True
        return item
Code Example #7
    def __init__(self, name: str, path: str):
        """
        Constructor: stores the test name and path; success is initialised to False.
        """
        self.path, self.name, self.success = path, name, False
        self.target_suffix = f"-C {path}"
        self.logger = Logger.Instance()
        super().__init__()
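A minimal usage sketch, mirroring how run_libc_tests() in Code Example #5 drives this class (the test name and path are made up):

# hypothetical usage; run_test() and success are the members used by the runner
test = LibCTest("strtol", "/path/to/libc-tests/functional/strtol")
test.run_test()
if not test.success:
    print(f"libc test {test.name} failed ({test.path})")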
Code Example #8
        def __init__(self,
                     cacheDir=None,
                     useCompression=True,
                     webTimeOut=30,
                     maxFileNameLength=None,
                     blockSize=4096,
                     cookieJar=None,
                     ignoreSslErrors=False):
            """Initialises the UriHandler class

            Keyword Arguments:
            @param blockSize:         integer - the size of download blocks.
            @param maxFileNameLength: integer - the max filename length (should be 42 on Xbox)
            @param cacheDir:          string  - a path for http caching. If specified, caching will be used.
            @param useCompression:    boolean - Indicates whether compression is supported or not.
            @param webTimeOut:        integer - timeout for requests in seconds
            @param cookieJar:         string  - the path to the cookie jar (in case of file storage)
            @param ignoreSslErrors:   boolean - if True, SSL certificate errors are ignored

            """
            if cookieJar:
                self.cookieJar = cookielib.MozillaCookieJar(cookieJar)
                if not os.path.isfile(cookieJar):
                    # noinspection PyUnresolvedReferences
                    self.cookieJar.save()
                # noinspection PyUnresolvedReferences
                self.cookieJar.load()
                self.cookieJarFile = True
            else:
                self.cookieJar = cookielib.CookieJar()
                self.cookieJarFile = False

            # set caching stuff
            if cacheDir:
                cachePath = os.path.join(cacheDir, "www")
                self.cacheStore = filecache.FileCache(cachePath,
                                                      logger=Logger.Instance())
            self.useCaching = cacheDir is not None
            self.useCompression = useCompression
            self.maxFileNameLength = maxFileNameLength

            self.blockSize = blockSize
            self.__bytesToMB = 1048576
            self.inValidCharacters = "[^a-zA-Z0-9!#$%&'()-.@\[\]^_`{}]"
            Logger.Info(
                "UriHandler initialised [useCompression=%s, useCaching=%s]",
                self.useCompression, self.useCaching)

            # self.timerTimeOut = 2.0                       # used for the emergency canceler

            self.webTimeOutInterval = webTimeOut  # max duration of request
            self.pollInterval = 0.1  # time between polling of activity
            self.dnsCache = {"localhost": "127.0.0.1"}  # init with localhost

            self.ignoreSslErrors = ignoreSslErrors  # ignore SSL errors
            if self.ignoreSslErrors:
                Logger.Warning("Ignoring all SSL errors in Python")
Code Example #9
    def UpdateVideoItem(self, item):
        """
        Accepts an item. It returns an updated item. Usually retrieves the MediaURL 
        and the Thumb! It should return a completed item. 
        """
        Logger.Debug('Starting UpdateVideoItem for %s (%s)', item.name, self.channelName)
        
        # get additional info
        data = UriHandler.Open(item.url, proxy=self.proxy)
        guid = Regexer.DoRegex('<meta property="og:video" content="http://player.extreme.com/FCPlayer.swf\?id=([^&]+)&amp[^"]+" />', data)

        #<param name="flashvars" value="id=dj0xMDEzNzQyJmM9MTAwMDAwNA&amp;tags=source%253Dfreecaster&amp;autoplay=1" />
        # http://freecaster.tv/player/smil/dj0xMDEzNzQyJmM9MTAwMDAwNA -> playlist with bitrate
        # http://freecaster.tv/player/smil/dj0xMDEzNzQyJmM9MTAwMDAwNA -> info (not needed, get description from main page.

        if len(guid) > 0:
            url = '%s/player/smil/%s' % (self.baseUrl, guid[0],) 
            data = UriHandler.Open(url)

            smiller = Smil(data)
            baseUrl = smiller.GetBaseUrl()
            urls = smiller.GetVideosAndBitrates()

            part = item.CreateNewEmptyMediaPart()
            for url in urls:
                if "youtube" in url[0]:
                    for s, b in YouTube.GetStreamsFromYouTube(url[0], self.proxy):
                        item.complete = True
                        part.AppendMediaStream(s, b)
                else:
                    part.AppendMediaStream("%s%s" % (baseUrl, url[0]), bitrate=int(int(url[1]) / 1000))
                item.complete = True

            Logger.Trace("UpdateVideoItem complete: %s", item)
            return item

        # Try the brightcove
        brightCoveRegex = '<object id="myExperience[\w\W]+?videoPlayer" value="(\d+)"[\w\W]{0,1000}?playerKey" value="([^"]+)'
        brightCoveData = Regexer.DoRegex(brightCoveRegex, data)
        Logger.Trace(brightCoveData)
        if len(brightCoveData) > 0:
            seed = "c5f9ae8729f7054d43187989ef3421531ee8678d"
            objectData = brightCoveData[0]
            # from proxyinfo import ProxyInfo
            playerKey = str(objectData[1])
            videoId = int(objectData[0])

            part = item.CreateNewEmptyMediaPart()
            # But we need the IOS streams!
            amfHelper = BrightCove(Logger.Instance(), playerKey, videoId, str(item.url), seed, proxy=self.proxy)
            for stream, bitrate in amfHelper.GetStreamInfo(renditions="IOSRenditions"):
                part.AppendMediaStream(stream, bitrate)

        # Logger.Error("Cannot find GUID in url: %s", item.url)
        return item
Code Example #10
    def ShowFavourites(self, channel, replaceExisting=False):
        """ Show the favourites

        Arguments:
        channel : Channel - The channel to show favourites for. Might be None to show all.

        Keyword Arguments:
        replaceExisting : boolean - if True it will replace the current list

        """
        Logger.Debug("Plugin::ShowFavourites")

        if channel is None:
            Logger.Info("Showing all favourites")
        else:
            Logger.Info("Showing favourites for: %s", channel)
        stopWatch = stopwatch.StopWatch("Plugin Favourites timer", Logger.Instance())

        try:
            ok = True
            f = Favourites(Config.favouriteDir)
            favs = f.List(channel)

            # get (actionUrl, pickle) tuples
            # favs = map(lambda (a, p): (a, Pickler.DePickleMediaItem(p)), favs)
            if len(favs) == 0:
                ok = self.__ShowEmptyInformation(favs, favs=True)

            stopWatch.Lap("Items retrieved")

            # create the XBMC items
            xbmcItems = map(lambda item: self.__ConvertMainlistItemToXbmcItem(channel, item[1],
                                                                              True, item[0]), favs)
            stopWatch.Lap("%s items for Kodi generated" % (len(xbmcItems),))

            # add them to XBMC
            ok = ok and xbmcplugin.addDirectoryItems(self.handle, xbmcItems, len(xbmcItems))
            # add sort handle, but don't use any dates as they make no sense for favourites
            self.__AddSortMethodToHandle(self.handle)

            # set the content
            xbmcplugin.setContent(handle=self.handle, content=self.contentType)
            # make sure we do not cache this one to disc!
            xbmcplugin.endOfDirectory(self.handle, succeeded=ok, updateListing=replaceExisting, cacheToDisc=False)
            stopWatch.Lap("items send to Kodi")

            Logger.Debug("Plugin::Favourites completed. Returned %s item(s)", len(favs))
            stopWatch.Stop()
        except:
            XbmcWrapper.ShowNotification(LanguageHelper.GetLocalizedString(LanguageHelper.ErrorId),
                                         LanguageHelper.GetLocalizedString(LanguageHelper.ErrorList),
                                         XbmcWrapper.Error, 4000)
            Logger.Error("Plugin::Error parsing favourites", exc_info=True)
            xbmcplugin.endOfDirectory(self.handle, False)
Code Example #11
    def __FetchActualStream(idaData, proxy):
        actualStreamJson = JsonHelper(idaData, Logger.Instance())
        m3u8Url = actualStreamJson.GetValue('stream')
        Logger.Debug("Fetching redirected stream for: %s", m3u8Url)

        # now we have the m3u8 URL, but it will do an HTTP 302 redirect
        (headData,
         m3u8Url) = UriHandler.Header(m3u8Url,
                                      proxy=proxy)  # : @UnusedVariables

        Logger.Debug("Found redirected stream: %s", m3u8Url)
        return m3u8Url
Code Example #12
File: updater.py, Project: normico21/repository.xvbmc
    def __UpdateFromUrl(self, url, zipName):
        """ Update a channel from an URL

        @param url:     The url to download
        @param zipName: The name to give the download

        """

        Logger.Info("Going to update from %s", url)
        # wrapper = XbmcDialogProgressWrapper("Updating XOT", url)
        # destFilename = UriHandler.Download(url, zipName, Config.cacheDir, wrapper.ProgressUpdate)
        destFilename = UriHandler.Download(url, zipName, Config.cacheDir,
                                           self.__RetrieveProgressDummy)
        Logger.Debug("Download succeeded: %s", destFilename)

        # we extract to the deploy folder, so with the first start of XOT, the new channel is deployed
        deployDir = os.path.abspath(os.path.join(Config.rootDir, "deploy"))
        zipFile = zipfile.ZipFile(destFilename)

        # now extract
        first = True
        Logger.Debug("Extracting %s to %s", destFilename, deployDir)
        for name in zipFile.namelist():
            if first:
                folder = os.path.split(name)[0]
                if os.path.exists(os.path.join(deployDir, folder)):
                    shutil.rmtree(os.path.join(deployDir, folder))
                first = False

            if not name.endswith("/") and not name.endswith("\\"):
                fileName = os.path.join(deployDir, name)
                path = os.path.dirname(fileName)
                if not os.path.exists(path):
                    os.makedirs(path)
                Logger.Debug("Extracting %s", fileName)
                outfile = open(fileName, 'wb')
                outfile.write(zipFile.read(name))
                outfile.close()

        zipFile.close()
        os.remove(destFilename)
        Logger.Info("Update completed and zip file (%s) removed", destFilename)

        message = LanguageHelper.GetLocalizedString(
            LanguageHelper.UpdateCompleteId,
            splitOnPipes=False) % (zipName.replace(".zip", ""), )
        message = message.split("|")
        XbmcWrapper.ShowNotification(LanguageHelper.GetLocalizedString(
            LanguageHelper.RestartId),
                                     message,
                                     displayTime=5000,
                                     logger=Logger.Instance())
Code Example #13
    def FromJson(path, version="x.x.x.x"):
        channelInfos = []
        # Logger.Trace("Using JSON reader for %s", path)

        jsonFile = open(path)
        jsonData = jsonFile.read()
        jsonFile.close()

        json = JsonHelper(jsonData, logger=Logger.Instance())
        channels = json.GetValue("channels")

        if "settings" in json.json:
            settings = json.GetValue("settings")
        else:
            settings = []
        Logger.Debug("Found %s channels and %s settings in %s", len(channels), len(settings), path)

        for channel in channels:
            channelInfo = ChannelInfo(channel["guid"],
                                      channel["name"],
                                      channel["description"],
                                      channel["icon"],
                                      channel["category"],
                                      path,

                                      # none required items
                                      channel.get("channelcode", None),
                                      channel.get("sortorder", 255),
                                      channel.get("language", None),
                                      eval(channel.get("compatible", "Environments.All")),
                                      channel.get("fanart", None))
            channelInfo.firstTimeMessage = channel.get("message", None)
            # Disable spoofing for the moment
            # channelInfo.localIPSupported = channel.get("localIPSupported", False)
            channelInfo.settings = settings
            channelInfo.version = version

            # validate a bit
            if channelInfo.channelCode == "None":
                raise Exception("'None' as channelCode")
            if channelInfo.language == "None":
                raise Exception("'None' as language")

            channelInfos.append(channelInfo)

        return channelInfos
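A hedged sketch of the channel JSON this reader accepts, written as a Python literal (values are illustrative; the keys read with [] above are required, the .get(...) keys are optional):

# illustrative only: minimal content of a channel definition file parsed by FromJson()
example_channel_file = {
    "channels": [{
        "guid": "00000000-0000-0000-0000-000000000000",  # placeholder
        "name": "Example Channel",
        "description": "Illustrative entry",
        "icon": "icon.png",
        "category": "Example",
        "channelcode": None,                 # optional
        "sortorder": 255,                    # optional, defaults to 255
        "language": "en",                    # optional; the literal string "None" raises an exception
        "compatible": "Environments.All",    # optional, eval()'d against the Environments flags
        "fanart": None,                      # optional
        "message": None                      # optional first-time message
    }],
    "settings": []                           # optional
}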
Code Example #14
    def ExtractCategoriesAndAddSearch(self, data):
        """ Extracts the Category information from the JSON data

        @param data: the JSON data
        @return: Unmodified JSON data
        """

        Logger.Info("Extracting Category Information")
        dummyData, items = self.AddSearch(data)

        json = JsonHelper(data, logger=Logger.Instance())
        categories = json.GetValue("context", "dispatcher", "stores",
                                   "ApplicationStore", "categories")
        for category in categories:
            self.__categories[category["id"]] = category

        Logger.Debug("Extracting Category Information finished")
        return data, items
Code Example #15
    def UpdateVideoItem(self, item):
        """
        Accepts an item. It returns an updated item. Usually retrieves the MediaURL
        and the Thumb! It should return a completed item.
        """
        Logger.Debug('Starting UpdateVideoItem for %s (%s)', item.name,
                     self.channelName)

        data = UriHandler.Open(item.url, proxy=self.proxy)
        javascriptUrls = Regexer.DoRegex(
            '<script type="text/javascript" src="(http://l1.bbvms.com/p/standaard/c/\d+.js)">',
            data)
        dataUrl = None
        for javascriptUrl in javascriptUrls:
            dataUrl = javascriptUrl

        if not dataUrl:
            return item

        data = UriHandler.Open(dataUrl, proxy=self.proxy)
        jsonData = Regexer.DoRegex(
            'clipData\W*:([\w\W]{0,10000}?\}),"playerWidth', data)
        Logger.Trace(jsonData)
        json = JsonHelper(jsonData[0], logger=Logger.Instance())
        Logger.Trace(json)

        streams = json.GetValue("assets")
        item.MediaItemParts = []
        part = item.CreateNewEmptyMediaPart()
        for stream in streams:
            url = stream.get("src", None)
            if "://" not in url:
                url = "http://static.l1.nl/bbw%s" % (url, )
            bitrate = stream.get("bandwidth", None)
            if url:
                part.AppendMediaStream(url, bitrate)

        if not item.thumb and json.GetValue("thumbnails"):
            url = json.GetValue("thumbnails")[0].get("src", None)
            if url and "http:/" not in url:
                url = "%s%s" % (self.baseUrl, url)
            item.thumb = url
        item.complete = True
        return item
Code Example #16
    def UpdateVideoItem(self, item):
        """Updates an existing MediaItem with more data.

        Arguments:
        item : MediaItem - the MediaItem that needs to be updated

        Returns:
        The original item with more data added to its properties.

        Used to update incomplete MediaItems (self.complete = False). This
        could include opening the item's URL to fetch more data and then processing
        that data or retrieving its real media-URL.

        The method should at least:
        * cache the thumbnail to disk (use self.noImage if no thumb is available).
        * set at least one MediaItemPart with a single MediaStream.
        * set self.complete = True.

        If the returned item does not have a MediaItemPart then the self.complete flag
        will automatically be set back to False.

        """

        Logger.Debug('Starting UpdateVideoItem for %s (%s)', item.name, self.channelName)

        if not self.loggedOn:
            Logger.Warning("Cannot log on")
            return None

        data = UriHandler.Open(item.url, proxy=self.proxy)
        dataRegex = "JSON\.parse\('([\w\W]+?)'\);\W+window\.media"
        videoData = Regexer.DoRegex(dataRegex, data)[0]
        # VTM has some strange escapes
        videoData = videoData.replace("\\\"", "\"")
        videoData = videoData.replace("\\\\", "\\")
        videoData = videoData.replace("\\'", "'")
        videoJson = JsonHelper(videoData, logger=Logger.Instance())

        # duration is not calculated correctly
        duration = videoJson.GetValue("videoConfig", "duration")
        item.SetInfoLabel("Duration", duration)

        return self.__UpdateVideoItem(item, videoJson.json["vodId"])
Code Example #17
    def UpdateVideoHtmlItem(self, item):
        """Updates an existing MediaItem with more data.

        Arguments:
        item : MediaItem - the MediaItem that needs to be updated

        Returns:
        The original item with more data added to its properties.

        Used to update incomplete MediaItems (self.complete = False). This
        could include opening the item's URL to fetch more data and then processing
        that data or retrieving its real media-URL.

        The method should at least:
        * cache the thumbnail to disk (use self.noImage if no thumb is available).
        * set at least one MediaItemPart with a single MediaStream.
        * set self.complete = True.

        If the returned item does not have a MediaItemPart then the self.complete flag
        will automatically be set back to False.

        """
        data = UriHandler.Open(item.url, proxy=self.proxy)
        # Logger.Trace(data)
        data = self.ExtractJsonData(data)[0]
        json = JsonHelper(data, logger=Logger.Instance())

        # check for direct streams:
        streams = json.GetValue("videoTitlePage", "video", "videoReferences")
        subtitles = json.GetValue("videoTitlePage", "video", "subtitles")

        if streams:
            Logger.Info("Found stream information within HTML data")
            return self.__UpdateItemFromVideoReferences(item, streams, subtitles)

        videoId = json.GetValue("videoPage", "video", "id")
        # in case that did not work, try the old version.
        if not videoId:
            videoId = json.GetValue("videoPage", "video", "programVersionId")
        if videoId:
            # item.url = "https://www.svt.se/videoplayer-api/video/%s" % (videoId, )
            item.url = "https://api.svt.se/videoplayer-api/video/%s" % (videoId, )
        return self.UpdateVideoApiItem(item)
Code Example #18
    def LogOn(self):
        if self.__idToken:
            return True

        # check if there is a refresh token
        # refresh token: viervijfzes_refresh_token
        refreshToken = AddonSettings.GetSetting("viervijfzes_refresh_token")
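        # the two positional arguments below appear to be the AWS Cognito user-pool id
        # and app-client id consumed by the AwsIdp helper (an assumption, not documented here)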
        client = AwsIdp("eu-west-1_dViSsKM5Y",
                        "6s1h851s8uplco5h6mqh1jac8m",
                        proxy=self.proxy,
                        logger=Logger.Instance())
        if refreshToken:
            idToken = client.RenewToken(refreshToken)
            if idToken:
                self.__idToken = idToken
                return True
            else:
                Logger.Info("Extending token for VierVijfZes failed.")

        # username: viervijfzes_username
        username = AddonSettings.GetSetting("viervijfzes_username")
        # password: viervijfzes_password
        v = Vault()
        password = v.GetSetting("viervijfzes_password")
        if not username or not password:
            XbmcWrapper.ShowDialog(
                title=None,
                lines=LanguageHelper.GetLocalizedString(
                    LanguageHelper.MissingCredentials),
            )
            return False

        idToken, refreshToken = client.Authenticate(username, password)
        if not idToken or not refreshToken:
            Logger.Error("Error getting a new token. Wrong password?")
            return False

        self.__idToken = idToken
        AddonSettings.SetSetting("viervijfzes_refresh_token", refreshToken)
        return True
Code Example #19
        def __init__(self,
                     cacheDir=None,
                     useCompression=True,
                     webTimeOut=30,
                     maxFileNameLength=None,
                     blockSize=4096):
            """Initialises the UriHandler class

            Keyword Arguments:
            @param blockSize:         integer - the size of download blocks.
            @param maxFileNameLength: integer - the max filename length (should be 42 on Xbox)
            @param cacheDir:          string  - a path for http caching. If specified, caching will be used.
            @param useCompression:    boolean - Indicates whether compression is supported or not.
            @param webTimeOut:        integer - timeout for requests in seconds

            """
            self.cookieJar = cookielib.CookieJar()

            # set caching stuff
            if cacheDir:
                cachePath = os.path.join(cacheDir, "www")
                self.cacheStore = filecache.FileCache(cachePath,
                                                      logger=Logger.Instance())
            self.useCaching = cacheDir is not None
            self.useCompression = useCompression
            self.maxFileNameLength = maxFileNameLength

            self.blockSize = blockSize
            self.__bytesToMB = 1048576
            self.inValidCharacters = "[^a-zA-Z0-9!#$%&'()-.@\[\]^_`{}]"
            Logger.Info(
                "UriHandler initialised [useCompression=%s, useCaching=%s]",
                self.useCompression, self.useCaching)

            # self.timerTimeOut = 2.0                       # used for the emergency canceler

            self.webTimeOutInterval = webTimeOut  # max duration of request
            self.pollInterval = 0.1  # time between polling of activity
            self.dnsCache = {"localhost": "127.0.0.1"}  # init with localhost
Code Example #20
    def __GetIndex(self):
        # type: () -> dict
        """ Loads the channel index and if there is none, makes sure one is created.

        Checks:
        1. Existence of the index
        2. Channel add-ons in the index vs actual add-ons

        @return: the channel index as a dict; it is rebuilt first if missing or inconsistent.
        """

        # if a forced re-index was requested and has not already been performed, rebuild the index
        if self.__reindex:
            if self.__reindexed:
                Logger.Warning("Forced re-index set, but a re-index was already done previously. Not Rebuilding.")
            else:
                Logger.Info("Forced re-index set. Rebuilding.")
                return self.__RebuildIndex()

        if not os.path.isfile(self.__CHANNEL_INDEX):
            Logger.Info("No index file found at '%s'. Rebuilding.", self.__CHANNEL_INDEX)
            return self.__RebuildIndex()

        fd = None
        try:
            fd = open(self.__CHANNEL_INDEX)
            data = fd.read()
            indexJson = JsonHelper(data, logger=Logger.Instance())
            Logger.Debug("Loaded index from '%s'.", self.__CHANNEL_INDEX)

            if not self.__IsIndexConsistent(indexJson.json):
                return self.__RebuildIndex()
            return indexJson.json
        except:
            Logger.Critical("Error reading channel index. Rebuilding.", exc_info=True)
            return self.__RebuildIndex()
        finally:
            if fd is not None and not fd.closed:
                fd.close()
Code Example #21
    def UpdateJsonVideo(self, item):
        """Updates an existing MediaItem with more data.

        Arguments:
        item : MediaItem - the MediaItem that needs to be updated

        Returns:
        The original item with more data added to its properties.

        Used to update incomplete MediaItems (self.complete = False). This
        could include opening the item's URL to fetch more data and then processing
        that data or retrieving its real media-URL.

        The method should at least:
        * cache the thumbnail to disk (use self.noImage if no thumb is available).
        * set at least one MediaItemPart with a single MediaStream.
        * set self.complete = True.

        If the returned item does not have a MediaItemPart then the self.complete flag
        will automatically be set back to False.

        """

        Logger.Debug('Starting UpdateVideoItem: %s', item.name)

        qualities = {"mp4-web03": 1200, "mp4-web01": 500}  # , "http-hls": 1500, "3gp-mob01": 300, "flv-web01": 500}
        part = item.CreateNewEmptyMediaPart()
        for q in qualities:
            url = item.url.replace("mp4-web01", q)
            data = UriHandler.Open(url, proxy=self.proxy, additionalHeaders=item.HttpHeaders)
            if not data:
                Logger.Warning("No data found for: %s", q)
                continue
            json = JsonHelper(data, logger=Logger.Instance())
            url = json.GetValue("url")
            part.AppendMediaStream(url, qualities[q])
        item.complete = True
        return item
Code Example #22
File: default.py, Project: normico21/repository.xvbmc
def RunPlugin():
    """ Runs Retrospect as a Video Add-On """

    logFile = None

    try:
        from config import Config
        from helpers.sessionhelper import SessionHelper

        # get a logger up and running
        from logger import Logger

        # only append the log when a session is already active
        if not SessionHelper.IsSessionActive():
            # first call in the session, so do not append the log
            appendLogFile = False
        else:
            appendLogFile = True

        logFile = Logger.CreateLogger(os.path.join(Config.profileDir,
                                                   Config.logFileNameAddon),
                                      Config.appName,
                                      append=appendLogFile,
                                      dualLogger=lambda x, y=4: xbmc.log(x, y))

        from urihandler import UriHandler
        from addonsettings import AddonSettings
        from textures import TextureHandler

        # update the loglevel
        Logger.Instance().minLogLevel = AddonSettings.GetLogLevel()

        useCaching = AddonSettings.CacheHttpResponses()
        cacheDir = None
        if useCaching:
            cacheDir = Config.cacheDir

        # determine the platform
        from envcontroller import EnvController
        from environments import Environments
        maxFileNameLength = None
        if EnvController.IsPlatform(Environments.Xbox):
            maxFileNameLength = 42

        ignoreSslErrors = AddonSettings.IgnoreSslErrors()
        UriHandler.CreateUriHandler(cacheDir=cacheDir,
                                    maxFileNameLength=maxFileNameLength,
                                    cookieJar=os.path.join(
                                        Config.profileDir, "cookiejar.dat"),
                                    ignoreSslErrors=ignoreSslErrors)

        # start texture handler
        TextureHandler.SetTextureHandler(Config, Logger.Instance(),
                                         UriHandler.Instance())

        # run the plugin
        import plugin
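        # Kodi passes sys.argv as [base plugin url, handle, query string];
        # Plugin() apparently expects (pluginName, params, handle), hence the reordered arguments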
        plugin.Plugin(sys.argv[0], sys.argv[2], sys.argv[1])

        # close the log to prevent locking on next call
        Logger.Instance().CloseLog()
        logFile = None

        # make sure we leave no references behind
        AddonSettings.ClearCachedAddonSettingsObject()
    except:
        if logFile:
            logFile.Critical("Error running plugin", exc_info=True)
        raise
Code Example #23
    def List(self, channel=None):
        """ Lists favourites. If a channel was specified it will limit them to that.

        @param channel: The channel to limit the favourites to.


        Returns a list of (actionUrl, MediaItem) tuples; the pickle is already substituted into the actionUrl.

        """

        favs = []

        if channel:
            pathMask = os.path.join(self.FavouriteFolder, "%s-*.xotfav" % (channel.guid,))
        else:
            pathMask = os.path.join(self.FavouriteFolder, "*.xotfav")

        Logger.Debug("Fetching favourites for mask: %s", pathMask)
        for fav in glob.glob(pathMask):
            Logger.Trace("Fetching %s", fav)

            fileHandle = None
            try:
                fileHandle = open(fav)
                channelName = fileHandle.readline().rstrip()
                name = fileHandle.readline().rstrip()
                actionUrl = fileHandle.readline().rstrip()
                pickle = fileHandle.readline()
                fileHandle.close()
            except:
                Logger.Error("Error fetching favourite", exc_info=True)
                if fileHandle and not fileHandle.closed:
                    fileHandle.close()
                raise

            if channelName == "" or name == "" or actionUrl == "" or pickle == "":
                Logger.Error("Apparently the file had too few lines, corrupt Favourite, removing it:\n"
                             "Pickle: %s\n"
                             "Channel: %s\n"
                             "Item: %s\n"
                             "ActionUrl: %s\n"
                             "Pickle: %s",
                             fav, channelName, name, actionUrl, pickle)

                # Remove the invalid favourite
                os.remove(fav)
                continue

            Logger.Debug("Found favourite: %s", name)
            item = Pickler.DePickleMediaItem(pickle)
            validationError = Pickler.Validate(item, logger=Logger.Instance())
            if validationError:
                Logger.Error("Invalid Pickled Item: %s\nRemoving favourite: %s", validationError, fav)

                # Remove the invalid favourite
                os.remove(fav)
                continue

            # add the channel name
            if channel is None:
                item.name = "%s [%s]" % (item.name, channelName)

            item.ClearDate()

            favs.append((actionUrl % (pickle,), item))
        return favs
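For reference, the reads above imply the following favourite-file layout (reconstructed from this method, not from a specification):

# <channel guid>-<name>.xotfav, one value per line:
#   1: channel name
#   2: item name
#   3: action URL containing a %s placeholder that the pickle is substituted into (see the return value)
#   4: pickled MediaItem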
Code Example #24
    def GetStreamsFromNpo(url, episodeId, proxy=None, headers=None):
        """ Retrieve NPO Player Live streams from a different number of stream urls.

        @param url:               (String) The url to download
        @param episodeId:         (String) The NPO episode ID
        @param headers:           (dict) Possible HTTP Headers
        @param proxy:             (Proxy) The proxy to use for opening

        Can be used like this:

            part = item.CreateNewEmptyMediaPart()
            for s, b in NpoStream.GetStreamsFromNpo(m3u8Url, self.proxy):
                item.complete = True
                # s = self.GetVerifiableVideoUrl(s)
                part.AppendMediaStream(s, b)

        """

        if url:
            Logger.Info("Determining streams for url: %s", url)
            episodeId = url.split("/")[-1]
        elif episodeId:
            Logger.Info("Determining streams for VideoId: %s", episodeId)
        else:
            Logger.Error("No url or streamId specified!")
            return []

        # we first need an authentication token
        tokenJsonData = UriHandler.Open("http://ida.omroep.nl/app.php/auth",
                                        noCache=True,
                                        proxy=proxy,
                                        additionalHeaders=headers)
        tokenJson = JsonHelper(tokenJsonData)
        token = tokenJson.GetValue("token")

        url = "http://ida.omroep.nl/app.php/%s?adaptive=yes&token=%s" % (
            episodeId, token)
        streamData = UriHandler.Open(url,
                                     proxy=proxy,
                                     additionalHeaders=headers)
        if not streamData:
            return []

        streamJson = JsonHelper(streamData, logger=Logger.Instance())
        streamInfos = streamJson.GetValue("items")[0]
        Logger.Trace(streamInfos)
        streams = []
        for streamInfo in streamInfos:
            Logger.Debug("Found stream info: %s", streamInfo)
            if streamInfo["format"] == "mp3":
                streams.append((streamInfo["url"], 0))
                continue

            elif streamInfo["contentType"] == "live":
                Logger.Debug("Found live stream")
                url = streamInfo["url"]
                url = url.replace("jsonp", "json")
                liveUrlData = UriHandler.Open(url,
                                              proxy=proxy,
                                              additionalHeaders=headers)
                liveUrl = liveUrlData.strip("\"").replace("\\", "")
                Logger.Trace(liveUrl)
                streams += M3u8.GetStreamsFromM3u8(liveUrl,
                                                   proxy,
                                                   headers=headers)

            elif streamInfo["format"] == "hls":
                m3u8InfoUrl = streamInfo["url"]
                m3u8InfoData = UriHandler.Open(m3u8InfoUrl,
                                               proxy=proxy,
                                               additionalHeaders=headers)
                m3u8InfoJson = JsonHelper(m3u8InfoData,
                                          logger=Logger.Instance())
                m3u8Url = m3u8InfoJson.GetValue("url")
                streams += M3u8.GetStreamsFromM3u8(m3u8Url,
                                                   proxy,
                                                   headers=headers)

            elif streamInfo["format"] == "mp4":
                bitrates = {"hoog": 1000, "normaal": 500}
                url = streamInfo["url"]
                if "contentType" in streamInfo and streamInfo[
                        "contentType"] == "url":
                    mp4Url = url
                else:
                    url = url.replace("jsonp", "json")
                    mp4UrlData = UriHandler.Open(url,
                                                 proxy=proxy,
                                                 additionalHeaders=headers)
                    mp4InfoJson = JsonHelper(mp4UrlData,
                                             logger=Logger.Instance())
                    mp4Url = mp4InfoJson.GetValue("url")
                bitrate = bitrates.get(streamInfo["label"].lower(), 0)
                if bitrate == 0 and "/ipod/" in mp4Url:
                    bitrate = 200
                elif bitrate == 0 and "/mp4/" in mp4Url:
                    bitrate = 500
                streams.append((mp4Url, bitrate))

        return streams
Code Example #25
    def UpdateVideoItem(self, item):
        """Updates an existing MediaItem with more data.

        Arguments:
        item : MediaItem - the MediaItem that needs to be updated

        Returns:
        The original item with more data added to its properties.

        Used to update incomplete MediaItems (self.complete = False). This
        could include opening the item's URL to fetch more data and then processing
        that data or retrieving its real media-URL.

        The method should at least:
        * cache the thumbnail to disk (use self.noImage if no thumb is available).
        * set at least one MediaItemPart with a single MediaStream.
        * set self.complete = True.

        If the returned item does not have a MediaItemPart then the self.complete flag
        will automatically be set back to False.

        """

        Logger.Debug('Starting UpdateVideoItem for %s (%s)', item.name, self.channelName)

        data = UriHandler.Open(item.url, proxy=self.proxy)
        Logger.Trace(data)

        if 'livestart":-' in data:
            Logger.Debug("Live item that has not yet begun.")
            json = JsonHelper(data, Logger.Instance())
            secondsToStart = json.GetValue("video", "livestart")
            if secondsToStart:
                secondsToStart = -int(secondsToStart)
                Logger.Debug("Seconds till livestream: %s", secondsToStart)
                timeLeft = "%s:%02d:%02d" % (secondsToStart / 3600, (secondsToStart % 3600) / 60, secondsToStart % 60)
                Logger.Debug("Live items starts at %s", timeLeft)
                lines = list(LanguageHelper.GetLocalizedString(LanguageHelper.NoLiveStreamId))
                lines[-1] = "%s ETA: %s" % (lines[-1], timeLeft)
                XbmcWrapper.ShowDialog(LanguageHelper.GetLocalizedString(LanguageHelper.NoLiveStreamTitleId),
                                       lines)
            else:
                XbmcWrapper.ShowDialog(LanguageHelper.GetLocalizedString(LanguageHelper.NoLiveStreamTitleId),
                                       LanguageHelper.GetLocalizedString(LanguageHelper.NoLiveStreamId))
            return item

        item.MediaItemParts = []
        mediaPart = item.CreateNewEmptyMediaPart()
        spoofIp = self._GetSetting("spoof_ip", "0.0.0.0")
        if spoofIp is not None:
            mediaPart.HttpHeaders["X-Forwarded-For"] = spoofIp

        # mediaPart.UserAgent = "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:17.0) Gecko/20100101 Firefox/17.0"

        # isLive = False
        if '"live":true' in data or "/live/" in item.url:
            mediaPart.AddProperty("IsLive", "true")
            Logger.Debug("Live video item found.")
            # isLive = True
        else:
            Logger.Debug("Normal (not live, or possible was live) video item found")

        # replace items
        #videos = map(lambda v: self.__ReplaceClist(v), videos)

        jsonVideoData = JsonHelper(data)
        videos = jsonVideoData.GetValue("video", "videoReferences")
        # videos = Regexer.DoRegex(self.mediaUrlRegex, data)
        for video in videos:
            playerType = video.get("playerType", "")
            if "dash" in playerType:
                continue

            if video["url"].startswith("rtmp"):
                # just replace some data in the URL
                mediaPart.AppendMediaStream(self.GetVerifiableVideoUrl(video["url"]).replace("_definst_", "?slist="),
                                            video["bitrate"])

            elif "m3u8" in video["url"]:
                Logger.Info("SVTPlay.se m3u8 stream found: %s", video["url"])

                # apparently the m3u8 do not work well for server www0.c91001.dna.qbrick.com
                if "www0.c91001.dna.qbrick.com" in video["url"]:
                    continue

                # m3u8 we need to parse. Get more streams from this file.
                videoUrl = video["url"]
                altIndex = videoUrl.find("m3u8?")
                # altIndex = videoUrl.find("~uri")
                if altIndex > 0:
                    videoUrl = videoUrl[0:altIndex + 4]
                for s, b in M3u8.GetStreamsFromM3u8(videoUrl, self.proxy, headers=mediaPart.HttpHeaders):
                    item.complete = True
                    mediaPart.AppendMediaStream(s, b)

            elif "f4m" in video["url"]:
                Logger.Info("SVTPlay.se manifest.f4m stream found: %s", video["url"])

                #if "manifest.f4m?start=" in video["url"]:
                #    # this was a live stream, convert it to M3u8
                #    # http://svt06-lh.akamaihd.net/z/svt06_0@77501/manifest.f4m?start=1386566700&end=1386579600
                #    # to
                #    # http://svt06hls-lh.akamaihd.net/i/svt06_0@77501/master.m3u8?__b__=563&start=1386566700&end=1386579600
                #    m3u8Url = video["url"].replace("-lh.akamaihd.net/z", "hls-lh.akamaihd.net/i").replace("manifest.f4m?", "master.m3u8?__b__=563&")
                #    Logger.Info("Found f4m stream for an old Live stream. Converting to M3U8:\n%s -to -\n%s", video["url"], m3u8Url)
                #    videos.append((m3u8Url, 0))
                #    continue

                # for now we skip these as they do not yet work with XBMC
                continue
                # http://svtplay8m-f.akamaihd.net/z/se/krypterat/20120830/254218/LILYHAMMER-003A-mp4-,c,d,b,e,-v1-4bc7ecc090b19c82.mp4.csmil/manifest.f4m?hdcore=2.8.0&g=TZOMVRTEILSE
                #videoDataUrl = video["url"]
                # videoUrl = "%s?hdcore=2.8.0&g=TZOMVRTEILSE" % (videoDataUrl,)
                #videoUrl = "%s?hdcore=2.10.3&g=IJGTWSVWPPKH" % (videoDataUrl,)

                # metaData = UriHandler.Open(videoUrl, proxy=self.proxy, referer=self.swfUrl)
                # Logger.Debug(metaData)

                # The referer seems to be unimportant
                # header = "referer=%s" % (urllib.quote(self.swfUrl),)
                # videoUrl = "%s|%s" % (videoUrl, header)
                #mediaPart.AppendMediaStream(videoUrl, video[1])

            else:
                Logger.Info("SVTPlay.se standard HTTP stream found.")
                # else just use the URL
                mediaPart.AppendMediaStream(video["url"], video["bitrate"])

        subtitle = Regexer.DoRegex('"url":"([^"]+.wsrt)"|"url":"(http://media.svt.se/download/[^"]+.m3u8)', data)
        for sub in subtitle:
            if sub[-1]:
                Logger.Info("Found M3u8 subtitle, replacing with WSRT")
                start, name, index = sub[-1].rsplit("/", 2)
                subUrl = "%s/%s/%s.wsrt" % (start, name, name)
            else:
                subUrl = sub[0]
            mediaPart.Subtitle = subtitlehelper.SubtitleHelper.DownloadSubtitle(subUrl, format="srt", proxy=self.proxy)

        item.complete = True
        return item
Code Example #26
    def UpdateChannelItem(self, item):
        """ Updates an existing MediaItem with more data.

        Arguments:
        item : MediaItem - the MediaItem that needs to be updated

        Returns:
        The original item with more data added to its properties.

        Used to update incomplete MediaItems (self.complete = False). This
        could include opening the item's URL to fetch more data and then processing
        that data or retrieving its real media-URL.

        The method should at least:
        * cache the thumbnail to disk (use self.noImage if no thumb is available).
        * set at least one MediaItemPart with a single MediaStream.
        * set self.complete = True.

        If the returned item does not have a MediaItemPart then the self.complete flag
        will automatically be set back to False.

        """
        Logger.Debug('Starting UpdateChannelItem for %s (%s)', item.name, self.channelName)

        data = UriHandler.Open(item.url, proxy=self.proxy)

        json = JsonHelper(data, logger=Logger.Instance())
        videos = json.GetValue("videoReferences")
        Logger.Trace(videos)

        item.MediaItemParts = []
        part = item.CreateNewEmptyMediaPart()
        spoofIp = self._GetSetting("spoof_ip", "0.0.0.0")
        if spoofIp is not None:
            part.HttpHeaders["X-Forwarded-For"] = spoofIp

        for video in videos:
            # bitrate = video['bitrate']
            url = video['url']
            # player = video['playerType']
            # if "ios" in player:
            #     bitrate += 1

            if "akamaihd" in url and "f4m" in url:
                continue
                # these are not supported as they return a 503 error
                #noinspection PyUnreachableCode
                #url = url.replace("/z/", "/i/").replace("/manifest.f4m", "/master.m3u8")

            if len(filter(lambda s: s.Url == url, part.MediaStreams)) > 0:
                Logger.Debug("Skippping duplicate Stream url: %s", url)
                continue

            if "m3u8" in url:
                for s, b in M3u8.GetStreamsFromM3u8(url, proxy=self.proxy, headers=part.HttpHeaders):
                    part.AppendMediaStream(s, b)
            else:
                part.AppendMediaStream(url, 0)

        item.complete = True
        return item
Code Example #27
    def UpdateAddOnSettingsWithChannels(channels, config):
        """ updats the settings.xml to include all the channels


        Arguments:
        channels : List<channels> - The channels to add to the settings.xml
        config   : Config         - The configuration object

        """

        # sort the channels
        channels.sort()

        # Then we read the original file
        filenameTemplate = os.path.join(config.rootDir, "resources",
                                        "settings_template.xml")
        # noinspection PyArgumentEqualDefault
        settingsXml = open(filenameTemplate, "r")
        contents = settingsXml.read()
        settingsXml.close()

        newContents = AddonSettings.__UpdateAddOnSettingsWithLanguages(
            contents, channels)
        newContents = AddonSettings.__UpdateAddOnSettingsWithChannelSelection(
            newContents, channels)
        newContents, settingsOffsetForVisibility = \
            AddonSettings.__UpdateAddOnSettingsWithChannelSettings(newContents, channels)
        newContents = AddonSettings.__UpdateAddOnSettingsWithProxies(
            newContents, channels, settingsOffsetForVisibility)

        # Now fill the templates, we only import here due to performance penalties of the
        # large number of imports.
        from helpers.templatehelper import TemplateHelper
        th = TemplateHelper(Logger.Instance(), template=newContents)
        newContents = th.Transform()

        # No more spoofing or proxies
        newContents = newContents.replace('<!-- start of proxy selection -->',
                                          '<!-- start of proxy selection')
        newContents = newContents.replace('<!-- end of proxy selection -->',
                                          'end of proxy selection -->')
        newContents = newContents.replace('<!-- start of proxy settings -->',
                                          '<!-- start of proxy settings')
        newContents = newContents.replace('<!-- end of proxy settings -->',
                                          'end of proxy settings -->')

        # Finally we insert the new XML into the old one
        filename = os.path.join(config.rootDir, "resources", "settings.xml")
        filenameTemp = os.path.join(config.rootDir, "resources",
                                    "settings.tmp.xml")
        try:
            # Backup the user profile settings.xml because sometimes it gets reset. Because in some
            # concurrency situations, Kodi might decide to think we have no settings and just
            # erase all user settings.
            userSettings = os.path.join(Config.profileDir, "settings.xml")
            userSettingsBackup = os.path.join(Config.profileDir,
                                              "settings.old.xml")
            Logger.Debug("Backing-up user settings: %s", userSettingsBackup)
            if os.path.isfile(userSettings):
                shutil.copy(userSettings, userSettingsBackup)
            else:
                Logger.Warning("No user settings found at: %s", userSettings)

            # Update the addonsettings.xml by first updating a temp xml file.
            Logger.Debug("Creating new settings.xml file: %s", filenameTemp)
            Logger.Trace(newContents)
            settingsXml = open(filenameTemp, "w+")
            settingsXml.write(newContents)
            settingsXml.close()
            Logger.Debug("Replacing existing settings.xml file: %s", filename)
            shutil.move(filenameTemp, filename)

            # restore the user profile settings.xml file when needed
            if os.path.isfile(
                    userSettings) and os.stat(userSettings).st_size != os.stat(
                        userSettingsBackup).st_size:
                Logger.Critical(
                    "User settings.xml was overwritten during setttings update. Restoring from %s",
                    userSettingsBackup)
                shutil.copy(userSettingsBackup, userSettings)
        except:
            Logger.Error(
                "Something went wrong trying to update the settings.xml",
                exc_info=True)
            try:
                settingsXml.close()
            except:
                pass

            # clean up the temp file
            if os.path.isfile(filenameTemp):
                os.remove(filenameTemp)

            # restore original settings
            settingsXml = open(filenameTemp, "w+")
            settingsXml.write(contents)
            settingsXml.close()
            shutil.move(filenameTemp, filename)
            return

        Logger.Info("Settings.xml updated succesfully. Reloading settings.")
        AddonSettings.__LoadSettings()
        return
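The temp-file-then-move pattern used above is worth seeing in isolation: write the new contents next to the target, swap the files, and fall back to a backup if anything fails. A minimal, self-contained sketch (replace_file_safely and the file names are hypothetical; only the standard library is used):

import os
import shutil

def replace_file_safely(filename, new_contents):
    temp_name = "%s.tmp" % (filename,)
    backup_name = "%s.old" % (filename,)

    # keep a backup so the original can be restored if the update fails
    if os.path.isfile(filename):
        shutil.copy(filename, backup_name)

    try:
        # write to a temp file first, so a crash never leaves a truncated target behind
        settings_file = open(temp_name, "w")
        settings_file.write(new_contents)
        settings_file.close()
        shutil.move(temp_name, filename)
    except:
        # clean up the temp file and restore the backup on any error
        if os.path.isfile(temp_name):
            os.remove(temp_name)
        if os.path.isfile(backup_name):
            shutil.copy(backup_name, filename)
        raise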
コード例 #28
0
        def __GetOpener(self,
                        url,
                        proxy=None,
                        userAgent=None,
                        headOnly=False,
                        disableCaching=False,
                        referer=None,
                        additionalHeaders=None,
                        acceptCompression=True):
            """Get's a urllib2 URL opener with cookie jar

            Arguments:
            url               : string        - The URL to get an opener for

            Keyword Arguments:
            proxy             : [opt] string  - The address and port (proxy.address.ext:port) of
                                                a proxy server that should be used.
            headOnly          : [opt] boolean - Indication that only the header is needed.
            disableCaching    : [opt] boolean - Indication to disable the caching.
            referer           : [opt] string  - The referer URL
            additionalHeaders : [opt] dict    - A dictionary of additional headers
            userAgent         : [opt] string  - A custom User-Agent string to use for the request
            acceptCompression : [opt] boolean - Indication whether compressed responses are accepted

            Returns:
            An urllib2 OpenerDirector object for handling URL requests.

            """

            # create an empty dict, as it cannot be used as a default parameter
            # http://pythonconquerstheuniverse.wordpress.com/category/python-gotchas/
            if not additionalHeaders:
                additionalHeaders = dict()

            headHandler = HttpHeadHandler()

            cacheHandler = None
            if self.useCaching:
                if disableCaching:
                    Logger.Info("Disabling caching for this request")
                else:
                    cacheHandler = cachehttphandler.CacheHttpHandler(
                        self.cacheStore, logger=Logger.Instance())

            urlHandlers = [urllib2.HTTPCookieProcessor(self.cookieJar)]

            if proxy is None:
                pass

            elif not proxy.UseProxyForUrl(url):
                Logger.Debug("Not using proxy due to filter mismatch")

            elif proxy.Scheme == "http":
                Logger.Debug("Using a http(s) %s", proxy)
                urlHandlers.append(proxy.GetSmartProxyHandler())
                # an http scheme proxy will most likely also work for https, so add a https handler as well
                urlHandlers.append(proxy.GetSmartProxyHandler("https"))

            elif proxy.Scheme == "dns":
                Logger.Debug("Using an alternative DNS %s", proxy)
                # noinspection PyTypeChecker
                urlHandlers.append(DnsHTTPHandler)
                # noinspection PyTypeChecker
                urlHandlers.append(DnsHTTPSHandler)

                # now we cache the DNS result
                resolver = DnsQuery(proxy.Proxy)
                host = resolver.GetHost(url)
                results = resolver.ResolveAddress(host)
                Logger.Debug("Resolved DNS %s to %s", host, results)
                result = resolver.ResolveAddress(host, (1, ))[-1][1]
                # store it in the cache
                self.dnsCache[host] = result
                Logger.Debug("Cached DNS for %s to %s", host, result)

            # create the opener
            uriOpener = urllib2.build_opener(*urlHandlers)

            if headOnly:
                uriOpener.add_handler(headHandler)

            # add the compression handler before the cache in the
            # chain. That way we store decompressed data and save
            # cpu time.
            if acceptCompression and self.useCompression:
                compressionHandler = HttpCompressionHandler()
                uriOpener.add_handler(compressionHandler)

            if cacheHandler:
                uriOpener.add_handler(cacheHandler)

            # let's add some headers
            headers = []

            # change the user agent (thanks to VincePirez @ xbmc forums)
            Logger.Trace(additionalHeaders)
            if 'User-Agent' in additionalHeaders:
                Logger.Info("Using UserAgent from AdditionalHeaders: %s",
                            additionalHeaders['User-Agent'])
            else:
                if userAgent is None:
                    user_agent = "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-GB; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13 (.NET CLR 3.5.30729)"
                else:
                    Logger.Info("Using custom UserAgent for url: %s",
                                userAgent)
                    user_agent = userAgent
                # user_agent = "XOT/3.0 (compatible; XBMC; U)"
                # uriOpener.addheaders = [('User-Agent', user_agent)]
                # headers.append(('User-Agent', user_agent))
                additionalHeaders['User-Agent'] = user_agent

            # add the custom referer
            if referer is not None:
                Logger.Info("Adding custom Referer: '%s'", referer)
                headers.append(('referer', referer))

            # no "if additionalHeaders:" check needed -> there is always a user agent by now
            for header in additionalHeaders:
                headers.append((header, additionalHeaders[header]))

            uriOpener.addheaders = headers

            return uriOpener
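Stripped of the proxy, caching and compression handlers, the core of the construction above is a standard urllib2 opener with a cookie jar and default headers. A self-contained sketch (the URL and header values are placeholders):

import cookielib
import urllib2

cookie_jar = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))
opener.addheaders = [
    ("User-Agent", "Mozilla/5.0 (compatible; example)"),
    ("Referer", "http://www.example.com/"),
]

# response = opener.open("http://www.example.com/page")  # placeholder URL
# data = response.read()
# response.close()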
コード例 #29
0
    def ProcessFolderList(self, item=None):
        """Process the selected item and get's it's child items using the available dataparsers.

        Arguments:
        item : [opt] MediaItem - the selected item

        Returns:
        A list of MediaItems that form the children of the <item>.

        Accepts an <item> and returns a list of MediaItems with at least name & url
        set. The following actions are done:

        * determining the correct parsers to use
        * call a pre-processor
        * parsing the data with the parsers
        * calling the creators for item creations

        If the item is None, we assume that we are dealing with the first call for this channel and the mainlist uri
        is used.

        """

        items = []
        self.parentItem = item

        if item is None:
            Logger.Info("ProcessFolderList :: No item was specified. Assuming it was the main channel list")
            url = self.mainListUri
            headers = self.httpHeaders
        elif len(item.items) > 0:
            return item.items
        else:
            url = item.url
            headers = item.HttpHeaders

        if url.startswith("http:") or url.startswith("https:") or url.startswith("file:"):
            data = UriHandler.Open(url, proxy=self.proxy, additionalHeaders=headers)
        elif url.startswith("#"):
            data = ""
        elif url == "searchSite":
            Logger.Debug("Starting to search")
            return self.SearchSite()
        else:
            Logger.Debug("Unknown URL format. Setting data to ''")
            data = ""

        # Determine the handlers and process
        dataParsers = self.__GetDataParsers(url)

        # first check if there is a generic pre-processor
        preProcs = filter(lambda p: p.IsGenericPreProcessor(), dataParsers)
        numPreProcs = len(preProcs)
        Logger.Trace("Processing %s Generic Pre-Processors DataParsers", numPreProcs)
        if numPreProcs > 1:
            # warn for strange results if more than 1 generic pre-processor is present.
            Logger.Warning("More than one Generic Pre-Processor is found (%s). They are being processed in the "
                           "order that Python likes which might result in unexpected result.", numPreProcs)

        for dataParser in preProcs:
            # remove it from the list
            dataParsers.remove(dataParser)

            # and process it
            Logger.Debug("Processing %s", dataParser)
            (data, preItems) = dataParser.PreProcessor(data)
            items += preItems

        # Then the other handlers
        Logger.Trace("Processing %s Normal DataParsers", len(dataParsers))
        handlerJson = None
        for dataParser in dataParsers:
            Logger.Debug("Processing %s", dataParser)

            # Check for preprocessors
            if dataParser.PreProcessor:
                Logger.Debug("Processing DataParser.PreProcessor")
                (handlerData, preItems) = dataParser.PreProcessor(data)
                items += preItems
            else:
                handlerData = data

            Logger.Debug("Processing DataParser.Parser")
            if dataParser.Parser is None or (dataParser.Parser == "" and not dataParser.IsJson):
                if dataParser.Creator:
                    Logger.Warning("No <parser> found for %s. Skipping.", dataParser.Creator)
                continue

            if dataParser.IsJson:
                if handlerJson is None:
                    # Cache the json requests to improve performance
                    Logger.Trace("Caching JSON results for Dataparsing")
                    handlerJson = JsonHelper(handlerData, Logger.Instance())

                Logger.Trace(dataParser.Parser)
                parserResults = handlerJson.GetValue(fallback=[], *dataParser.Parser)

                if not isinstance(parserResults, (tuple, list)):
                    # if there is just one match, return that as a list
                    parserResults = [parserResults]
            else:
                parserResults = Regexer.DoRegex(dataParser.Parser, handlerData)

            Logger.Debug("Processing DataParser.Creator for %s items", len(parserResults))
            for parserResult in parserResults:
                handlerItem = dataParser.Creator(parserResult)
                if handlerItem is not None:
                    items.append(handlerItem)

        # should we exclude DRM/GEO?
        hideGeoLocked = AddonSettings.HideGeoLockedItemsForLocation(self.language)
        hideDrmProtected = AddonSettings.HideDrmItems()
        hidePremium = AddonSettings.HidePremiumItems()
        hideFolders = AddonSettings.HideRestrictedFolders()
        typeToExclude = None
        if not hideFolders:
            typeToExclude = "folder"

        oldCount = len(items)
        if hideDrmProtected:
            Logger.Debug("Hiding DRM items")
            items = filter(lambda i: not i.isDrmProtected or i.type == typeToExclude, items)
        if hideGeoLocked:
            Logger.Debug("Hiding GEO Locked items due to GEO region: %s", self.language)
            items = filter(lambda i: not i.isGeoLocked or i.type == typeToExclude, items)
        if hidePremium:
            Logger.Debug("Hiding Premium items")
            items = filter(lambda i: not i.isPaid or i.type == typeToExclude, items)
            # items = filter(lambda i: not i.isPaid or i.type == "folder", items)

        if len(items) != oldCount:
            Logger.Info("Hidden %s items due to DRM/GEO/Premium filter (Hide Folders=%s)",
                        oldCount - len(items), hideFolders)

        # Check for grouping or not
        limit = AddonSettings.GetListLimit()
        folderItems = filter(lambda x: x.type.lower() == "folder", items)

        # we should also de-duplicate before calculating
        folderItems = list(set(folderItems))
        folders = len(folderItems)

        if 0 < limit < folders:
            # let's filter them by alphabet if the number is exceeded
            Logger.Debug("Creating Groups for list exceeding '%s' folder items. Total folders found '%s'.",
                         limit, folders)
            other = LanguageHelper.GetLocalizedString(LanguageHelper.OtherChars)
            titleFormat = LanguageHelper.GetLocalizedString(LanguageHelper.StartWith)
            result = dict()
            nonGrouped = []
            # prefixes = ("de", "het", "the", "een", "a", "an")

            for subItem in items:
                if subItem.dontGroup or subItem.type != "folder":
                    nonGrouped.append(subItem)
                    continue

                char = subItem.name[0].upper()
                # Should we de-prefix?
                # for p in prefixes:
                #     if subItem.name.lower().startswith(p + " "):
                #         char = subItem.name[len(p) + 1][0].upper()

                if char.isdigit():
                    char = "0-9"
                elif not char.isalpha():
                    char = other

                if char not in result:
                    Logger.Trace("Creating Grouped item from: %s", subItem)
                    if char == other:
                        item = mediaitem.MediaItem(titleFormat.replace("'", "") % (char,), "")
                    else:
                        item = mediaitem.MediaItem(titleFormat % (char.upper(),), "")
                    item.thumb = self.noImage
                    item.complete = True
                    # item.SetDate(2100 + ord(char[0]), 1, 1, text='')
                    result[char] = item
                else:
                    item = result[char]
                item.items.append(subItem)

            items = nonGrouped + result.values()

        Logger.Trace("Found '%s' items", len(items))
        return list(set(items))
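The alphabetic grouping at the end lends itself to a tiny standalone illustration: bucket titles by their first character, collapsing digits into "0-9" and anything non-alphabetic into an "Other" bucket. A sketch (group_titles is a hypothetical helper mirroring the rules above):

def group_titles(titles, limit):
    # only group when the configured limit is exceeded, as in ProcessFolderList
    if not (0 < limit < len(titles)):
        return {None: list(titles)}

    groups = {}
    for title in titles:
        char = title[0].upper()
        if char.isdigit():
            char = "0-9"
        elif not char.isalpha():
            char = "Other"
        groups.setdefault(char, []).append(title)
    return groups

# group_titles(["24 Hours", "Avatar", "#Tagged", "Bones"], limit=2)
# -> {"0-9": ["24 Hours"], "A": ["Avatar"], "Other": ["#Tagged"], "B": ["Bones"]}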
コード例 #30
0
        Logger.Debug("Found %s substreams in M3U8", len(streams))
        return streams


if __name__ == "__main__":
    from debug.initdebug import DebugInitializer
    DebugInitializer()

    # url = "http://tv4play-i.akamaihd.net/i/mp4root/2014-01-27/Bingolotto2601_2534830_,T6MP43,T6MP48,T6MP415,_.mp4.csmil/master.m3u8"
    # url = "http://iphone.streampower.be/een_nogeo/_definst_/2013/08/1000_130830_placetobe_marjolein_Website_Een_M4V.m4v/playlist.m3u8"
    # url = "http://livestreams.omroep.nl/live/npo/regionaal/rtvnoord2/rtvnoord2.isml/rtvnoord2.m3u8?protection=url"  # appendQueryString
    # url = "https://smoote1a.omroep.nl/urishieldv2/l2cm221c27e6ca0058c1adda000000.e6592cb04974c5ff/live/npo/tvlive/npo3/npo3.isml/npo3.m3u8"
    # url = "http://embed.kijk.nl/api/playlist/9JKFARNrJEz_dbzyr6.m3u8?user_token=S0nHgrI3Sh16XSxOpLm7m2Xt7&app_token=CgZzYW5vbWESEjlKS0ZBUk5ySkV6X2RienlyNhoOMTkzLjExMC4yMzQuMjIiGVMwbkhnckkzU2gxNlhTeE9wTG03bTJYdDcotIDZpKsrMgJoADoERlZPREIDU0JTShI5SktGQVJOckpFel9kYnp5cjY%3D%7CmGGy/TM5eOmoSCNwG2I4bGKvMBOvBD9YsadprKSVqv4%3D&base_url=http%3A//emp-prod-acc-we.ebsd.ericsson.net/sbsgroup"
    # url = "http://manifest.us.rtl.nl/rtlxl/v166/network/pc/adaptive/components/soaps/theboldandthebeautiful/338644/4c1b51b9-864d-31fe-ba53-7ea6da0b614a.ssm/4c1b51b9-864d-31fe-ba53-7ea6da0b614a.m3u8"
    url = "http://svtplay2r-f.akamaihd.net/i/world/open/20170307/1377039-008A/PG-1377039-008A-AGENDA2017-03_,988,240,348,456,636,1680,2796,.mp4.csmil/master.m3u8"
    url = "http://livestreams.omroep.nl/live/npo/regionaal/rtvnoord2/rtvnoord2.isml/rtvnoord2.m3u8?protection=url"
    url = "https://ondemand-w.lwc.vrtcdn.be/content/vod/vid-dd0ddbe5-7a83-477a-80d0-6e9c75369c1e-CDN_2/vid-dd0ddbe5-7a83-477a-80d0-6e9c75369c1e-CDN_2_nodrm_a635917e-abe5-49d4-a202-e81b6cfa08a0.ism/.m3u8?test=1"  # audio streams
    results = M3u8.GetStreamsFromM3u8(url,
                                      DebugInitializer.Proxy,
                                      appendQueryString=True,
                                      mapAudio=True)
    results.sort(lambda x, y: cmp(int(x[1]), int(y[1])))
    a = None
    for s, b, a in results:
        if s.count("://") > 1:
            raise Exception("Duplicate protocol in url: %s", s)
        print "%s - %s (%s)" % (b, s, a)
        Logger.Info("%s - %s (%s)", b, s, a)

    Logger.Instance().CloseLog()