Example #1
    def __extract_session_data(self, logon_data):
        """

        :param logon_data:
        :return:
        :rtype: AuthenticationResult

        """

        logon_json = json.loads(logon_data)
        result_code = logon_json.get("statusCode")
        Logger.trace("Logging in returned: %s", result_code)
        if result_code != 200:
            Logger.error("Error loging in: %s - %s",
                         logon_json.get("errorMessage"),
                         logon_json.get("errorDetails"))
            return AuthenticationResult(None)

        user_name = logon_json.get("profile", {}).get("email") or None

        signature_setting = logon_json.get("sessionInfo",
                                           {}).get("login_token")
        if signature_setting:
            Logger.info("Found 'login_token'. Saving it.")
            AddonSettings.set_setting(self.__setting_signature,
                                      signature_setting.split("|")[0],
                                      store=LOCAL)

        self.__signature = logon_json.get("UIDSignature")
        self.__user_id = logon_json.get("UID")
        self.__signature_timestamp = logon_json.get("signatureTimestamp")

        # TODO: is this correct?
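        # Each .get(..., {}) returns an empty dict when a key is missing,
        # so the final .get("id") simply yields None instead of raising a KeyError.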
        has_premium = logon_json.\
            get("data", {}).\
            get("authorization", {}).\
            get("rtlxl_premium", {}).\
            get("subscription", {}).\
            get("id") == "premium"

        # The channels are not interesting
        # premium_channels = logon_json.get_value(
        #     "data", "authorization", "Stievie_free", "channels")
        return AuthenticationResult(user_name, has_premium=has_premium)
Example #2
    def create_folder_item(self, result_set):
        """ Creates a MediaItem of type 'folder' using the result_set from the regex.

        This method creates a new MediaItem from the Regular Expression or Json
        results <result_set>. The method should be implemented by derived classes
        and is specific to the channel.

        :param list[str]|dict[str,str] result_set: The result_set of the self.episodeItemRegex

        :return: A new MediaItem of type 'folder'.
        :rtype: MediaItem|None

        """

        Logger.trace(result_set)

        # Validate the input and raise errors
        if not isinstance(result_set, dict):
            Logger.critical(
                "No Dictionary as a result_set. Implement a custom create_folder_item"
            )
            raise NotImplementedError(
                "No Dictionary as a result_set. Implement a custom create_folder_item"
            )

        elif "title" not in result_set or "url" not in result_set:
            Logger.warning("No ?P<title> or ?P<url> in result_set")
            raise LookupError("No ?P<title> or ?P<url> in result_set")

        # The URL
        url = self._prefix_urls(result_set["url"])

        # The title
        title = result_set["title"]
        if title.isupper():
            title = title.title()

        item = MediaItem(title, url)
        item.description = result_set.get("description", "")
        item.thumb = result_set.get("thumburl", "")
        item.type = 'folder'
        item.HttpHeaders = self.httpHeaders
        item.complete = True
        return item
Example #3
class LogAction(AddonAction):
    def __init__(self, parameter_parser):
        super(LogAction, self).__init__(parameter_parser)

    @LockWithDialog(logger=Logger.instance())
    def execute(self):
        """ Send log files via Pastbin or Gist. """

        from resources.lib.helpers.logsender import LogSender
        sender_mode = 'hastebin'
        log_sender = LogSender(Config.logSenderApi,
                               logger=Logger.instance(),
                               mode=sender_mode)
        try:
            title = LanguageHelper.get_localized_string(
                LanguageHelper.LogPostSuccessTitle)
            url_text = LanguageHelper.get_localized_string(
                LanguageHelper.LogPostLogUrl)
            files_to_send = [
                Logger.instance().logFileName,
                Logger.instance().logFileName.replace(".log", ".old.log")
            ]
            paste_url = log_sender.send_file(Config.logFileNameAddon,
                                             files_to_send[0])
            XbmcWrapper.show_dialog(title, url_text % (paste_url, ))
        except Exception as e:
            Logger.error("Error sending %s",
                         Config.logFileNameAddon,
                         exc_info=True)

            title = LanguageHelper.get_localized_string(
                LanguageHelper.LogPostErrorTitle)
            error_text = LanguageHelper.get_localized_string(
                LanguageHelper.LogPostError)
            error = error_text % (str(e), )
            XbmcWrapper.show_dialog(title, error.strip(": "))
Example #4
    def add_page_items(self, data):
        """ Performs pre-process actions for data processing.

        Accepts data from the process_folder_list method, BEFORE the items are
        processed. Allows setting of parameters (like title etc) for the channel.
        Inside this method the <data> could be changed and additional items can
        be created.

        The return values should always be instantiated in at least ("", []).

        :param str data: The retrieved data that was loaded for the current item and URL.

        :return: A tuple of the data and a list of MediaItems that were generated.
        :rtype: tuple[str|JsonHelper,list[MediaItem]]

        """

        Logger.info("Performing Pre-Processing")
        items = []
        json = JsonHelper(data)
        total_results = json.get_value("totalResults")
        from_value = json.get_value("from")
        size_value = json.get_value("size")

        if from_value + size_value < total_results:
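            # There are more results beyond the current "from + size" window, so add a next-page item.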
            more_pages = LanguageHelper.get_localized_string(
                LanguageHelper.MorePages)
            url = self.parentItem.url.split('?')[0]
            url = "%s?size=%s&from=%s&sort=Nieuwste" % (
                url, size_value, from_value + size_value)
            Logger.debug("Adding next-page item from %s to %s",
                         from_value + size_value,
                         from_value + size_value + size_value)

            next_page = MediaItem(more_pages, url)
            next_page.icon = self.parentItem.icon
            next_page.fanart = self.parentItem.fanart
            next_page.thumb = self.parentItem.thumb
            next_page.dontGroup = True
            items.append(next_page)

        Logger.debug("Pre-Processing finished")
        return json, items
    def __get_application_key(self):
        """ Gets the decrypted application key that is used for all the encryption.

        :return: The decrypted application key that is used for all the encryption.
        :rtype: bytes

        """

        application_key_encrypted = AddonSettings.get_setting(Vault.__APPLICATION_KEY_SETTING, store=LOCAL)
        # If the key was never in the local store, the value is None. It is "" if the key was reset.
        if application_key_encrypted is None:
            application_key_encrypted = AddonSettings.get_setting(Vault.__APPLICATION_KEY_SETTING, store=KODI)
            if not application_key_encrypted:
                return None

            Logger.info("Moved ApplicationKey to local storage")
            AddonSettings.set_setting(Vault.__APPLICATION_KEY_SETTING, application_key_encrypted, store=LOCAL)

        # Still no application key? Then there was no key!
        if application_key_encrypted == "" or application_key_encrypted is None:
            return None

        vault_incorrect_pin = LanguageHelper.get_localized_string(LanguageHelper.VaultIncorrectPin)
        pin = XbmcWrapper.show_key_board(
            heading=LanguageHelper.get_localized_string(LanguageHelper.VaultInputPin),
            hidden=True)
        if not pin:
            XbmcWrapper.show_notification("", vault_incorrect_pin, XbmcWrapper.Error)
            raise RuntimeError("Incorrect Retrospect PIN specified")
        pin_key = self.__get_pbk(pin)
        application_key = self.__decrypt(application_key_encrypted, pin_key)
        if not application_key.startswith(Vault.__APPLICATION_KEY_SETTING):
            Logger.critical("Invalid Retrospect PIN")
            XbmcWrapper.show_notification("", vault_incorrect_pin, XbmcWrapper.Error)
            raise RuntimeError("Incorrect Retrospect PIN specified")

        application_key_value = application_key[len(Vault.__APPLICATION_KEY_SETTING) + 1:]
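        # The "+ 1" skips the single character (presumably a separator) that follows the setting-name prefix verified above.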
        Logger.info("Successfully decrypted the ApplicationKey.")
        if PY2:
            return application_key_value

        # We return bytes on Python 3
        return application_key_value.encode()
Example #6
    def __init__(self, cache_dir=None, web_time_out=30, cookie_jar=None,
                 ignore_ssl_errors=False):
        """ Initialises the UriHandler class

        Keyword Arguments:
        :param str cache_dir:         A path for http caching. If specified, caching will be used.
        :param int web_time_out:      Timeout for requests in seconds
        :param str cookie_jar:        The path to the cookie jar (in case of file storage)
        :param ignore_ssl_errors:     Ignore any SSL certificate errors.

        """

        self.id = int(time.time())

        if cookie_jar:
            self.cookieJar = MozillaCookieJar(cookie_jar)
            if not os.path.isfile(cookie_jar):
                self.cookieJar.save()
            self.cookieJar.load()
            self.cookieJarFile = True
        else:
            self.cookieJar = CookieJar()
            self.cookieJarFile = False

        self.cacheDir = cache_dir
        self.cacheStore = None
        if cache_dir:
            self.cacheStore = StreamCache(cache_dir)
            Logger.debug("Opened %s", self.cacheStore)
        else:
            Logger.debug("No cache-store provided. Cached disabled.")

        self.userAgent = "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-GB; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13 (.NET CLR 3.5.30729)"
        self.webTimeOut = web_time_out                # max duration of request
        self.ignoreSslErrors = ignore_ssl_errors      # ignore SSL errors
        if self.ignoreSslErrors:
            Logger.warning("Ignoring all SSL errors in Python")

        # status of the most recent call
        self.status = UriStatus(code=0, url=None, error=False, reason=None)

        # for download animation
        self.__animationIndex = -1
    def create_api_typed_item(self, result_set, add_parent_title=False):
        """ Creates a new MediaItem based on the __typename attribute.

        This method creates a new MediaItem from the Regular Expression or Json
        results <result_set>. The method should be implemented by derived classes
        and is specific to the channel.

        :param list[str]|dict result_set: The result_set of the self.episodeItemRegex
        :param bool add_parent_title: Should the parent's title be included?

        :return: A new MediaItem of type 'folder'.
        :rtype: MediaItem|None

        """

        api_type = result_set["__typename"].lower()
        custom_type = result_set.get("type")
        Logger.trace("%s: %s", api_type, result_set)

        item = None
        if custom_type is not None:
            # Use the kijk.nl custom type
            if custom_type == "EPISODE":
                item = self.create_api_episode_type(result_set)
            elif custom_type == "SERIES":
                item = self.create_api_program_type(result_set)
            elif custom_type == "MOVIE":
                item = self.create_api_movie_type(result_set)
            else:
                Logger.warning("Missing type: %s", api_type)
                return None
            return item

        if api_type == "program":
            item = self.create_api_program_type(result_set)
        elif api_type == "tvseason":
            item = self.create_api_tvseason_type(result_set)
        else:
            Logger.warning("Missing type: %s", api_type)
            return None

        return item
    def add_clips(self, data):
        """ Add an items that lists clips.

        The return values should always be instantiated in at least ("", []).

        :param str data: The retrieved data that was loaded for the current item and URL.

        :return: A tuple of the data and a list of MediaItems that were generated.
        :rtype: tuple[str|JsonHelper,list[MediaItem]]

        """

        Logger.info("Adding Clips Pre-Processing")
        items = []

        # if the main list was retrieved using json (the current data is json), just determine
        # the clip URL
        clip_url = None
        if data.lstrip().startswith("{"):
            if self.parentItem.url.endswith("type=program"):
                # http://playapi.mtgx.tv/v3/videos?format=6723&order=-airdate&type=program
                # http://playapi.mtgx.tv/v3/videos?format=6723&order=-updated&type=clip" % (data_id,)
                clip_url = self.parentItem.url.replace("type=program",
                                                       "type=clip")
        else:
            # now we determine the ID and load the json data
            data_id = Regexer.do_regex(r'data-format-id="(\d+)"', data)[-1]
            Logger.debug("Found FormatId = %s", data_id)
            program_url = \
                "http://playapi.mtgx.tv/v3/videos?format=%s&order=-airdate&type=program" % (data_id,)
            data = UriHandler.open(program_url, proxy=self.proxy)
            clip_url = \
                "http://playapi.mtgx.tv/v3/videos?format=%s&order=-updated&type=clip" % (data_id,)

        if clip_url is not None:
            clip_title = LanguageHelper.get_localized_string(
                LanguageHelper.Clips)
            clip_item = MediaItem("\a.: %s :." % (clip_title, ), clip_url)
            items.append(clip_item)

        Logger.debug("Pre-Processing finished")
        return data, items
    def update_video_item(self, item):
        """ Updates an existing MediaItem with more data.

        Used to update non-complete MediaItems (self.complete = False). This
        could include opening the item's URL to fetch more data and then process that
        data or retrieve its real media-URL.

        The method should at least:
        * cache the thumbnail to disk (use self.noImage if no thumb is available).
        * set at least one MediaItemPart with a single MediaStream.
        * set self.complete = True.

        if the returned item does not have a MediaItemPart then the self.complete flag
        will automatically be set back to False.

        :param MediaItem item: the original MediaItem that needs updating.

        :return: The original item with more data added to its properties.
        :rtype: MediaItem

        """

        Logger.debug('Starting update_video_item for %s (%s)', item.name, self.channelName)

        data = UriHandler.open(item.url, proxy=self.proxy, additional_headers=item.HttpHeaders)

        url = Regexer.do_regex(self.mediaUrlRegex, data)[-1]
        part = MediaItemPart(item.name, url)
        item.MediaItemParts.append(part)

        Logger.info('finishing update_video_item. MediaItems are %s', item)

        if not item.thumb and self.noImage:
            # no thumb was set yet, so fall back to the channel's default image
            Logger.debug("Setting thumb to %s", self.noImage)
            item.thumb = self.noImage

        if not item.has_media_item_parts():
            item.complete = False
        else:
            item.complete = True
        return item
Example #10
    def execute(self):
        """ Show the favourites (for a channel). """

        Logger.debug("Plugin::show_favourites")

        if self.__channel is None:
            Logger.info("Showing all favourites")
        else:
            Logger.info("Showing favourites for: %s", self.__channel)

        # Local import for performance
        from resources.lib.favourites import Favourites
        f = Favourites(Config.favouriteDir)
        favs = f.list(self.__channel)

        # Execute the process folder action
        sub_action = FolderAction(parameter_parser=self.parameter_parser,
                                  channel=self.__channel,
                                  favorites=favs)
        sub_action.execute()
Example #11
    def show_favourites(self, channel):
        """ Show the favourites (for a channel).

        :param ChannelInfo|None channel:    The channel to show favourites for.
                                            Might be None to show all.

        """

        Logger.debug("Plugin::show_favourites")

        if channel is None:
            Logger.info("Showing all favourites")
        else:
            Logger.info("Showing favourites for: %s", channel)

        # Local import for performance
        from resources.lib.favourites import Favourites
        f = Favourites(Config.favouriteDir)
        favs = f.list(channel)
        self.process_folder_list(favs)
Example #12
    def get_register():
        """ Returns the current active channel register. """

        valid_for = datetime.timedelta(minutes=1)
        # In Kodi Leia the Python instance is not killed and the ChannelRegister stays alive.
        # This might cause some issues. So better to let it expire after some time. But to make it
        # not happen during a user's browsing session, we use sliding expiration of 1 minute.

        if not ChannelIndex.__channelIndexer:
            Logger.debug("Creating a new ChannelIndex-er.")
            ChannelIndex.__channelIndexer = ChannelIndex()
        elif ChannelIndex.__channelIndexer.validAt + valid_for < datetime.datetime.now():
            Logger.debug("Existing ChannelIndex-er expired. Creating a new ChannelIndex-er.")
            ChannelIndex.__channelIndexer = ChannelIndex()
        else:
            Logger.debug("Using an existing %s.", ChannelIndex.__channelIndexer)
            # We are using a sliding expiration, so we should let the expiration slide.
            ChannelIndex.__channelIndexer.validAt = datetime.datetime.now()

        return ChannelIndex.__channelIndexer
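
# A minimal, generic sketch of the sliding-expiration pattern that get_register() applies above.
# Illustration only: the _CachedIndexer class and get_cached() helper are hypothetical and not part of the add-on.
import datetime


class _CachedIndexer(object):
    def __init__(self):
        self.validAt = datetime.datetime.now()


_cached_instance = None


def get_cached(valid_for=datetime.timedelta(minutes=1)):
    """ Returns the cached instance, recreating it once it has been unused for `valid_for`. """
    global _cached_instance
    now = datetime.datetime.now()
    if _cached_instance is None or _cached_instance.validAt + valid_for < now:
        _cached_instance = _CachedIndexer()   # create, or recreate after expiration
    else:
        _cached_instance.validAt = now        # sliding expiration: extend the window on every use
    return _cached_instance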
Example #13
    def set_inputstream_adaptive(self):
        """ Set the InputStream Adaptive for this channel """

        if self.channelObject is None:
            raise ValueError("Missing channel")

        if not self.channelObject.adaptiveAddonSelectable:
            Logger.warning(
                "Cannot set InputStream Adaptive add-on mode for %s",
                self.channelObject)
            return

        current_mode = AddonSettings.get_adaptive_mode(self.channelObject)
        mode_values = [None, True, False]
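        # These map one-to-one onto the dialog options below: None = Retrospect default, True = enabled, False = disabled.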
        current_index = mode_values.index(current_mode)
        mode_options = [
            LanguageHelper.get_localized_string(LanguageHelper.Retrospect),
            LanguageHelper.get_localized_string(LanguageHelper.Enabled),
            LanguageHelper.get_localized_string(LanguageHelper.Disabled)
        ]

        dialog = xbmcgui.Dialog()
        heading = LanguageHelper.get_localized_string(
            LanguageHelper.ChannelAdaptiveMode)
        selected_index = dialog.select(heading,
                                       mode_options,
                                       preselect=current_index)
        if selected_index < 0:
            return
        selected_value = mode_values[selected_index]

        Logger.info("Changing InputStream Adaptive mode for %s from %s to %s",
                    self.channelObject, mode_options[current_index],
                    mode_options[selected_index])

        AddonSettings.set_adaptive_mode(self.channelObject, selected_value)

        # Refresh if we have a video item selected, so the cached urls are removed.
        if self.keywordPickle in self.params:
            Logger.debug("Refreshing list to clear URL caches")
            self.refresh()
Example #14
    def __add_sort_method_to_handle(self, handle, items=None):
        """ Add a sort method to the plugin output. It takes the Add-On settings into
        account. But if none of the items have a date, it is forced to sort by name.

        :param int handle:              The handle to add the sortmethod to.
        :param list[MediaItem] items:   The items that need to be sorted

        :rtype: None

        """

        if AddonSettings.mix_folders_and_videos():
            label_sort_method = xbmcplugin.SORT_METHOD_LABEL_IGNORE_FOLDERS
        else:
            label_sort_method = xbmcplugin.SORT_METHOD_LABEL

        if items:
            has_dates = any(i.has_date() for i in items)
            if has_dates:
                Logger.debug("Sorting method: Dates")
                xbmcplugin.addSortMethod(handle=handle, sortMethod=xbmcplugin.SORT_METHOD_DATE)
                xbmcplugin.addSortMethod(handle=handle, sortMethod=label_sort_method)
                xbmcplugin.addSortMethod(handle=handle, sortMethod=xbmcplugin.SORT_METHOD_TRACKNUM)
                xbmcplugin.addSortMethod(handle=handle, sortMethod=xbmcplugin.SORT_METHOD_UNSORTED)
                return

            has_tracks = any(i.has_track() for i in items)
            if has_tracks:
                Logger.debug("Sorting method: Tracks")
                xbmcplugin.addSortMethod(handle=handle, sortMethod=xbmcplugin.SORT_METHOD_TRACKNUM)
                xbmcplugin.addSortMethod(handle=handle, sortMethod=xbmcplugin.SORT_METHOD_DATE)
                xbmcplugin.addSortMethod(handle=handle, sortMethod=label_sort_method)
                xbmcplugin.addSortMethod(handle=handle, sortMethod=xbmcplugin.SORT_METHOD_UNSORTED)
                return

        Logger.debug("Sorting method: Default (Label)")
        xbmcplugin.addSortMethod(handle=handle, sortMethod=label_sort_method)
        xbmcplugin.addSortMethod(handle=handle, sortMethod=xbmcplugin.SORT_METHOD_DATE)
        xbmcplugin.addSortMethod(handle=handle, sortMethod=xbmcplugin.SORT_METHOD_TRACKNUM)
        xbmcplugin.addSortMethod(handle=handle, sortMethod=xbmcplugin.SORT_METHOD_UNSORTED)
        return
Example #15
    def header(self, uri, proxy=None, referer=None, additional_headers=None):
        """ Retrieves header information only.

        :param str uri:                         The URI to fetch the header from.
        :param ProxyInfo|None proxy:            The address and port (proxy.address.ext:port) of a
                                                proxy server that should be used.
        :param str|None referer:                The http referer to use.
        :param dict|None additional_headers:    The optional headers.

        :return: Content-type and the URL to which a redirect could have occurred.
        :rtype: tuple[str,str]

        """

        with requests.session() as s:
            s.cookies = self.cookieJar
            s.verify = not self.ignoreSslErrors

            proxies = self.__get_proxies(proxy, uri)
            headers = self.__get_headers(referer, additional_headers)

            Logger.info("Performing a HEAD for %s", uri)
            r = s.head(uri, proxies=proxies, headers=headers, allow_redirects=True,
                       timeout=self.webTimeOut)

            content_type = r.headers.get("Content-Type", "")
            real_url = r.url

            self.status = UriStatus(code=r.status_code, url=uri, error=not r.ok, reason=r.reason)
            if self.cookieJarFile:
                # noinspection PyUnresolvedReferences
                self.cookieJar.save()

            if r.ok:
                Logger.info("%s resulted in '%s %s' (%s) for %s",
                            r.request.method, r.status_code, r.reason, r.elapsed, r.url)
                return content_type, real_url
            else:
                Logger.error("%s failed with in '%s %s' (%s) for %s",
                             r.request.method, r.status_code, r.reason, r.elapsed, r.url)
                return "", ""
    def add_search(self, data):
        """ Add a "search" item to the listing.

        :param str data: The retrieve data that was loaded for the current item and URL.

        :return: A tuple of the data and a list of MediaItems that were generated.
        :rtype: tuple[str|JsonHelper,list[MediaItem]]

        """

        Logger.info("Performing Pre-Processing")
        items = []

        title = "\a.: %s :." % (self.searchInfo.get(self.language, self.searchInfo["se"])[1], )
        Logger.trace("Adding search item: %s", title)
        search_item = MediaItem(title, "searchSite")
        search_item.dontGroup = True
        items.append(search_item)

        Logger.debug("Pre-Processing finished")
        return data, items
Example #17
    def __init__(self):
        """ Creates a new instance of the Vault class """

        self.__newKeyGeneratedInConstructor = False    # True if a brand new key was generated in this constructor

        # ask for PIN if no key is present
        if Vault.__Key is None:
            key = self.__get_application_key()  # type: bytes

            # was there a key? No, let's initialize it.
            if key is None:
                Logger.warning("No Application Key present. Initializing a new one.")
                key = self.__get_new_key()
                if not self.change_pin(key):
                    raise RuntimeError("Error creating Application Key.")
                Logger.info("Created a new Application Key with MD5: %s (length=%s)",
                            EncodingHelper.encode_md5(key), len(key))
                self.__newKeyGeneratedInConstructor = True

            Vault.__Key = key
            Logger.trace("Using Application Key with MD5: %s (length=%s)", EncodingHelper.encode_md5(key), len(key))
    def __set_date(self, result_set, item):
        if "usageRights" in result_set and "availableFrom" in result_set["usageRights"] \
                and result_set["usageRights"]["availableFrom"] is not None:
            Logger.trace("Using 'usageRights.availableFrom' for date")
            # availableFrom=/Date(1540612800000+0200)/
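            # [6:16] keeps the first 10 digits (the epoch in seconds) of the millisecond timestamp inside /Date(...)/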
            epoch_stamp = result_set["usageRights"]["availableFrom"][6:16]
            available_from = DateHelper.get_date_from_posix(int(epoch_stamp))
            item.set_date(available_from.year, available_from.month, available_from.day)

        elif "episodeNumberOrDate" in result_set and result_set["episodeNumberOrDate"] is not None:
            Logger.trace("Using 'episodeNumberOrDate' for date")
            date_parts = result_set["episodeNumberOrDate"].split(".")
            if len(date_parts) == 3:
                item.set_date(date_parts[2], date_parts[1], date_parts[0])

        elif "programUrlMetadata" in result_set and result_set["programUrlMetadata"] is not None:
            Logger.trace("Using 'programUrlMetadata' for date")
            date_parts = result_set["programUrlMetadata"].split("-")
            if len(date_parts) == 3:
                item.set_date(date_parts[2], date_parts[1], date_parts[0])
        return
    def get_channel(self):
        """ Instantiates a channel from a ChannelInfo object 

        :returns: an instantiated Channel object based on this ChannelInfo object.

        """

        Logger.trace("Importing module %s from path %s", self.moduleName,
                     self.path)

        sys.path.append(self.path)
        exec("import {}".format(self.moduleName))

        channel_command = '%s.Channel(self)' % (self.moduleName, )
        try:
            Logger.trace("Running command: %s", channel_command)
            channel = eval(channel_command)
        except Exception:
            Logger.error("Cannot Create channel for %s", self, exc_info=True)
            return None
        return channel
    def create_folder_item(self, result_set):
        """ Creates a MediaItem of type 'folder' using the result_set from the regex.

        This method creates a new MediaItem from the Regular Expression or Json
        results <result_set>. The method should be implemented by derived classes
        and is specific to the channel.

        :param list[str]|dict result_set: The result_set of the self.episodeItemRegex

        :return: A new MediaItem of type 'folder'.
        :rtype: MediaItem|None

        """
        Logger.trace(result_set)

        if "/sk=" in self.parentItem.url:
            return None

        abstract_key = result_set["abstract_key"]
        abstract_data = self.abstracts.get(abstract_key, None)
        if not abstract_data:
            Logger.warning("Could not find abstract data for key: %s", abstract_key)
            return None

        Logger.debug("Found Abstract Data: %s", abstract_data)

        abstract_name = abstract_data.get("name", "")
        title = result_set["name"]
        if abstract_name:
            title = "%s - %s" % (abstract_name, title)

        description = result_set.get("synopsis", None)
        key_value = result_set["key"]
        url = "http://www.rtl.nl/system/s4m/vfd/version=1/d=pc/output=json/ak=%s/sk=%s/pg=1" % (abstract_key, key_value)

        item = MediaItem(title.title(), url)
        item.description = description
        item.thumb = "%s/%s.png" % (self.posterBase, key_value,)
        item.complete = True
        return item
Example #21
    def add_categories(self, data):
        """ Adds categories to the main listings.

        The return values should always be instantiated in at least ("", []).

        :param str data: The retrieved data that was loaded for the current item and URL.

        :return: A tuple of the data and a list of MediaItems that were generated.
        :rtype: tuple[str|JsonHelper,list[MediaItem]]

        """

        Logger.info("Performing Pre-Processing")
        items = []

        if self.parentItem and "code" in self.parentItem.metaData:
            self.__currentChannel = self.parentItem.metaData["code"]
            Logger.info("Only showing items for channel: '%s'",
                        self.__currentChannel)
            return data, items

        cat = MediaItem("\a.: Categori&euml;n :.",
                        "https://www.vrt.be/vrtnu/categorieen.model.json")
        cat.dontGroup = True
        items.append(cat)

        live = MediaItem("\a.: Live Streams :.",
                         "https://services.vrt.be/videoplayer/r/live.json")
        live.dontGroup = True
        live.isLive = True
        items.append(live)

        channel_text = LanguageHelper.get_localized_string(30010)
        channels = MediaItem("\a.: %s :." % (channel_text, ), "#channels")
        channels.dontGroup = True
        items.append(channels)

        Logger.debug("Pre-Processing finished")
        return data, items
    def create_page_item(self, result_set):
        """ Creates a MediaItem of type 'page' using the result_set from the regex.

        This method creates a new MediaItem from the Regular Expression or Json
        results <result_set>. The method should be implemented by derived classes
        and is specific to the channel.

        :param list[str]|dict[str,str] result_set: The result_set of the self.episodeItemRegex

        :return: A new MediaItem of type 'page'.
        :rtype: MediaItem|None

        """

        Logger.debug("Starting create_page_item")
        Logger.trace(result_set)

        url = result_set["href"]
        page = url.rsplit("=", 1)[-1]

        item = MediaItem(page, url)
        item.type = "page"
        Logger.debug("Created '%s' for url %s", item.name, item.url)
        return item
    def create_json_video_item(self, result_set):  # NOSONAR
        """ Creates a MediaItem of type 'video' using the result_set from the regex.

        This method creates a new MediaItem from the Regular Expression or Json
        results <result_set>. The method should be implemented by derived classes
        and is specific to the channel.

        If the item is completely processed and no further data needs to be fetched
        the self.complete property should be set to True. If not set to True, the
        self.update_video_item method is called if the item is focussed or selected
        for playback.

        :param list[str]|dict[str,str] result_set: The result_set of the self.episodeItemRegex

        :return: A new MediaItem of type 'video' or 'audio' (despite the method's name).
        :rtype: MediaItem|None

        """

        Logger.trace(result_set)

        item = MediaItem(result_set["title"], "")
        item.type = "video"
        item.thumb = result_set["still"]
        item.description = result_set["description"]

        if "date" in result_set:
            # date=2019-10-02T08:30:13+02:00
            date_text = result_set["date"]
            date_tuple = date_text.split("T")[0].split("-")
            item.set_date(*[int(i) for i in date_tuple])

        if "media" in result_set and result_set["media"]:
            part = item.create_new_empty_media_part()
            for video_info in result_set["media"]:
                if video_info["mediatype"] == "FOTO":
                    Logger.trace("Ignoring foto: %s", item)
                    return None

                for info in video_info["variants"]:
                    video_type = info["version"]
                    uri = info["uri"]

                    if video_type == "flv":
                        part.append_media_stream(uri, 1000)
                    elif video_type == "720p":
                        part.append_media_stream(uri, 1200)
                    elif video_type == "1080p" or video_type == "original":
                        part.append_media_stream(uri, 1600)
                    elif video_type == "tablet":
                        part.append_media_stream(uri, 800)
                    elif video_type == "mobile":
                        part.append_media_stream(uri, 450)
                    elif video_type == "embed" and uri.startswith("youtube"):
                        embed_type, youtube_id = uri.split(":")
                        url = "https://www.youtube.com/watch?v=%s" % (
                            youtube_id, )
                        for s, b in YouTube.get_streams_from_you_tube(
                                url, self.proxy):
                            item.complete = True
                            part.append_media_stream(s, b)
                    else:
                        Logger.warning("Video type '%s' was not used",
                                       video_type)
            item.complete = True
        return item
    def update_video_item(self, item):
        """ Updates an existing MediaItem with more data.

        Used to update non-complete MediaItems (self.complete = False). This
        could include opening the item's URL to fetch more data and then process that
        data or retrieve its real media-URL.

        The method should at least:
        * cache the thumbnail to disk (use self.noImage if no thumb is available).
        * set at least one MediaItemPart with a single MediaStream.
        * set self.complete = True.

        if the returned item does not have a MediaItemPart then the self.complete flag
        will automatically be set back to False.

        :param MediaItem item: the original MediaItem that needs updating.

        :return: The original item with more data added to its properties.
        :rtype: MediaItem

        """

        Logger.debug('Starting update_video_item for %s (%s)', item.name,
                     self.channelName)

        # get additional info
        data = UriHandler.open(item.url, proxy=self.proxy)

        #<param name="flashvars" value="id=dj0xMDEzNzQyJmM9MTAwMDAwNA&amp;tags=source%253Dfreecaster&amp;autoplay=1" />
        # http://freecaster.tv/player/smil/dj0xMDEzNzQyJmM9MTAwMDAwNA -> playlist with bitrate
        # http://freecaster.tv/player/smil/dj0xMDEzNzQyJmM9MTAwMDAwNA -> info (not needed, get description from main page).

        you_tube_url = Regexer.do_regex(
            '"(https://www.youtube.com/embed/[^\"]+)', data)
        if you_tube_url:
            Logger.debug("Using Youtube video")
            part = item.create_new_empty_media_part()
            you_tube_url = you_tube_url[0].replace("embed/", "watch?v=")
            for s, b in YouTube.get_streams_from_you_tube(
                    you_tube_url, self.proxy):
                item.complete = True
                part.append_media_stream(s, b)
            return item

        guid = Regexer.do_regex(
            r'<meta property="og:video" content="http://player.extreme.com/FCPlayer.swf\?id=([^&]+)&amp[^"]+" />',
            data)
        if len(guid) > 0:
            url = '%s/player/smil/%s' % (
                self.baseUrl,
                guid[0],
            )
            data = UriHandler.open(url)

            smiller = Smil(data)
            base_url = smiller.get_base_url()
            urls = smiller.get_videos_and_bitrates()

            part = item.create_new_empty_media_part()
            for url in urls:
                if "youtube" in url[0]:
                    for s, b in YouTube.get_streams_from_you_tube(
                            url[0], self.proxy):
                        item.complete = True
                        part.append_media_stream(s, b)
                else:
                    part.append_media_stream("%s%s" % (base_url, url[0]),
                                             bitrate=int(url[1]) // 1000)
                item.complete = True

            Logger.trace("update_video_item complete: %s", item)
            return item

        # Try the brightcove
        bright_cove_regex = r'<object id="myExperience[\w\W]+?videoPlayer" value="(\d+)"[\w\W]{0,1000}?playerKey" value="([^"]+)'
        bright_cove_data = Regexer.do_regex(bright_cove_regex, data)
        Logger.trace(bright_cove_data)
        if len(bright_cove_data) > 0:
            Logger.error(
                "BrightCove AMF is no longer supported (no Py3 library)")

        return item
    def update_video_item(self, item):
        """ Updates an existing MediaItem with more data.

        Used to update non-complete MediaItems (self.complete = False). This
        could include opening the item's URL to fetch more data and then process that
        data or retrieve its real media-URL.

        The method should at least:
        * cache the thumbnail to disk (use self.noImage if no thumb is available).
        * set at least one MediaItemPart with a single MediaStream.
        * set self.complete = True.

        if the returned item does not have a MediaItemPart then the self.complete flag
        will automatically be set back to False.

        :param MediaItem item: the original MediaItem that needs updating.

        :return: The original item with more data added to its properties.
        :rtype: MediaItem

        """

        Logger.debug('Starting update_video_item for %s (%s)', item.name,
                     self.channelName)

        # noinspection PyStatementEffect
        """
                C:\temp\rtmpdump-2.3>rtmpdump.exe -z -o test.flv -n "cp70051.edgefcs.net" -a "tv
                4ondemand" -y "mp4:/mp4root/2010-06-02/pid2780626_1019976_T3MP48_.mp4?token=c3Rh
                cnRfdGltZT0yMDEwMDcyNjE2NDYyNiZlbmRfdGltZT0yMDEwMDcyNjE2NDgyNiZkaWdlc3Q9ZjFjN2U1
                NTRiY2U5ODMxMDMwYWQxZWEwNzNhZmUxNjI=" -l 2

                C:\temp\rtmpdump-2.3>rtmpdump.exe -z -o test.flv -r rtmpe://cp70051.edgefcs.net/
                tv4ondemand/mp4root/2010-06-02/pid2780626_1019976_T3MP48_.mp4?token=c3RhcnRfdGlt
                ZT0yMDEwMDcyNjE2NDYyNiZlbmRfdGltZT0yMDEwMDcyNjE2NDgyNiZkaWdlc3Q9ZjFjN2U1NTRiY2U5
                ODMxMDMwYWQxZWEwNzNhZmUxNjI=
                """

        # retrieve the mediaurl
        data = UriHandler.open(item.url,
                               proxy=self.proxy,
                               additional_headers=self.localIP)
        stream_info = JsonHelper(data)
        stream_url = stream_info.get_value("playbackItem", "manifestUrl")
        if stream_url is None:
            return item

        if ".mpd" in stream_url:
            return self.__update_dash_video(item, stream_info)

        part = item.create_new_empty_media_part()

        if AddonSettings.use_adaptive_stream_add_on() and False:
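            # NOTE: the "and False" above keeps this inputstream.adaptive branch disabled, so the manual M3u8 parsing in the else-branch is always used.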
            subtitle = M3u8.get_subtitle(stream_url, proxy=self.proxy)
            stream = part.append_media_stream(stream_url, 0)
            M3u8.set_input_stream_addon_input(stream, self.proxy)
            item.complete = True
        else:
            m3u8_data = UriHandler.open(stream_url,
                                        proxy=self.proxy,
                                        additional_headers=self.localIP)
            subtitle = M3u8.get_subtitle(stream_url,
                                         proxy=self.proxy,
                                         play_list_data=m3u8_data)
            for s, b, a in M3u8.get_streams_from_m3u8(stream_url,
                                                      self.proxy,
                                                      play_list_data=m3u8_data,
                                                      map_audio=True):
                item.complete = True
                if not item.isLive and "-video" not in s:
                    continue

                if a and "-audio" not in s:
                    # remove any query parameters
                    video_part = s.rsplit("?", 1)[0]
                    video_part = video_part.rsplit("-", 1)[-1]
                    video_part = "-%s" % (video_part, )
                    s = a.replace(".m3u8", video_part)
                part.append_media_stream(s, b)

        if subtitle:
            subtitle = subtitle.replace(".m3u8", ".webvtt")
            part.Subtitle = SubtitleHelper.download_subtitle(subtitle,
                                                             format="m3u8srt",
                                                             proxy=self.proxy)
        return item
    def create_video_item(self, result_set):
        """ Creates a MediaItem of type 'video' using the result_set from the regex.

        This method creates a new MediaItem from the Regular Expression or Json
        results <result_set>. The method should be implemented by derived classes
        and is specific to the channel.

        If the item is completely processed and no further data needs to be fetched
        the self.complete property should be set to True. If not set to True, the
        self.update_video_item method is called if the item is focussed or selected
        for playback.

        :param list[str]|dict result_set: The result_set of the self.episodeItemRegex

        :return: A new MediaItem of type 'video' or 'audio' (despite the method's name).
        :rtype: MediaItem|None

        """

        Logger.trace('starting FormatVideoItem for %s', self.channelName)
        # Logger.Trace(result_set)

        # the vmanProgramId (like 1019976) leads to http://anytime.tv4.se/webtv/metafileFlash.smil?p=1019976&bw=1000&emulate=true&sl=true
        program_id = result_set["id"]
        # Logger.Debug("ProgId = %s", programId)

        # We can either use M3u8 or Dash
        # url = "https://playback-api.b17g.net/media/%s?service=tv4&device=browser&protocol=hls" % (program_id,)
        url = "https://playback-api.b17g.net/media/%s?service=tv4&device=browser&protocol=dash" % (
            program_id, )
        name = result_set["title"]
        season = result_set.get("season", 0)
        episode = result_set.get("episode", 0)
        is_episodic = 0 < season < 1900 and not episode == 0
        if is_episodic:
            episode_text = None
            if " del " in name:
                name, episode_text = name.split(" del ", 1)
                episode_text = episode_text.lstrip("0123456789")

            if episode_text:
                episode_text = episode_text.lstrip(" -")
                name = "{} - s{:02d}e{:02d} - {}".format(
                    name, season, episode, episode_text)
            else:
                name = "{} - s{:02d}e{:02d}".format(name, season, episode)

        item = MediaItem(name, url)
        item.description = result_set["description"]
        if item.description is None:
            item.description = item.name

        if is_episodic:
            item.set_season_info(season, episode)

        # premium_expire_date_time=2099-12-31T00:00:00+01:00
        expire_date = result_set.get("expire_date_time")
        if bool(expire_date):
            self.__set_expire_time(expire_date, item)

        date = result_set["broadcast_date_time"]
        (date_part, time_part) = date.split("T")
        (year, month, day) = date_part.split("-")
        (hour, minutes, rest1, zone) = time_part.split(":")
        item.set_date(year, month, day, hour, minutes, 00)
        broadcast_date = datetime.datetime(int(year), int(month), int(day),
                                           int(hour), int(minutes))

        item.fanart = result_set.get("program_image", self.parentItem.fanart)
        thumb_url = result_set.get("image", result_set.get("program_image"))
        # some images need to come via a proxy:
        if thumb_url and "://img.b17g.net/" in thumb_url:
            item.thumb = "https://imageproxy.b17g.services/?format=jpg&shape=cut" \
                         "&quality=90&resize=520x293&source={}"\
                .format(HtmlEntityHelper.url_encode(thumb_url))
        else:
            item.thumb = thumb_url

        availability = result_set["availability"]
        # noinspection PyTypeChecker
        free_period = availability["availability_group_free"]
        # noinspection PyTypeChecker
        premium_period = availability["availability_group_premium"]

        now = datetime.datetime.now()
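        # NOTE: the leading "False" disables the always-premium branch below; the paid state is derived from the free period only.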
        if False and not premium_period == "0":
            # always premium
            free_expired = now - datetime.timedelta(days=99 * 365)
        elif free_period == "30+" or free_period is None:
            free_expired = broadcast_date + datetime.timedelta(days=99 * 365)
        else:
            free_expired = broadcast_date + datetime.timedelta(
                days=int(free_period))
        Logger.trace(
            "Premium info for: %s\nPremium state: %s\nFree State:    %s\nBroadcast %s vs Expired %s",
            name, premium_period, free_period, broadcast_date, free_expired)

        if now > free_expired:
            item.isPaid = True

        item.type = "video"
        item.complete = False
        item.isGeoLocked = result_set["is_geo_restricted"]
        item.isDrmProtected = result_set["is_drm_protected"]
        item.isLive = result_set.get("is_live", False)
        if item.isLive:
            item.name = "{}:{} - {}".format(hour, minutes, name)
            item.url = "{0}&is_live=true".format(item.url)
        if item.isDrmProtected:
            item.url = "{}&drm=widevine&is_drm=true".format(item.url)

        item.set_info_label("duration", int(result_set.get("duration", 0)))
        return item
    def pre_process_folder_list(self, data):
        """ Performs pre-process actions for data processing.

        Accepts data from the process_folder_list method, BEFORE the items are
        processed. Allows setting of parameters (like title etc) for the channel.
        Inside this method the <data> could be changed and additional items can
        be created.

        The return values should always be instantiated in at least ("", []).

        :param str|unicode data: The retrieved data that was loaded for the current item and URL.

        :return: A tuple of the data and a list of MediaItems that were generated.
        :rtype: tuple[str|JsonHelper,list[MediaItem]]

        """

        Logger.info("Performing Pre-Processing")
        items = []

        # Add a clips folder only on the first page and only if it is not already a clip page
        if "type=clip" not in self.parentItem.url \
                and "&page=1&" in self.parentItem.url \
                and "node_nids=" in self.parentItem.url:
            # get the category ID
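            # (10 == len("node_nids="); everything after that marker is the category id)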
            cat_start = self.parentItem.url.rfind("node_nids=")
            cat_id = self.parentItem.url[cat_start + 10:]
            Logger.debug("Currently doing CatId: '%s'", cat_id)

            url = "https://api.tv4play.se/play/video_assets?platform=tablet&per_page=%s&" \
                  "type=clip&page=1&node_nids=%s&start=0" % (self.maxPageSize, cat_id,)
            clips_title = LanguageHelper.get_localized_string(
                LanguageHelper.Clips)
            clips = MediaItem(clips_title, url)
            clips.complete = True
            items.append(clips)

        # find the max number of items ("total_hits":2724)
        total_items = int(Regexer.do_regex(r'total_hits\W+(\d+)', data)[-1])
        Logger.debug("Found total of %s items. Only showing %s.", total_items,
                     self.maxPageSize)
        if total_items > self.maxPageSize and "&page=1&" in self.parentItem.url:
            # create a group item
            more_title = LanguageHelper.get_localized_string(
                LanguageHelper.MorePages)
            more = MediaItem(more_title, "")
            more.complete = True
            items.append(more)

            # what are the total number of pages?
            current_page = 1
            # noinspection PyTypeChecker
            total_pages = int(math.ceil(1.0 * total_items / self.maxPageSize))

            current_url = self.parentItem.url
            needle = "&page="
            while current_page < total_pages:
                # what is the current page
                current_page += 1

                url = current_url.replace("%s1" % (needle, ),
                                          "%s%s" % (needle, current_page))
                Logger.debug("Adding next page: %s\n%s", current_page, url)
                page = MediaItem(str(current_page), url)
                page.type = "page"
                page.complete = True

                if total_pages == 2:
                    items = [page]
                    break
                else:
                    more.items.append(page)

        Logger.debug("Pre-Processing finished")
        return data, items
    def add_categories_and_specials(self, data):
        """ Performs pre-process actions for data processing.

        Accepts data from the process_folder_list method, BEFORE the items are
        processed. Allows setting of parameters (like title etc) for the channel.
        Inside this method the <data> could be changed and additional items can
        be created.

        The return values should always be instantiated in at least ("", []).

        :param str data: The retrieved data that was loaded for the current item and URL.

        :return: A tuple of the data and a list of MediaItems that were generated.
        :rtype: tuple[str|JsonHelper,list[MediaItem]]

        """

        Logger.info("Performing Pre-Processing")
        items = []

        extras = {
            LanguageHelper.get_localized_string(LanguageHelper.Search):
            ("searchSite", None, False),
            LanguageHelper.get_localized_string(LanguageHelper.TvShows):
            ("https://api.tv4play.se/play/programs?is_active=true&platform=tablet"
             "&per_page=1000&fl=nid,name,program_image,is_premium,updated_at,channel&start=0",
             None, False)
        }

        # Channel 4 specific items
        if self.channelCode == "tv4se":
            extras.update({
                LanguageHelper.get_localized_string(LanguageHelper.Categories):
                ("https://api.tv4play.se/play/categories.json", None, False),
                LanguageHelper.get_localized_string(LanguageHelper.MostViewedEpisodes):
                ("https://api.tv4play.se/play/video_assets/most_viewed?type=episode"
                 "&platform=tablet&is_live=false&per_page=%s&start=0" %
                 (self.maxPageSize, ), None, False),
            })

            today = datetime.datetime.now()
            days = [
                LanguageHelper.get_localized_string(LanguageHelper.Monday),
                LanguageHelper.get_localized_string(LanguageHelper.Tuesday),
                LanguageHelper.get_localized_string(LanguageHelper.Wednesday),
                LanguageHelper.get_localized_string(LanguageHelper.Thursday),
                LanguageHelper.get_localized_string(LanguageHelper.Friday),
                LanguageHelper.get_localized_string(LanguageHelper.Saturday),
                LanguageHelper.get_localized_string(LanguageHelper.Sunday)
            ]
            for i in range(0, 7, 1):
                start_date = today - datetime.timedelta(i)
                end_date = start_date + datetime.timedelta(1)

                day = days[start_date.weekday()]
                if i == 0:
                    day = LanguageHelper.get_localized_string(
                        LanguageHelper.Today)
                elif i == 1:
                    day = LanguageHelper.get_localized_string(
                        LanguageHelper.Yesterday)

                Logger.trace("Adding item for: %s - %s", start_date, end_date)
                # Old URL:
                # url = "https://api.tv4play.se/play/video_assets?exclude_node_nids=" \
                #       "nyheterna,v%C3%A4der,ekonomi,lotto,sporten,nyheterna-blekinge,nyheterna-bor%C3%A5s," \
                #       "nyheterna-dalarna,nyheterna-g%C3%A4vle,nyheterna-g%C3%B6teborg,nyheterna-halland," \
                #       "nyheterna-helsingborg,nyheterna-j%C3%B6nk%C3%B6ping,nyheterna-kalmar,nyheterna-link%C3%B6ping," \
                #       "nyheterna-lule%C3%A5,nyheterna-malm%C3%B6,nyheterna-norrk%C3%B6ping,nyheterna-skaraborg," \
                #       "nyheterna-skellefte%C3%A5,nyheterna-stockholm,nyheterna-sundsvall,nyheterna-ume%C3%A5," \
                #       "nyheterna-uppsala,nyheterna-v%C3%A4rmland,nyheterna-v%C3%A4st,nyheterna-v%C3%A4ster%C3%A5s," \
                #       "nyheterna-v%C3%A4xj%C3%B6,nyheterna-%C3%B6rebro,nyheterna-%C3%B6stersund,tv4-tolken," \
                #       "fotbollskanalen-europa" \
                #       "&platform=tablet&per_page=32&is_live=false&product_groups=2&type=episode&per_page=100"
                url = "https://api.tv4play.se/play/video_assets?exclude_node_nids=" \
                      "&platform=tablet&per_page=32&is_live=false&product_groups=2&type=episode&per_page=100"
                url = "%s&broadcast_from=%s&broadcast_to=%s&" % (
                    url, start_date.strftime("%Y%m%d"),
                    end_date.strftime("%Y%m%d"))
                extras[day] = (url, start_date, False)

        extras[LanguageHelper.get_localized_string(
            LanguageHelper.CurrentlyPlayingEpisodes
        )] = (
            "https://api.tv4play.se/play/video_assets?exclude_node_nids=&platform=tablet&"
            "per_page=32&is_live=true&product_groups=2&type=episode&per_page=100",
            None, False)

        for name in extras:
            title = name
            url, date, is_live = extras[name]
            item = MediaItem(title, url)
            item.dontGroup = True
            item.complete = True
            item.HttpHeaders = self.httpHeaders
            item.isLive = is_live

            if date is not None:
                item.set_date(date.year,
                              date.month,
                              date.day,
                              0,
                              0,
                              0,
                              text=date.strftime("%Y-%m-%d"))

            items.append(item)

        if not self.channelCode == "tv4se":
            return data, items

        # Add Live TV
        # live = MediaItem("\a.: Live-TV :.",
        #                            "http://tv4events1-lh.akamaihd.net/i/EXTRAEVENT5_1@324055/master.m3u8",
        #                            type="video")
        # live.dontGroup = True
        # # live.isDrmProtected = True
        # live.isGeoLocked = True
        # live.isLive = True
        # items.append(live)

        Logger.debug("Pre-Processing finished")
        return data, items
    def update_video_item(self, item):
        """ Updates an existing MediaItem with more data.

        Used to update non-complete MediaItems (self.complete = False). This
        could include opening the item's URL to fetch more data and then process that
        data or retrieve its real media-URL.

        The method should at least:
        * cache the thumbnail to disk (use self.noImage if no thumb is available).
        * set at least one MediaItemPart with a single MediaStream.
        * set self.complete = True.

        if the returned item does not have a MediaItemPart then the self.complete flag
        will automatically be set back to False.

        :param MediaItem item: the original MediaItem that needs updating.

        :return: The original item with more data added to its properties.
        :rtype: MediaItem

        """

        Logger.debug('Starting update_video_item for %s (%s)', item.name, self.channelName)

        # noinspection PyStatementEffect
        """
        data-video-id="1613274"
        data-video-type="video"
        data-video-src="http://media.vrtnieuws.net/2013/04/135132051ONL1304255866693.urlFLVLong.flv"
        data-video-title="Het journaal 1 - 25/04/13"
        data-video-rtmp-server="rtmp://vrt.flash.streampower.be/vrtnieuws"
        data-video-rtmp-path="2013/04/135132051ONL1304255866693.urlFLVLong.flv"
        data-video-rtmpt-server="rtmpt://vrt.flash.streampower.be/vrtnieuws"
        data-video-rtmpt-path="2013/04/135132051ONL1304255866693.urlFLVLong.flv"
        data-video-iphone-server="http://iphone.streampower.be/vrtnieuws_nogeo/_definst_"
        data-video-iphone-path="2013/04/135132051ONL1304255866693.urlMP4_H.264.m4v"
        data-video-mobile-server="rtsp://mp4.streampower.be/vrt/vrt_mobile/vrtnieuws_nogeo"
        data-video-mobile-path="2013/04/135132051ONL1304255866693.url3GP_MPEG4.3gp"
        data-video-sitestat-program="het_journaal_1_-_250413_id_1-1613274"
        """

        # now the media url is derived from the data-video-* attributes
        data = UriHandler.open(item.url, proxy=self.proxy)
        data = data.replace("\\/", "/")
        urls = Regexer.do_regex(self.mediaUrlRegex, data)
        part = item.create_new_empty_media_part()
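        # Each regex match is a tuple: url[0] is the data-video-* attribute suffix (e.g. "src" or "rtmp-server"); the remaining elements hold the captured server/path values.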
        for url in urls:
            Logger.trace(url)
            if url[0] == "src":
                flv = url[1]
                bitrate = 750
            else:
                flv_server = url[1]
                flv_path = url[2]

                if url[0] == "rtmp-server":
                    flv = "%s//%s" % (flv_server, flv_path)
                    bitrate = 750

                elif url[0] == "rtmpt-server":
                    continue
                    # Not working for now
                    #flv = "%s//%s" % (flv_server, flv_path)
                    #flv = self.get_verifiable_video_url(flv)
                    #bitrate = 1500

                elif url[0] == "iphone-server":
                    flv = "%s/%s" % (flv_server, flv_path)
                    if not flv.endswith("playlist.m3u8"):
                        flv = "%s/playlist.m3u8" % (flv,)

                    for s, b in M3u8.get_streams_from_m3u8(flv, self.proxy):
                        item.complete = True
                        part.append_media_stream(s, b)
                    # no need to continue adding the streams
                    continue

                elif url[0] == "mobile-server":
                    flv = "%s/%s" % (flv_server, flv_path)
                    bitrate = 250

                else:
                    flv = "%s/%s" % (flv_server, flv_path)
                    bitrate = 0

            part.append_media_stream(flv, bitrate)

        item.complete = True
        return item
Example #30
    def update_live_item(self, item):
        """ Updates an existing Live stream MediaItem with more data.

        Used to update non-complete MediaItems (self.complete = False). This
        could include opening the item's URL to fetch more data and then process that
        data or retrieve its real media-URL.

        The method should at least:
        * cache the thumbnail to disk (use self.noImage if no thumb is available).
        * set at least one MediaItemPart with a single MediaStream.
        * set self.complete = True.

        if the returned item does not have a MediaItemPart then the self.complete flag
        will automatically be set back to False.

        :param MediaItem item: the original MediaItem that needs updating.

        :return: The original item with more data added to its properties.
        :rtype: MediaItem

        """

        Logger.debug('Starting update_live_item for %s (%s)', item.name,
                     self.channelName)

        data = UriHandler.open(item.url,
                               proxy=self.proxy,
                               additional_headers=item.HttpHeaders)
        json = JsonHelper(data)
        video_play_lists = json.get_value("Video", "Playlists", "Playlist")

        part = item.create_new_empty_media_part()
        for play_list in video_play_lists:
            streams = play_list["url"]
            Logger.trace("Found %s streams", len(streams))
            for stream in streams:
                stream_url = stream["text"]
                if ".m3u8" in stream_url:
                    for s, b in M3u8.get_streams_from_m3u8(
                            stream_url, self.proxy):
                        item.complete = True
                        part.append_media_stream(s, b)
                else:
                    Logger.debug("Cannot use stream url: %s", stream_url)

        # Unused at the moment
        # videoInfo = json.get_value("content", "videoInfos")
        #
        # part = item.create_new_empty_media_part()
        # if "HLSurlHD" in videoInfo:
        #     # HLSurlHD=http://srfvodhd-vh.akamaihd.net/i/vod/potzmusig/2015/03/potzmusig_20150307_184438_v_webcast_h264_,q10,q20,q30,q40,q50,q60,.mp4.csmil/master.m3u8
        #     for s, b in M3u8.get_streams_from_m3u8(videoInfo["HLSurlHD"], self.proxy):
        #         item.complete = True
        #         # s = self.get_verifiable_video_url(s)
        #         part.append_media_stream(s, b)
        # elif "HLSurl" in videoInfo:
        #     # HLSurl=http://srfvodhd-vh.akamaihd.net/i/vod/potzmusig/2015/03/potzmusig_20150307_184438_v_webcast_h264_,q10,q20,q30,q40,.mp4.csmil/master.m3u8
        #     for s, b in M3u8.get_streams_from_m3u8(videoInfo["HLSurl"], self.proxy):
        #         item.complete = True
        #         # s = self.get_verifiable_video_url(s)
        #         part.append_media_stream(s, b)
        #
        # if "downloadLink" in videoInfo:
        #     # downloadLink=http://podcastsource.sf.tv/nps/podcast/10vor10/2015/03/10vor10_20150304_215030_v_podcast_h264_q10.mp4
        #     part.append_media_stream(videoInfo["downloadLink"], 1000)

        return item