Ejemplo n.º 1
0
def check_version(force=False):
    """Check PyPI for a newer Livecli release and notify the user.

    The latest version number is cached for 24 hours so PyPI is not hit
    on every invocation; the "new version available" notice is printed
    at most once every 6 hours unless *force* is set.

    :param force: always query PyPI, always print a result and exit
    """
    console.logger.debug("run ... check_version")
    cache = Cache(filename="cli.json")
    latest_version = cache.get("latest_version")

    if force or not latest_version:
        res = requests.get("https://pypi.python.org/pypi/livecli/json")
        data = res.json()
        latest_version = data.get("info").get("version")
        cache.set("latest_version", latest_version, (60 * 60 * 24))

    version_info_printed = cache.get("version_info_printed")
    if not force and version_info_printed:
        return

    installed_version = StrictVersion(livecli.version)
    latest_version = StrictVersion(latest_version)

    if latest_version > installed_version:
        console.logger.info("A new version of Livecli ({0}) is "
                            "available!".format(latest_version))
        # suppress repeat notices for the next 6 hours
        cache.set("version_info_printed", True, (60 * 60 * 6))
    elif force:
        # format explicitly, consistent with the message above
        # (previously relied on the logger's implicit "{0}" formatting)
        console.logger.info("Your Livecli version ({0}) is up to "
                            "date!".format(installed_version))

    if force:
        sys.exit()
Ejemplo n.º 2
0
    def reload_session(self):
        """Swap ``self.stream`` for a freshly generated stream once the
        configured reload time has elapsed.

        Stream name and URL are read back from the shared streamdata
        cache; if the cache entry vanished, the reload feature disables
        itself instead of guessing.
        """
        self.logger.debug("Reloading session for playlist")
        old_cache = Cache(filename="streamdata.json",
                          key_prefix="cache:{0}".format(self.stream.url))
        stream_name = old_cache.get("cache_stream_name", "best")
        stream_url = old_cache.get("cache_url")
        self.logger.debug("Current stream_name: {0}".format(stream_name))

        if not stream_url:
            # corrupt cache data: can happen when more than one instance
            # of streamlink runs hls-session-reload on the same stream_url
            self.logger.warning(
                "Missing cache data, hls-session-reload is now deactivated")
            # push the next reload far into the future -> deactivated
            self.session_time = int(time() + time())
            return

        streams = self.session.streams(stream_url)
        if not streams:
            self.logger.debug(
                "No stream found for reload_session, stream might be offline.")
            return

        self.stream = streams[stream_name]
        lifetime = self.session_reload + 60
        fresh_cache = Cache(filename="streamdata.json",
                            key_prefix="cache:{0}".format(self.stream.url))
        fresh_cache.set("cache_stream_name", stream_name, lifetime)
        fresh_cache.set("cache_url", stream_url, lifetime)
        self.session_time = int(time())
Ejemplo n.º 3
0
 def _update_cache(self, hls_url):
     """Persist stream name and page URL for hls-session-reload lookups.

     :param hls_url: the resolved HLS playlist url used as cache key
     """
     lifetime = self.session.get_option("hls-session-reload") + 60
     cache = Cache(filename="streamdata.json",
                   key_prefix="cache:{0}".format(hls_url))
     cache.set("cache_stream_name", "live", lifetime)
     cache.set("cache_url", self.url, lifetime)
Ejemplo n.º 4
0
def cache_stream_data(cache_stream_name, cache_stream_url):
    """Caches data for hls-session-reload

    :param cache_stream_name: stream quality name
    :param cache_stream_url: stream url
    """
    lifetime = args.hls_session_reload + 60
    cache = Cache(filename="streamdata.json",
                  key_prefix="cache:{0}".format(cache_stream_url))
    cache.set("cache_stream_name", cache_stream_name, lifetime)
    cache.set("cache_url", args.url, lifetime)
Ejemplo n.º 5
0
    def reload_session(self):
        """Replace the current stream with a freshly resolved stream
        after the configured reload time has passed.

        On corrupt cache data or resolution errors the feature turns
        itself off rather than crashing the running stream.
        """
        self.logger.debug("Reloading session for playlist")
        old_cache = Cache(filename="streamdata.json",
                          key_prefix="cache:{0}".format(self.stream.url))
        stream_name = old_cache.get("cache_stream_name", "best")
        stream_url = old_cache.get("cache_url")
        self.logger.debug("Current stream_url: {0}".format(stream_url))
        self.logger.debug("Current stream_name: {0}".format(stream_name))

        if not stream_url:
            # corrupt cache data, if more than one instance of streamlink
            # with the same stream_url (m3u8) and hls-session-reload is running
            # this is very rare and shouldn't be a problem
            self.logger.warning(
                "Missing cache data, hls-session-reload is now deactivated, a stream restart might help."
            )
            # push the next reload far into the future -> deactivated
            self.session_reload_time = int(time() + time())
            return

        try:
            streams = self.session.streams(stream_url)
        except Exception as err:
            self.logger.error(str(err))
            self.logger.warning(
                "something went wrong, hls-session-reload is now deactivated, a stream restart might help."
            )
            self.session_reload_time = int(time() + time())
            return

        if not streams:
            self.logger.debug(
                "No stream found for hls-session-reload, stream might be offline."
            )
            return

        self.stream = streams[stream_name]
        lifetime = self.session_reload + 60
        fresh_cache = Cache(filename="streamdata.json",
                            key_prefix="cache:{0}".format(self.stream.url))
        fresh_cache.set("cache_stream_name", stream_name, lifetime)
        fresh_cache.set("cache_url", stream_url, lifetime)
        self.session_reload_time = int(time())
Ejemplo n.º 6
0
    def reload_session(self):
        """ Replace the current stream with a new stream,
            the new stream will be generated from _get_streams()
            after the given reload time.
        """
        self.logger.debug("Reloading session for playlist")
        # read back the data cached by the initial stream resolution
        cache = Cache(filename="streamdata.json",
                      key_prefix="cache:{0}".format(self.stream.url))
        cache_stream_name = cache.get("cache_stream_name", "best")
        cache_url = cache.get("cache_url")
        if not cache_url:
            # corrupt cache data
            # if more than one instance of streamlink
            # with the same stream_url and hls-session-reload is running
            self.logger.warning(
                "Missing cache data, hls-session-reload is now deactivated")
            # set the next reload time far into the future -> deactivated
            self.session_time = int(time() + time())
            return

        # resolve the plugin for the cached url and regenerate its streams
        channel = self.session.resolve_url(cache_url)
        streams = channel._get_streams()
        try:
            # HLSStream with parse_variant_playlist
            self.stream = streams[cache_stream_name]
        except KeyError:
            # if stream_name is '1080p source' but the cache is '1080p'
            # fall back to the first quality that contains the cached name
            for source_stream_name in streams.keys():
                if cache_stream_name in source_stream_name:
                    self.stream = streams[source_stream_name]
        except TypeError:
            # HLSStream without parse_variant_playlist
            # NOTE(review): assumes streams iterates as (name, stream)
            # pairs here; the last pair wins — confirm against caller
            for name, hls_stream in streams:
                self.stream = hls_stream

        # refresh the cache entry under the (possibly new) stream url
        new_cache = Cache(filename="streamdata.json",
                          key_prefix="cache:{0}".format(self.stream.url))
        new_cache.set("cache_stream_name", cache_stream_name,
                      (self.session_reload + 60))
        new_cache.set("cache_url", cache_url, (self.session_reload + 60))
        self.session_time = int(time())
Ejemplo n.º 7
0
class Zattoo(Plugin):
    """Plugin for zattoo.com and white-label partner portals
    (tvonline.ewe.de, nettv.netcologne.de).

    Handles live channels, recordings and VOD.  Session cookies are
    persisted in the plugin cache so a login is reused between runs.
    """

    # API endpoint templates; {0} is always the portal base url
    API_HELLO = '{0}/zapi/session/hello'
    # NOTE(review): value looks redacted ('******') — confirm the real
    # login endpoint template before relying on this listing
    API_LOGIN = '******'
    API_CHANNELS = '{0}/zapi/v2/cached/channels/{1}?details=False'
    API_WATCH = '{0}/zapi/watch'
    API_WATCH_REC = '{0}/zapi/watch/recording/{1}'
    API_WATCH_VOD = '{0}/zapi/avod/videos/{1}/watch'

    # matches live (channel), recording (recording_id) and VOD (vod_id) urls
    _url_re = re.compile(r'''
        https?://
        (?P<base_url>
        zattoo\.com
        |
        tvonline\.ewe\.de
        |
        nettv\.netcologne\.de
        )/
        (?:
            (?:ondemand/)?(?:watch/(?:[^/\s]+)(?:/[^/]+/(?P<recording_id>\d+)))
            |
            watch/(?P<channel>[^/\s]+)
            |
            ondemand/watch/(?P<vod_id>[^-]+)-
        )
        ''', re.VERBOSE)

    # extracts the client app token embedded in the login page
    _app_token_re = re.compile(r"""window\.appToken\s+=\s+'([^']+)'""")

    # validates the channel list response; reduces it to channel_groups
    _channels_schema = validate.Schema({
        'success': int,
        'channel_groups': [{
            'channels': [
                {
                    'display_alias': validate.text,
                    'cid': validate.text
                },
            ]
        }]},
        validate.get('channel_groups'),
    )

    options = PluginOptions({
        'email': None,
        'password': None,
        'purge_credentials': None
    })

    def __init__(self, url):
        """Load persisted session state and prepare request headers."""
        super(Zattoo, self).__init__(url)
        self._session_attributes = Cache(filename='plugin-cache.json', key_prefix='zattoo:attributes')
        # authenticated only while all three session values are cached
        self._authed = self._session_attributes.get('beaker.session.id') and self._session_attributes.get('pzuid') and self._session_attributes.get('power_guide_hash')
        self._uuid = self._session_attributes.get('uuid')
        # default lies far in the past, which forces a fresh login
        self._expires = self._session_attributes.get('expires', 946684800)

        self.base_url = 'https://{0}'.format(Zattoo._url_re.match(url).group('base_url'))
        self.headers = {
            'User-Agent': useragents.CHROME,
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'X-Requested-With': 'XMLHttpRequest',
            'Referer': self.base_url
        }

    @classmethod
    def can_handle_url(cls, url):
        # truthy match object when the url belongs to a supported portal
        return Zattoo._url_re.match(url)

    def _hello(self):
        """Open an anonymous API session.

        Scrapes the app token from the portal's login page and posts it
        together with a (cached) client uuid to the hello endpoint.

        :return: the hello response (its cookies are needed for login)
        """
        self.logger.debug('_hello ...')
        headers = {
            'User-Agent': useragents.CHROME,
            'Referer': self.base_url
        }
        # NOTE(review): uses requests directly instead of the http
        # helper used elsewhere in this class — confirm intentional
        res = requests.get("{0}/login".format(self.base_url), headers=headers)
        match = self._app_token_re.search(res.text)

        # assumes the app token is always present on the login page
        # (match is not None-checked) — TODO confirm
        app_token = match.group(1)
        hello_url = self.API_HELLO.format(self.base_url)

        if self._uuid:
            __uuid = self._uuid
        else:
            # generate and cache a client uuid for 24 hours
            __uuid = str(uuid.uuid4())
            self._session_attributes.set('uuid', __uuid, expires=3600 * 24)

        params = {
            'client_app_token': app_token,
            'uuid': __uuid,
            'lang': 'en',
            'format': 'json'
        }
        res = http.post(hello_url, headers=self.headers, data=params)
        return res

    def _login(self, email, password, _hello):
        """Post credentials and persist the session cookies on success.

        :param email: account email address
        :param password: account password
        :param _hello: response from _hello() (supplies session cookies)
        :return: truthy on success, None on failed login
        """
        self.logger.debug('_login ... Attempting login as {0}'.format(email))

        login_url = self.API_LOGIN.format(self.base_url)

        params = {
            'login': email,
            'password': password,
            'remember': 'true'
        }

        res = http.post(login_url, headers=self.headers, data=params, cookies=_hello.cookies)
        data = http.json(res)

        self._authed = data['success']
        if self._authed:
            # cache the session cookies for 24 hours
            self.logger.debug('New Session Data')
            self._session_attributes.set('beaker.session.id', res.cookies.get('beaker.session.id'), expires=3600 * 24)
            self._session_attributes.set('pzuid', res.cookies.get('pzuid'), expires=3600 * 24)
            self._session_attributes.set('power_guide_hash', data['session']['power_guide_hash'], expires=3600 * 24)
            return self._authed
        else:
            return None

    def _watch(self):
        """Resolve the watch endpoint for the current url and yield the
        HLS streams it returns.

        Dispatches to _watch_live / _watch_vod / _watch_recording based
        on which regex group matched.
        """
        self.logger.debug('_watch ...')
        match = self._url_re.match(self.url)
        if not match:
            self.logger.debug('_watch ... no match')
            return
        channel = match.group('channel')
        vod_id = match.group('vod_id')
        recording_id = match.group('recording_id')

        cookies = {
            'beaker.session.id': self._session_attributes.get('beaker.session.id'),
            'pzuid': self._session_attributes.get('pzuid')
        }

        watch_url = []
        if channel:
            params, watch_url = self._watch_live(channel, cookies)
        elif vod_id:
            params, watch_url = self._watch_vod(vod_id)
        elif recording_id:
            params, watch_url = self._watch_recording(recording_id)

        if not watch_url:
            self.logger.debug('Missing watch_url')
            return

        res = []
        try:
            res = http.post(watch_url, headers=self.headers, data=params, cookies=cookies)
        except Exception as e:
            # map common HTTP error texts to user-facing hints
            if '404 Client Error' in str(e):
                self.logger.error('Unfortunately streaming is not permitted in this country or this channel does not exist.')
            elif '402 Client Error: Payment Required' in str(e):
                self.logger.error('Paid subscription required for this channel.')
                self.logger.info('If paid subscription exist, use --zattoo-purge-credentials to start a new session.')
            else:
                self.logger.error(str(e))
            return

        self.logger.debug('Found post data')
        data = http.json(res)

        if data['success']:
            for hls_url in data['stream']['watch_urls']:
                for s in HLSStream.parse_variant_playlist(self.session, hls_url['url']).items():
                    yield s

    def _watch_live(self, channel, cookies):
        """Build the watch request for a live channel.

        Looks up the channel id (cid) from the portal's channel list;
        falls back to the raw channel name when no alias matches.

        :return: (post params, watch url)
        """
        self.logger.debug('_watch_live ... Channel: {0}'.format(channel))
        watch_url = self.API_WATCH.format(self.base_url)

        channels_url = self.API_CHANNELS.format(self.base_url, self._session_attributes.get('power_guide_hash'))
        res = http.get(channels_url, headers=self.headers, cookies=cookies)
        data = http.json(res, schema=self._channels_schema)

        # flatten channel groups into a single channel list
        c_list = []
        for d in data:
            for c in d['channels']:
                c_list.append(c)

        cid = []
        zattoo_list = []
        for c in c_list:
            zattoo_list.append(c['display_alias'])
            if c['display_alias'] == channel:
                cid = c['cid']

        self.logger.debug('Available zattoo channels in this country: {0}'.format(', '.join(sorted(zattoo_list))))

        if not cid:
            # no alias matched; try the raw channel name as cid
            cid = channel

        self.logger.debug('CHANNEL ID: {0}'.format(cid))

        params = {
            'cid': cid,
            'https_watch_urls': True,
            'stream_type': 'hls'
        }
        return params, watch_url

    def _watch_recording(self, recording_id):
        """Build the watch request for a recording.

        :return: (post params, watch url)
        """
        self.logger.debug('_watch_recording ...')
        watch_url = self.API_WATCH_REC.format(self.base_url, recording_id)
        params = {
            'https_watch_urls': True,
            'stream_type': 'hls'
        }
        return params, watch_url

    def _watch_vod(self, vod_id):
        """Build the watch request for a VOD item.

        :return: (post params, watch url)
        """
        self.logger.debug('_watch_vod ...')
        watch_url = self.API_WATCH_VOD.format(self.base_url, vod_id)
        params = {
            'https_watch_urls': True,
            'stream_type': 'hls'
        }
        return params, watch_url

    def _get_streams(self):
        """Plugin entry point: ensure a valid session, then delegate to
        _watch() for the actual stream resolution.
        """
        email = self.get_option('email')
        password = self.get_option('password')

        if self.options.get('purge_credentials'):
            # wipe every cached session attribute and force a re-login
            self._session_attributes.set('beaker.session.id', None, expires=0)
            self._session_attributes.set('expires', None, expires=0)
            self._session_attributes.set('power_guide_hash', None, expires=0)
            self._session_attributes.set('pzuid', None, expires=0)
            self._session_attributes.set('uuid', None, expires=0)
            self._authed = False
            self.logger.info('All credentials were successfully removed.')

        if not self._authed and (not email and not password):
            self.logger.error('A login for Zattoo is required, use --zattoo-email EMAIL --zattoo-password PASSWORD to set them')
            return

        if self._authed:
            if self._expires < time.time():
                # login after 24h
                expires = time.time() + 3600 * 24
                self._session_attributes.set('expires', expires, expires=3600 * 24)
                self._authed = False

        if not self._authed:
            __hello = self._hello()
            if not self._login(email, password, __hello):
                self.logger.error('Failed to login, check your username/password')
                return

        return self._watch()
Ejemplo n.º 8
0
def _play_stream(HTTPBase, redirect=False):
    """Creates a livecli session and plays the stream.

    :param HTTPBase: the HTTP request handler serving this client
    :param redirect: if True, answer with a 301 to the direct stream url
                     instead of proxying the stream data
    """
    session = Livecli()
    session.set_logprefix("[ID-{0}]".format(str(int(time()))[4:]))
    logger = session.logger.new_module("livecli-server")
    session.set_loglevel("info")

    logger.info("User-Agent: {0}".format(
        HTTPBase.headers.get("User-Agent", "???")))
    logger.info("Client: {0}".format(HTTPBase.client_address))
    logger.info("Address: {0}".format(HTTPBase.address_string()))

    # Load custom user plugins
    if os.path.isdir(PLUGINS_DIR):
        session.load_plugins(PLUGINS_DIR)

    # decode the query string into (key, value) pairs
    old_data = parse_qsl(urlparse(HTTPBase.path).query)
    data = []
    for k, v in old_data:
        data += [(unquote_plus(k), unquote_plus(v))]

    data_other, session = command_session(session, data)

    url = data_other.get("url")
    if not url:
        HTTPBase._headers(404, "text/html")
        logger.error("No URL provided.")
        return
    quality = (data_other.get("q") or data_other.get("quality")
               or data_other.get("stream") or data_other.get("default-stream")
               or "best")
    # query parameters arrive as strings; convert the read-buffer size
    # to int (fd.read() needs an int) and fall back to 4096 on bad input
    try:
        cache = int(data_other.get("cache") or 4096)
    except (TypeError, ValueError):
        cache = 4096

    loglevel = data_other.get("l") or data_other.get("loglevel") or "debug"
    session.set_loglevel(loglevel)
    try:
        if redirect is True:
            # a 301 client can only follow plain http(s) based streams
            streams = session.streams(url, stream_types=["hls", "http"])
        else:
            streams = session.streams(url)
    except Exception:
        HTTPBase._headers(404, "text/html")
        logger.error("No Stream Found!")
        return

    if not streams:
        HTTPBase._headers(404, "text/html")
        return

    # XXX: only one quality will work currently
    try:
        stream = streams[quality]
    except KeyError:
        stream = streams["best"]
        quality = "best"

    if isinstance(stream, HTTPStream) is False and isinstance(
            stream, HDSStream) is False:
        # allow only http based streams: HDS HLS HTTP
        # RTMP is not supported
        HTTPBase._headers(404, "text/html")
        return

    if redirect is True:
        logger.info("301 - URL: {0}".format(stream.url))
        HTTPBase.send_response(301)
        HTTPBase.send_header("Location", stream.url)
        HTTPBase.end_headers()
        logger.info("301 - done")
        return

    # persist stream data so hls-session-reload can re-resolve the url
    hls_session_reload = data_other.get("hls-session-reload")
    if hls_session_reload:
        livecli_cache = Cache(filename="streamdata.json",
                              key_prefix="cache:{0}".format(stream.url))
        livecli_cache.set("cache_stream_name", quality,
                          (int(hls_session_reload) + 60))
        livecli_cache.set("cache_url", url, (int(hls_session_reload) + 60))
        session.set_option("hls-session-reload", int(hls_session_reload))

    try:
        fd = stream.open()
    except StreamError as err:
        HTTPBase._headers(404, "text/html")
        logger.error("Could not open stream: {0}".format(err))
        return

    HTTPBase._headers(200, "video/unknown")
    try:
        logger.debug("Pre-buffering {0} bytes".format(cache))
        while True:
            buff = fd.read(cache)
            if not buff:
                logger.error("No Data!")
                break
            HTTPBase.wfile.write(buff)
        HTTPBase.wfile.close()
    except socket.error as e:
        if isinstance(e.args, tuple) and e.errno == errno.EPIPE:
            # remote peer disconnected
            logger.info("Detected remote disconnect")
        else:
            logger.error(str(e))

    fd.close()
    logger.info("Stream ended")
    fd = None
Ejemplo n.º 9
0
class Resolve(Plugin):
    """Livecli Plugin that will try to find a validate streamurl

    Supported
        - embedded url of an already existing livecli plugin
        - website with an unencrypted fileurl in there source code
             - .m3u8
             - .f4m
             - .mp3 - only none broken urls
             - .mp4 - only none broken urls

    Unsupported
        - websites with dash, rtmp or other.
        - streams that require
            - an authentication
            - an API
        - streams that are hidden behind javascript or other encryption
        - websites with a lot of iframes
          (the blacklist feature can be used for unwanted domains)
    """

    # Regex for: an optional resolve:// prefix plus the target url
    _url_re = re.compile(r"""(resolve://)?(?P<url>.+)""")

    # Regex for: Iframes
    _iframe_re = re.compile(r"""
        <ifr(?:["']\s?\+\s?["'])?ame
        (?!\sname=["']g_iFrame).*?src=
        ["'](?P<url>[^"']+)["']
        .*?(?:/>|>(?:[^<>]+)?
        </ifr(?:["']\s?\+\s?["'])?ame(?:\s+)?>)
        """, re.VERBOSE | re.IGNORECASE | re.DOTALL)
    # Regex for: .f4m and .m3u8 files
    _playlist_re = re.compile(r"""(?:["']|=|&quot;)(?P<url>
        (?:https?:)?(?://|\\/\\/)?
            (?<!title=["'])
                [^"'<>\s\;]+\.(?:m3u8|f4m|mp3|mp4|mpd)
            (?:[^"'<>\s\\]+)?)
        (?:["']|(?<!;)\s|>|\\&quot;)
        """, re.DOTALL | re.VERBOSE)
    # Regex for: rtmp
    _rtmp_re = re.compile(r"""["'](?P<url>rtmp(?:e|s|t|te)?://[^"']+)["']""")
    # Regex for: .mp3 and mp4 files
    _httpstream_bitrate_re = re.compile(r"""_(?P<bitrate>\d{1,4})\.mp(?:3|4)""")
    # Regex for: streamBasePath for .f4m urls
    _stream_base_re = re.compile(r"""streamBasePath\s?(?::|=)\s?["'](?P<base>[^"']+)["']""", re.IGNORECASE)
    # Regex for: javascript redirection
    _window_location_re = re.compile(r"""<script[^<]+window\.location\.href\s?=\s?["'](?P<url>[^"']+)["'];[^<>]+""", re.DOTALL)
    # Regex for: iframes hidden inside a javascript unescape(...) call
    _unescape_iframe_re = re.compile(r"""unescape\(["'](?P<data>%3C(?:iframe|%69%66%72%61%6d%65)%20[^"']+)["']""", re.IGNORECASE)
    # Regex for obviously ad paths
    _ads_path = re.compile(r"""(?:/static)?/ads?/?(?:\w+)?(?:\d+x\d+)?(?:_\w+)?\.(?:html?|php)""")
    # user-configurable filters, presumably wired to --resolve-* cli
    # options (see _make_url_list) — confirm against option parser
    options = PluginOptions({
        "blacklist_netloc": None,
        "blacklist_path": None,
        "whitelist_netloc": None,
        "whitelist_path": None,
    })

    def __init__(self, url):
        """Set up the plugin cache, referer and default headers."""
        super(Resolve, self).__init__(url)
        self._session_attributes = Cache(filename="plugin-cache.json", key_prefix="resolve:attributes")
        self._cache_url = self._session_attributes.get("cache_url")
        # prefer the previously cached url as Referer; otherwise use the
        # current url with the resolve:// prefix stripped
        self.referer = self._cache_url or self.url.replace("resolve://", "")
        self.headers = {
            "User-Agent": useragents.FIREFOX,
            "Referer": self.referer
        }

    @classmethod
    def priority(cls, url):
        """
        Returns
        - NO priority if the URL is not prefixed
        - HIGH priority if the URL is prefixed
        :param url: the URL to find the plugin priority for
        :return: plugin priority for the given URL
        """
        m = cls._url_re.match(url)
        if m:
            # reuse the match object instead of running the regex twice
            prefix, url = m.groups()
            if prefix is not None:
                return HIGH_PRIORITY
        return NO_PRIORITY

    @classmethod
    def can_handle_url(cls, url):
        """Return True when the resolve regex captured a url group."""
        match = cls._url_re.match(url)
        if match is None:
            return None
        return match.group("url") is not None

    def help_info_e(self, e):
        """Log a workaround hint for known SSL verification errors."""
        if "CERTIFICATE_VERIFY_FAILED" not in str(e):
            return
        self.logger.info("A workaround for this error is --http-no-ssl-verify "
                         "https://livecli.github.io/cli.html#cmdoption-http-no-ssl-verify")
        return

    def compare_url_path(self, parsed_url, check_list):
        """compare if the parsed url matches a url in the check list

        Args:
           parsed_url: a url that was used with urlparse
           check_list: a list of urls that should get checked

        Returns:
            True
                if parsed_url in check_list
            False
                if parsed_url not in check_list
        """
        return any(
            parsed_url.netloc.endswith(netloc) and parsed_url.path.startswith(path)
            for netloc, path in check_list
        )

    def merge_path_list(self, static_list, user_list):
        """merge the static list from resolve.py with a user list

        Args:
           static_list: static list from this plugin
           user_list: list from a user command

        Returns:
            A new valid static_list
        """
        for raw in user_list:
            # repair entries that lack a scheme before parsing them
            fixed = raw if raw.startswith(("http", "//")) else update_scheme("http://", raw)
            parts = urlparse(fixed)
            if parts.netloc and parts.path:
                static_list.append((parts.netloc, parts.path))
        return static_list

    def _make_url_list(self, old_list, base_url, url_type="", stream_base=""):
        """Creates a list of validate urls from a list of broken urls
           and removes every blacklisted url

        Args:
            old_list: List of broken urls
            base_url: url that will get used for scheme and netloc
            url_type: can be iframe or playlist
                - iframe is used for
                    --resolve-whitelist-netloc
                - playlist is not used at the moment
            stream_base: basically same as base_url, but used for .f4m files.

        Returns:
            List of validate urls
        """
        blacklist_netloc_user = self.get_option("blacklist_netloc")
        blacklist_netloc = (
            "127.0.0.1",
            "about:blank",
            "abv.bg",
            "adfox.ru",
            "googletagmanager.com",
            "javascript:false",
        )
        whitelist_netloc_user = self.get_option("whitelist_netloc")

        blacklist_path = [
            ("expressen.se", "/_livetvpreview/"),
            ("facebook.com", "/plugins"),
            ("vesti.ru", "/native_widget.html"),
        ]
        # Add --resolve-blacklist-path to blacklist_path
        blacklist_path_user = self.get_option("blacklist_path")
        if blacklist_path_user is not None:
            blacklist_path = self.merge_path_list(blacklist_path, blacklist_path_user)

        whitelist_path = []
        whitelist_path_user = self.get_option("whitelist_path")
        if whitelist_path_user is not None:
            whitelist_path = self.merge_path_list(whitelist_path, whitelist_path_user)

        blacklist_endswith = (
            ".gif",
            ".jpg",
            ".png",
            ".svg",
            ".vtt",
            "/chat.html",
            "/chat",
        )

        new_list = []
        for url in old_list:
            # Don't add the same url as self.url to the list.
            if url == self.url:
                continue
            # Repair the scheme
            new_url = url.replace("\\", "")
            if new_url.startswith("http&#58;//"):
                new_url = "http:" + new_url[9:]
            elif new_url.startswith("https&#58;//"):
                new_url = "https:" + new_url[10:]
            # Repair the domain
            # (use != / == here: comparing str literals with "is" relied
            # on CPython interning and raises SyntaxWarning on 3.8+)
            # NOTE(review): assumes new_url has at least 2 chars — confirm
            if stream_base and new_url[1] != "/":
                if new_url[0] == "/":
                    new_url = new_url[1:]
                new_url = urljoin(stream_base, new_url)
            else:
                new_url = urljoin(base_url, new_url)
            # Parse the url and remove not wanted urls
            parse_new_url = urlparse(new_url)

            REMOVE = False

            # sorted after the way livecli will try to remove an url
            status_remove = [
                "WL-netloc",  # - Allow only whitelisted domains --resolve-whitelist-netloc
                "WL-path",    # - Allow only whitelisted paths from a domain --resolve-whitelist-path
                "BL-static",  # - Removes blacklisted domains
                "BL-netloc",  # - Removes blacklisted domains --resolve-blacklist-netloc
                "BL-path",    # - Removes blacklisted paths from a domain --resolve-blacklist-path
                "BL-ew",      # - Removes images and chatrooms
                "ADS",        # - Remove obviously ad urls
            ]

            if REMOVE is False:
                count = 0
                # each entry below lines up with status_remove[count - 1]
                for url_status in ((url_type == "iframe" and
                                    whitelist_netloc_user is not None and
                                    parse_new_url.netloc.endswith(tuple(whitelist_netloc_user)) is False),
                                   (url_type == "iframe" and
                                    whitelist_path_user is not None and
                                    self.compare_url_path(parse_new_url, whitelist_path) is False),
                                   (parse_new_url.netloc.endswith(blacklist_netloc)),
                                   (blacklist_netloc_user is not None and
                                    parse_new_url.netloc.endswith(tuple(blacklist_netloc_user))),
                                   (self.compare_url_path(parse_new_url, blacklist_path) is True),
                                   (parse_new_url.path.endswith(blacklist_endswith)),
                                   (self._ads_path.match(parse_new_url.path))):

                    count += 1
                    if url_status:
                        REMOVE = True
                        break

            if REMOVE is True:
                self.logger.debug("{0} - Removed url: {1}".format(status_remove[count - 1], new_url))
                continue
            # Add url to the list
            new_list += [new_url]
        # Remove duplicates
        new_list = list(set(new_list))
        return new_list

    def _cache_self_url(self):
        """Cache self.url

        Raises:
            NoPluginError: if self.url is the same as self._cache_url
        """
        # TODO: use a list of all used urls
        #       and remove the urls with self._make_url_list
        # this is now useless for one url check
        # because self._make_url_list will remove self.url
        if self._cache_url == self.url:
            self.logger.debug("Abort: Website is already in cache.")
            raise NoPluginError

        # a short 2 second cache entry guards against redirect loops
        # with the same url
        self._session_attributes.set("cache_url", self.url, expires=2)
        return

    def _iframe_src(self, res):
        """Tries to find every iframe url,
           it will use the first iframe as self.url,
           but every other url can will be shown in the terminal.

        Args:
            res: Content from self._res_text

        Returns:
            True
                if self.url was changed with an iframe url.
            None
                if no iframe was found.
        """
        candidates = self._iframe_re.findall(res)

        # Fallback for unescape('%3Ciframe%20
        escaped_chunks = self._unescape_iframe_re.findall(res)
        if escaped_chunks:
            decoded = ",".join(unquote(chunk) for chunk in escaped_chunks)
            candidates += self._iframe_re.findall(decoded)

        if candidates:
            iframe_list = self._make_url_list(candidates, self.url, url_type="iframe")
            if iframe_list:
                self.logger.info("Found iframes: {0}".format(", ".join(iframe_list)))
                self.url = iframe_list[0]
                return True
        return None

    def _window_location(self, res):
        """Tries to find a script with window.location.href

        Args:
            res: Content from self._res_text

        Returns:
            True
                if self.url was changed.
            None
                if no url was found.
        """

        match = self._window_location_re.search(res)
        if match:
            self.url = match.group("url")
            return True
        return None

    def _resolve_playlist(self, res, playlist_all):
        """Generator used by _resolve_res: turn playlist urls into streams.

        Args:
            res: Content from self._res_text (not referenced in the body,
                 kept so the caller's signature stays stable)
            playlist_all: List of playlist/stream urls

        Returns:
            yield every (name, stream) pair that could be opened
        """
        for url in playlist_all:
            # dispatch on the path extension of each url
            parsed_url = urlparse(url)
            if parsed_url.path.endswith((".m3u8")):
                try:
                    streams = HLSStream.parse_variant_playlist(self.session, url, headers=self.headers).items()
                    # playlist without variants: expose it as a single "live" stream
                    if not streams:
                        yield "live", HLSStream(self.session, url, headers=self.headers)
                    for s in streams:
                        yield s
                except Exception as e:
                    self.logger.error("Skipping hls_url - {0}".format(str(e)))
                    self.help_info_e(e)
            elif parsed_url.path.endswith((".f4m")):
                try:
                    for s in HDSStream.parse_manifest(self.session, url, headers=self.headers).items():
                        yield s
                except Exception as e:
                    self.logger.error("Skipping hds_url - {0}".format(str(e)))
                    self.help_info_e(e)
            elif parsed_url.path.endswith((".mp3", ".mp4")):
                try:
                    # default stream name, replaced by "<bitrate>k" if the
                    # url carries a bitrate in its query string
                    name = "live"
                    m = self._httpstream_bitrate_re.search(url)
                    if m:
                        name = "{0}k".format(m.group("bitrate"))
                    yield name, HTTPStream(self.session, url, headers=self.headers)
                except Exception as e:
                    self.logger.error("Skipping http_url - {0}".format(str(e)))
                    self.help_info_e(e)
            elif parsed_url.path.endswith((".mpd")):
                try:
                    # DASH manifests are only logged; no stream is created here
                    self.logger.info("Found mpd: {0}".format(url))
                except Exception as e:
                    self.logger.error("Skipping mpd_url - {0}".format(str(e)))
                    self.help_info_e(e)

    def _resolve_res(self, res):
        """Tries to find every .f4m or .m3u8 url on this website,
           it will try to add every url that was found as a stream.

        Args:
            res: Content from self._res_text

        Returns:
            True
              - if stream got added
            False
              - if no stream got added
        """
        playlist_all = self._playlist_re.findall(res)

        # experimental rtmp search, will only print the url.
        m_rtmp = self._rtmp_re.search(res)
        if m_rtmp:
            self.logger.info("Found RTMP: {0}".format(m_rtmp.group("url")))

        if playlist_all:
            # m_base is used for .f4m files that doesn't have a base_url
            m_base = self._stream_base_re.search(res)
            if m_base:
                stream_base = m_base.group("base")
            else:
                stream_base = ""

            playlist_list = self._make_url_list(playlist_all, self.url, url_type="playlist", stream_base=stream_base)
            if playlist_list:
                self.logger.debug("Found URL: {0}".format(", ".join(playlist_list)))
                return self._resolve_playlist(res, playlist_list)
        return False

    def _res_text(self, url):
        """Content of a website

        Args:
            url: URL with an embedded Video Player.

        Returns:
            Content of the response

        Raises:
            NoStreamsError: on 403 or 404 HTTP errors
        """
        try:
            res = http.get(url, headers=self.headers, allow_redirects=True)
        except Exception as e:
            # only the error text is available here, so the branches
            # match on well-known substrings of the exception message
            if "Received response with content-encoding: gzip" in str(e):
                # retry without gzip; presumably the server sent an
                # invalid gzip body — TODO confirm
                headers = {
                    "User-Agent": useragents.FIREFOX,
                    "Referer": self.referer,
                    "Accept-Encoding": "deflate"
                }
                res = http.get(url, headers=headers, allow_redirects=True)
            elif "403 Client Error" in str(e):
                self.logger.error("Website Access Denied/Forbidden, you might be geo-blocked or other params are missing.")
                raise NoStreamsError(self.url)
            elif "404 Client Error" in str(e):
                self.logger.error("Website was not found, the link is broken or dead.")
                raise NoStreamsError(self.url)
            else:
                raise e

        # log every redirect that was followed before the final url
        if res.history:
            for resp in res.history:
                self.logger.debug("Redirect: {0} - {1}".format(resp.status_code, resp.url))
            self.logger.debug("URL: {0}".format(res.url))
        return res.text

    def _get_streams(self):
        """Tries to find streams.

        Returns:
            Playable video from self._resolve_res
                or
            New self.url for livecli
        Raises:
            NoPluginError: if no video was found.
        """
        self.url = self.url.replace("resolve://", "")
        self._cache_self_url()
        self.url = update_scheme("http://", self.url)

        # GET website content
        page = self._res_text(self.url)

        # HLS or HDS stream found directly on the page
        streams = self._resolve_res(page)
        if streams:
            return streams

        # iframe url, then window.location.href script as a fallback
        if self._iframe_src(page) or self._window_location(page):
            return self.session.streams(self.url)

        raise NoPluginError
Ejemplo n.º 10
0
class ABweb(Plugin):
    '''BIS Livestreams of french AB Groupe
       http://www.abweb.com/BIS-TV-Online/
    '''

    login_url = 'http://www.abweb.com/BIS-TV-Online/Default.aspx'

    _url_re = re.compile(r'https?://(?:www\.)?abweb\.com/BIS-TV-Online/bistvo-tele-universal.aspx', re.IGNORECASE)
    _hls_re = re.compile(r'''["']file["']:\s?["'](?P<url>[^"']+\.m3u8[^"']+)["']''')
    _iframe_re = re.compile(r'''<iframe[^>]+src=["'](?P<url>[^"']+)["']''')

    # regexes used to harvest the hidden ASP.NET form inputs for the login post
    _input_re = re.compile(r'''(<input[^>]+>)''')
    _name_re = re.compile(r'''name=["']([^"']*)["']''')
    _value_re = re.compile(r'''value=["']([^"']*)["']''')

    # cached login cookies are considered valid for 24 hours
    expires_time = 3600 * 24

    options = PluginOptions({
        'username': None,
        'password': None,
        'purge_credentials': None
    })

    def __init__(self, url):
        super(ABweb, self).__init__(url)
        self._session_attributes = Cache(filename='plugin-cache.json', key_prefix='abweb:attributes')
        # authenticated only while both session cookies are cached
        self._authed = self._session_attributes.get('ASP.NET_SessionId') and self._session_attributes.get('.abportail1')
        self._expires = self._session_attributes.get('expires', time.time() + self.expires_time)

    @classmethod
    def can_handle_url(cls, url):
        return cls._url_re.match(url) is not None

    def set_expires_time_cache(self):
        '''Remember when the cached cookies have to be renewed.'''
        expires = time.time() + self.expires_time
        self._session_attributes.set('expires', expires, expires=self.expires_time)

    def get_iframe_url(self):
        '''Find the embedded player iframe on the website.

        Returns:
            the iframe url

        Raises:
            PluginError: if no iframe was found
        '''
        self.logger.debug('search for an iframe')
        res = http.get(self.url)
        m = self._iframe_re.search(res.text)
        if not m:
            raise PluginError('No iframe found.')

        iframe_url = m.group('url')
        iframe_url = update_scheme('http://', iframe_url)
        self.logger.debug('IFRAME URL={0}'.format(iframe_url))
        return iframe_url

    def get_hls_url(self, iframe_url):
        '''Find the HLS playlist url inside the iframe.

        Returns:
            the m3u8 url

        Raises:
            PluginError: if no playlist was found
        '''
        self.logger.debug('search for hls url')
        res = http.get(iframe_url)
        m = self._hls_re.search(res.text)
        if not m:
            raise PluginError('No playlist found.')

        # cleanup: "m and" was redundant, m is always truthy at this point
        return m.group('url')

    def _login(self, username, password):
        '''login and update cached cookies'''
        self.logger.debug('login ...')

        # the ASP.NET form requires every hidden input field to be posted back
        res = http.get(self.login_url)
        input_list = self._input_re.findall(res.text)
        if not input_list:
            raise PluginError('Missing input data on login website.')

        data = {}
        for _input_data in input_list:
            try:
                _input_name = self._name_re.search(_input_data).group(1)
            except AttributeError:
                # an input without a name attribute can't be submitted
                continue

            try:
                _input_value = self._value_re.search(_input_data).group(1)
            except AttributeError:
                _input_value = ''

            data[_input_name] = _input_value

        login_data = {
            'ctl00$Login1$UserName': username,
            'ctl00$Login1$Password': password,
            'ctl00$Login1$LoginButton.x': '0',
            'ctl00$Login1$LoginButton.y': '0'
        }
        data.update(login_data)

        res = http.post(self.login_url, data=data)

        # consistency: use the class constant instead of a second 3600 * 24 literal
        for cookie in http.cookies:
            self._session_attributes.set(cookie.name, cookie.value, expires=self.expires_time)

        if self._session_attributes.get('ASP.NET_SessionId') and self._session_attributes.get('.abportail1'):
            self.logger.debug('New session data')
            self.set_expires_time_cache()
            return True
        else:
            self.logger.error('Failed to login, check your username/password')
            return False

    def _get_streams(self):
        '''Log in (or reuse cached cookies) and yield the HLS streams.'''
        http.headers.update({'User-Agent': useragents.CHROME,
                             'Referer': 'http://www.abweb.com/BIS-TV-Online/bistvo-tele-universal.aspx'})

        login_username = self.get_option('username')
        login_password = self.get_option('password')

        if self.options.get('purge_credentials'):
            self._session_attributes.set('ASP.NET_SessionId', None, expires=0)
            self._session_attributes.set('.abportail1', None, expires=0)
            self._authed = False
            self.logger.info('All credentials were successfully removed.')

        if not self._authed and not (login_username and login_password):
            self.logger.error('A login for ABweb is required, use --abweb-username USERNAME --abweb-password PASSWORD')
            return

        if self._authed:
            if self._expires < time.time():
                # cached cookies are older than expires_time, force a new login
                self.logger.debug('get new cached cookies')
                self.set_expires_time_cache()
                self._authed = False
            else:
                self.logger.info('Attempting to authenticate using cached cookies')
                http.cookies.set('ASP.NET_SessionId', self._session_attributes.get('ASP.NET_SessionId'))
                http.cookies.set('.abportail1', self._session_attributes.get('.abportail1'))

        if not self._authed and not self._login(login_username, login_password):
            return

        iframe_url = self.get_iframe_url()
        # the iframe acts as referer for the playlist request
        http.headers.update({'Referer': iframe_url})

        hls_url = self.get_hls_url(iframe_url)
        hls_url = update_scheme(self.url, hls_url)

        self.logger.debug('URL={0}'.format(hls_url))
        variant = HLSStream.parse_variant_playlist(self.session, hls_url)
        if variant:
            for q, s in variant.items():
                yield q, s
        else:
            # playlist without variants, expose it as a single "live" stream
            yield 'live', HLSStream(self.session, hls_url)
Ejemplo n.º 11
0
class FC2(Plugin):
    '''Livecli Plugin for live.fc2.com'''

    url_login = '******'
    url_member_api = 'https://live.fc2.com/api/memberApi.php'
    url_server = 'https://live.fc2.com/api/getControlServer.php'

    _url_re = re.compile(r'''https?://live\.fc2\.com/(?P<user_id>\d+)/?$''')

    # message counters, updated from the WebSocket background thread
    count = 0
    count_ping = 0

    # schema for the memberApi.php response
    _version_schema = validate.Schema({
        'status': int,
        'data': {
            'channel_data': {
                'channelid': validate.text,
                'userid': validate.text,
                'adult': int,
                'login_only': int,
                'version': validate.text,
                'fee': int,
            },
            'user_data': {
                'is_login': int,
                'userid': int,
                'fc2id': int,
                'name': validate.text,
                'point': int,
                'adult_access': int,
                'recauth': int,
            }
        }
    })

    # set by the WebSocket thread once the media server information arrived
    host_data = ''
    host_found = False

    # cached login cookies are considered valid for 24 hours
    expires_time = 3600 * 24

    options = PluginOptions({
        'username': None,
        'password': None,
        'purge_credentials': None
    })

    def __init__(self, url):
        super(FC2, self).__init__(url)
        self._session_attributes = Cache(filename='plugin-cache.json',
                                         key_prefix='fc2:attributes')
        # authenticated only while every login cookie is cached
        self._authed = (self._session_attributes.get('fcu')
                        and self._session_attributes.get('fgcv')
                        and self._session_attributes.get('FCSID')
                        and self._session_attributes.get('login_status')
                        and self._session_attributes.get('glgd_val')
                        and self._session_attributes.get('PHPSESSID')
                        and self._session_attributes.get('secure_check_fc2'))
        self._expires = self._session_attributes.get(
            'expires',
            time.time() + self.expires_time)

    @classmethod
    def can_handle_url(cls, url):
        # "is not None" for consistency with the other plugins;
        # truthiness for callers is unchanged
        return cls._url_re.match(url) is not None

    def set_expires_time_cache(self):
        '''Remember when the cached cookies have to be renewed.'''
        expires = time.time() + self.expires_time
        self._session_attributes.set('expires',
                                     expires,
                                     expires=self.expires_time)

    def _login(self, username, password):
        '''login and update cached cookies'''
        self.logger.debug('login ...')
        # sets required cookies before the login post
        http.get(self.url)
        data = {
            'pass': password,
            'email': username,
            'done': 'livechat',
            'keep_login': 1
        }

        http.post(self.url_login, data=data, allow_redirects=True)
        # consistency: use the class constant instead of a 3600 * 24 literal
        for cookie in http.cookies:
            self._session_attributes.set(cookie.name,
                                         cookie.value,
                                         expires=self.expires_time)

        if (self._session_attributes.get('fcu')
                and self._session_attributes.get('fgcv')
                and self._session_attributes.get('FCSID')
                and self._session_attributes.get('login_status')
                and self._session_attributes.get('glgd_val')
                and self._session_attributes.get('PHPSESSID')
                and self._session_attributes.get('secure_check_fc2')):

            self.logger.debug('New session data')
            self.set_expires_time_cache()
            return True
        else:
            self.logger.error('Failed to login, check your username/password')
            return False

    def _get_version(self, user_id):
        '''Get the channel version and validate that the stream is playable.

        Raises:
            PluginError: for login-only streams without a login,
                         or for streams that require a fee
        '''
        data = {
            'user': 1,
            'channel': 1,
            'profile': 1,
            'streamid': int(user_id)
        }
        res = http.post(self.url_member_api, data=data)
        res_data = http.json(res, schema=self._version_schema)
        channel_data = res_data['data']['channel_data']
        user_data = res_data['data']['user_data']

        if (channel_data['login_only'] != 0 and user_data['is_login'] != 1):
            raise PluginError('A login is required for this stream.')

        if channel_data['fee'] != 0:
            raise PluginError(
                'Only streams without a fee are supported by Livecli.')

        version = channel_data['version']
        if user_data['is_login']:
            self.logger.info('Logged in as {0}'.format(user_data['name']))
        self.logger.debug('Found version: {0}'.format(version))
        return version

    def payload_msg(self, name):
        ''' Format the WebSocket message '''
        self.count_ping += 1
        payload = json.dumps({
            'name': str(name),
            'arguments': {},
            'id': int(self.count_ping)
        })
        return payload

    def _get_ws_url(self, user_id, version):
        '''Ask the control server API for the WebSocket url.

        Raises:
            PluginError: if the broadcaster is offline (status 11)
        '''
        self.logger.debug('_get_ws_url ...')
        data = {
            'channel_id': user_id,
            'channel_version': version,
            'client_type': 'pc',
            'client_app': 'browser'
        }

        res = http.post(self.url_server, data=data)
        w_data = http.json(res)
        if w_data['status'] == 11:
            raise PluginError('The broadcaster is currently not available')

        new_dict = {
            'control_token': w_data['control_token'],
            'mode': 'pay',
            'comment': '0',
        }
        ws_url = filter_urlquery(w_data['url'], new_dict=new_dict)
        self.logger.debug('WS URL: {0}'.format(ws_url))
        return ws_url

    def _get_ws_data(self, ws_url):
        '''Open the WebSocket and wait for the media server information.

        Returns:
            True if host data was found, False on timeout.
        '''
        self.logger.debug('_get_ws_data ...')
        ws = create_connection(ws_url)
        ws.send(self.payload_msg('get_media_server_information'))

        def ws_ping():
            ''' ping the WebSocket every 30 seconds to keep it open '''
            if ws.connected is True:
                t1 = Timer(30.0, ws_ping)
                t1.daemon = True
                t1.start()
                ws.send(self.payload_msg('heartbeat'))

        def ws_recv():
            ''' handle incoming WebSocket messages '''
            while True:
                self.count += 1
                data = json.loads(ws.recv())
                time_utc = datetime.utcnow().strftime('%H:%M:%S UTC')
                if data['name'] not in [
                        'comment', 'ng_commentq', 'user_count', 'ng_comment'
                ]:
                    self.logger.debug('{0} - {1} - {2}'.format(
                        time_utc, self.count, data['name']))

                if data['name'] == '_response_' and data['arguments'].get(
                        'host'):
                    self.logger.debug('Found host data')
                    self.host_data = data
                    self.host_found = True
                elif data['name'] == 'media_connection':
                    self.logger.debug('successfully opened stream')
                elif data['name'] == 'control_disconnection':
                    break
                elif data['name'] == 'publish_stop':
                    self.logger.debug('Stream ended')
                elif data['name'] == 'channel_information':
                    if data['arguments'].get('fee') != 0:
                        # bugfix: the message had no placeholder, so the
                        # .format() call never showed the fee
                        self.logger.error(
                            'Stream requires a fee now ({0}), this is not supported by Livecli.'
                            .format(data['arguments'].get('fee')))
                        break

            ws.close()

        # WebSocket background process
        ws_ping()
        t2 = Thread(target=ws_recv)
        t2.daemon = True
        t2.start()

        # wait for the WebSocket
        host_timeout = False
        while self.host_found is False:
            if self.count >= 30:
                # bugfix: the flag was set to False and the loop had no
                # break, so the timeout never fired and the loop could
                # spin forever; give up after 30 received messages
                host_timeout = True
                break

        if host_timeout:
            return False
        return True

    def _get_rtmp(self, data):
        '''Yield a single RTMP stream built from the media server data.'''
        self.logger.debug('_get_rtmp ...')

        app = filter_urlquery(data['application'],
                              new_dict={'media_token': data['media_token']})
        host = data['host']

        params = {
            'app': app,
            'flashVer': 'WIN 29,0,0,140',
            'swfUrl': 'https://live.fc2.com/swf/liveVideo.swf',
            'tcUrl': 'rtmp://{0}/{1}'.format(host, app),
            'live': 'yes',
            'pageUrl': self.url,
            'playpath': data['play_rtmp_stream'],
            'host': host,
        }
        yield 'live', RTMPStream(self.session, params)

    def _get_streams(self):
        '''Log in (or reuse cached cookies) and return the RTMP stream.'''
        http.headers.update({
            'User-Agent': useragents.FIREFOX,
            'Referer': self.url
        })

        login_username = self.get_option('username')
        login_password = self.get_option('password')

        if self.options.get('purge_credentials'):
            self._session_attributes.set('fcu', None, expires=0)
            self._session_attributes.set('fgcv', None, expires=0)
            self._session_attributes.set('FCSID', None, expires=0)
            self._session_attributes.set('login_status', None, expires=0)
            self._session_attributes.set('glgd_val', None, expires=0)
            self._session_attributes.set('PHPSESSID', None, expires=0)
            self._session_attributes.set('secure_check_fc2', None, expires=0)
            self._authed = False
            self.logger.info('All credentials were successfully removed.')

        if self._authed:
            if self._expires < time.time():
                # cached cookies are older than expires_time, force a new login
                self.logger.debug('get new cached cookies')
                self.set_expires_time_cache()
                self._authed = False
            else:
                self.logger.info(
                    'Attempting to authenticate using cached cookies')
                http.cookies.set('fcu', self._session_attributes.get('fcu'))
                http.cookies.set('fgcv', self._session_attributes.get('fgcv'))
                http.cookies.set('FCSID',
                                 self._session_attributes.get('FCSID'))
                http.cookies.set('login_status',
                                 self._session_attributes.get('login_status'))
                http.cookies.set('glgd_val',
                                 self._session_attributes.get('glgd_val'))
                http.cookies.set('PHPSESSID',
                                 self._session_attributes.get('PHPSESSID'))
                http.cookies.set(
                    'secure_check_fc2',
                    self._session_attributes.get('secure_check_fc2'))

        if (not self._authed and login_username and login_password):
            self._login(login_username, login_password)

        match = self._url_re.match(self.url)
        if not match:
            return

        user_id = match.group('user_id')

        version = self._get_version(user_id)
        ws_url = self._get_ws_url(user_id, version)
        if self._get_ws_data(ws_url):
            return self._get_rtmp(self.host_data['arguments'])
Ejemplo n.º 12
0
class WWENetwork(Plugin):
    url_re = re.compile(r"https?://network.wwe.com")
    content_id_re = re.compile(r'''"content_id" : "(\d+)"''')
    playback_scenario = "HTTP_CLOUD_WIRED"
    login_url = "https://secure.net.wwe.com/workflow.do"
    login_page_url = "https://secure.net.wwe.com/enterworkflow.do?flowId=account.login&forwardUrl=http%3A%2F%2Fnetwork.wwe.com"
    api_url = "https://ws.media.net.wwe.com/ws/media/mf/op-findUserVerifiedEvent/v-2.3"
    _info_schema = validate.Schema(
        validate.union({
            "status": validate.union({
                "code": validate.all(validate.xml_findtext(".//status-code"), validate.transform(int)),
                "message": validate.xml_findtext(".//status-message"),
            }),
            "urls": validate.all(
                validate.xml_findall(".//url"),
                [validate.getattr("text")]
            ),
            validate.optional("fingerprint"): validate.xml_findtext(".//updated-fingerprint"),
            validate.optional("session_key"): validate.xml_findtext(".//session-key"),
            "session_attributes": validate.all(
                validate.xml_findall(".//session-attribute"),
                [validate.getattr("attrib"),
                 validate.union({
                     "name": validate.get("name"),
                     "value": validate.get("value")
                 })]
            )
        })
    )
    options = PluginOptions({
        "email": None,
        "password": None,
    })

    def __init__(self, url):
        super(WWENetwork, self).__init__(url)
        http.headers.update({"User-Agent": useragents.CHROME})
        self._session_attributes = Cache(filename="plugin-cache.json", key_prefix="wwenetwork:attributes")
        self._session_key = self.cache.get("session_key")
        self._authed = self._session_attributes.get("ipid") and self._session_attributes.get("fprt")

    @classmethod
    def can_handle_url(cls, url):
        return cls.url_re.match(url) is not None

    def login(self, email, password):
        self.logger.debug("Attempting login as {0}", email)
        # sets some required cookies to login
        http.get(self.login_page_url)
        # login
        res = http.post(self.login_url, data=dict(registrationAction='identify',
                                                  emailAddress=email,
                                                  password=password,
                                                  submitButton=""),
                        headers={"Referer": self.login_page_url},
                        allow_redirects=False)

        self._authed = "Authentication Error" not in res.text
        if self._authed:
            self._session_attributes.set("ipid", res.cookies.get("ipid"), expires=3600 * 1.5)
            self._session_attributes.set("fprt", res.cookies.get("fprt"), expires=3600 * 1.5)

        return self._authed

    def _update_session_attribute(self, key, value):
        if value:
            self._session_attributes.set(key, value, expires=3600 * 1.5)  # 1h30m expiry
            http.cookies.set(key, value)

    @property
    def session_key(self):
        return self._session_key

    @session_key.setter
    def session_key(self, value):
        self.cache.set("session_key", value)
        self._session_key = value

    def _get_media_info(self, content_id):
        """
        Get the info about the content, based on the ID
        :param content_id:
        :return:
        """
        params = {"identityPointId": self._session_attributes.get("ipid"),
                  "fingerprint": self._session_attributes.get("fprt"),
                  "contentId": content_id,
                  "playbackScenario": self.playback_scenario,
                  "platform": "WEB_MEDIAPLAYER_5",
                  "subject": "LIVE_EVENT_COVERAGE",
                  "frameworkURL": "https://ws.media.net.wwe.com",
                  "_": int(time.time())}
        if self.session_key:
            params["sessionKey"] = self.session_key
        url = self.api_url.format(id=content_id)
        res = http.get(url, params=params)
        return http.xml(res, ignore_ns=True, schema=self._info_schema)

    def _get_content_id(self):
        #  check the page to find the contentId
        res = http.get(self.url)
        m = self.content_id_re.search(res.text)
        if m:
            return m.group(1)

    def _get_streams(self):
        email = self.get_option("email")
        password = self.get_option("password")

        if not self._authed and (not email and not password):
            self.logger.error("A login for WWE Network is required, use --wwenetwork-email/"
                              "--wwenetwork-password to set them")
            return

        if not self._authed:
            if not self.login(email, password):
                self.logger.error("Failed to login, check your username/password")
                return

        content_id = self._get_content_id()
        if content_id:
            self.logger.debug("Found content ID: {0}", content_id)
            info = self._get_media_info(content_id)
            if info["status"]["code"] == 1:
                # update the session attributes
                self._update_session_attribute("fprt", info.get("fingerprint"))
                for attr in info["session_attributes"]:
                    self._update_session_attribute(attr["name"], attr["value"])

                if info.get("session_key"):
                    self.session_key = info.get("session_key")
                for url in info["urls"]:
                    for s in HLSStream.parse_variant_playlist(self.session, url, name_fmt="{pixels}_{bitrate}").items():
                        yield s
            else:
                raise PluginError("Could not load streams: {message} ({code})".format(**info["status"]))