def __extract_mycloud(url, content, referer=None):
    fixHttp = lambda x: ("https://" + x) if not x.startswith("http") else x
    strip_res = lambda x: x.split("/")[-1].split(".")[0]
    formatUrls = lambda x: (strip_res(x), http.add_referer_url(x, url))

    # Get the master m3u8 playlist URL; only the first "file" entry is used
    m3u_list = re.findall(r"\"file\"\s*:\s*\"/*(.+)\"", content)
    m3u_list = map(fixHttp, m3u_list)
    m3u_list = m3u_list[0]

    # Fetch the playlist and parse out one media URI per resolution
    playlist_req = http.send_request(http.add_referer_url(m3u_list, url))
    if playlist_req.status_code != 200:
        raise Exception("Error from server %d" % playlist_req.status_code)

    playlist_content = playlist_req.text
    playlist_content = map(lambda x: x.strip(), playlist_content.split("\n"))
    playlist_content = filter(lambda x: not x.startswith("#") and len(x),
                              playlist_content)

    # Build source urls
    sources = map(lambda x: __relative_url(m3u_list, x), playlist_content)
    sources = map(formatUrls, sources)

    return __check_video_list(url, sources, True, True)
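
# A standalone sketch of the playlist filtering above, run against
# hypothetical m3u8 text instead of a live request:
_sample_m3u8 = ("#EXTM3U\n"
                "#EXT-X-STREAM-INF:BANDWIDTH=1280000,RESOLUTION=640x360\n"
                "360/playlist.m3u8\n"
                "#EXT-X-STREAM-INF:BANDWIDTH=2560000,RESOLUTION=1280x720\n"
                "720/playlist.m3u8\n")
_lines = map(lambda x: x.strip(), _sample_m3u8.split("\n"))
_uris = filter(lambda x: not x.startswith("#") and len(x), _lines)
# _uris == ['360/playlist.m3u8', '720/playlist.m3u8']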
def __extract_9anime(url, page_content, referer=None):
    episode_id = url.rsplit('/', 1)[1]
    url_info = urlparse.urlparse(url)
    domain = url_info.netloc
    scheme = url_info.scheme

    url_base = "%s://%s" % (scheme, domain)

    ts_value = NineAnimeUrlExtender.get_ts_value(page_content)
    server_id = NineAnimeUrlExtender.get_server_value(page_content)
    extra_param = NineAnimeUrlExtender.get_extra_url_parameter(
        episode_id, server_id, ts_value)
    ep_info_url = "%s/ajax/episode/info?ts=%s&_=%d&id=%s&server=%d" % \
        (url_base, ts_value, extra_param, episode_id, server_id)

    time.sleep(0.3)
    urlRequest = http.send_request(ep_info_url)
    grabInfo = json.loads(urlRequest.text)
    grabInfo = NineAnimeUrlExtender.decode_info(grabInfo)
    if 'error' in grabInfo:
        raise Exception('error while trying to fetch info: %s' %
                        grabInfo['error'])
    if grabInfo['type'] == 'iframe':
        target = grabInfo['target']
        if target.startswith('//'):
            target = "%s:%s" % (url_info.scheme, target)

        return load_video_from_url(http.add_referer_url(target, url))
    elif grabInfo['type'] == 'direct':
        return __9anime_extract_direct(url, grabInfo)

    raise Exception('Unknown case, please report')
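
# A sketch of the protocol-relative fix-up in the iframe branch above,
# using a hypothetical embed target:
_scheme = "https"
_target = "//embed.example/e/abc123"
if _target.startswith("//"):
    _target = "%s:%s" % (_scheme, _target)
# _target == "https://embed.example/e/abc123"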
def __extract_swf_player(url, content, referer=None):
    domain = __extract_js_var(content, r"flashvars\.domain")
    assert domain != "undefined"

    key = __extract_js_var(content, r"flashvars\.filekey")
    filename = __extract_js_var(content, r"flashvars\.file")
    cid, cid2, cid3 = ("undefined", "undefined", "undefined")
    user, password = ("undefined", "undefined")

    data = {
            'key': key,
            'file': filename,
            'cid': cid,
            'cid2': cid2,
            'cid3': cid3,
            'pass': password,
            'user': user,
            'numOfErrors': "0"
    }
    token_url = "%s/api/player.api.php?%s" % (domain, urllib.urlencode(data))

    video_info_res = http.send_request(http.add_referer_url(token_url, url),
                                       set_request=__set_flash(url))
    video_info = dict(urlparse.parse_qsl(video_info_res.text))

    if "error_msg" in video_info:
        print "[*] Error Message: %s" % video_info["error_msg"]
    if "url" not in video_info:
        return None
    return video_info['url']
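
# The player.api.php response is a query string; a sketch of the parsing
# above with a hypothetical response body:
import urlparse

_sample_resp = "url=http://cdn.example/video.flv&title=demo"
_info = dict(urlparse.parse_qsl(_sample_resp))
# _info["url"] == "http://cdn.example/video.flv"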
def __check_video_list(refer_url,
                       vidlist,
                       add_referer=False,
                       ignore_cookie=False):
    nlist = []
    for item in vidlist:
        try:
            item_url = item[1]
            if add_referer:
                item_url = http.add_referer_url(item_url, refer_url)

            temp_req = http.head_request(item_url)
            if temp_req.status_code != 200:
                print "[*] Skiping Invalid Url: %s - status = %d" % (
                    item[1], temp_req.status_code)
                continue  # Skip Item.

            out_url = temp_req.url
            if ignore_cookie:
                out_url = http.strip_cookie_url(out_url)

            nlist.append((item[0], out_url))
        except Exception:
            # Just don't add the source.
            pass

    return nlist
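
# Hypothetical usage: vidlist holds (label, url) pairs; anything whose HEAD
# request does not come back 200 is dropped, e.g.:
#
#     sources = [("1080", "https://cdn.example/1080.m3u8"),
#                ("720", "https://cdn.example/720.m3u8")]
#     playable = __check_video_list(page_url, sources, True, True)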
def __extract_mycloud(url, content):
    strip_res = lambda x: x.split("/")[-1].split(".")[0]
    formatUrls = lambda x: (strip_res(x), http.add_referer_url(x, url))
    fixHttp = lambda x: ("https://" + x) if not x.startswith("http") else x

    playlist_urls = re.findall(r"\"file\"\s*:\s*\"/*(.+)\"", content)
    playlist_urls = map(fixHttp, playlist_urls)
    playlist_entries_full = map(formatUrls, playlist_urls)

    return __check_video_list(url, playlist_entries_full, True, True)
def __extract_mycloud(url, content):
    playlist_url = re.findall(r"\"file\"\s*:\s*\"/*(.+)\"", content)[0]
    if not playlist_url.startswith("http"):
        playlist_url = "https://" + playlist_url

    joinUrls = lambda x: (x[0], urlparse.urljoin(playlist_url, x[1]).rstrip())
    playlist_content = http.send_request(
        http.add_referer_url(playlist_url, url)).text
    playlist_entries = re.findall(r"=\d*x(\d*)\n*([^#]*)\n*#?",
                                  playlist_content)
    playlist_entries_full = map(joinUrls, playlist_entries)
    return __check_video_list(url, playlist_entries_full, True, True)
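
# A standalone sketch of the resolution regex above against hypothetical
# playlist text; the trailing newline captured in each URI is what the
# .rstrip() in joinUrls removes:
import re

_sample = ("#EXT-X-STREAM-INF:BANDWIDTH=1280000,RESOLUTION=640x360\n"
           "360/playlist.m3u8\n"
           "#EXT-X-STREAM-INF:BANDWIDTH=2560000,RESOLUTION=1280x720\n"
           "720/playlist.m3u8\n")
_pairs = re.findall(r"=\d*x(\d*)\n*([^#]*)\n*#?", _sample)
# _pairs == [('360', '360/playlist.m3u8\n'), ('720', '720/playlist.m3u8\n')]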
    def playSource():
        reqObj = http.send_request(http.add_referer_url(raw_url, ""))
        if reqObj.status_code != 200:
            raise Exception("Error from server %d" % reqObj.status_code)

        results = VIDEO_RE.findall(reqObj.text)
        if not results:
            results = VIDEO_RE_NEW.findall(reqObj.text)
        if not results:
            raise Exception("Unable to find source")

        return results[0]
    def playSource():
        reqObj = http.send_request(http.add_referer_url(raw_url, referer))
        if reqObj.status_code != 200:
            raise Exception("Error from server %d" % reqObj.status_code)

        results = VIDEO_RE.findall(reqObj.text)
        if not results:
            results = VIDEO_RE_NEW.findall(reqObj.text)
        if not results:
            raise Exception("Unable to find source")

        return results[0]
def __extract_9anime(url, page_content, referer=None):
    url_info = urlparse.urlparse(url)
    episode_id = url_info.path.rsplit('/', 1)[-1]
    qs = dict(urlparse.parse_qsl(url_info.query))
    domain = url_info.netloc
    scheme = url_info.scheme

    url_base = "%s://%s" % (scheme, domain)

    ts_value = NineAnimeUrlExtender.get_ts_value(page_content)
    if "server_id" not in qs:
        raise Exception('missing server id')

    server_id = int(qs["server_id"])

    #extra_param = NineAnimeUrlExtender.get_extra_url_parameter(episode_id, server_id, ts_value)
    extra_param = _9ANIME_EXTRA_PARAM

    tryNo = 0
    ep_info_url = "%s/ajax/episode/info?ts=%s&_=%s&id=%s&server=%d" % \
        (url_base, ts_value, extra_param, episode_id, server_id)

    while True:
        time.sleep(0.3)
        urlRequest = http.send_request(ep_info_url)
        grabInfo = json.loads(urlRequest.text)
        grabInfo = NineAnimeUrlExtender.decode_info(grabInfo)
        if 'error' in grabInfo:
            if tryNo < 2:
                tryNo += 1
                retry = "true" if tryNo == 2 else "false"
                ep_info_url = __9anime_retry(episode_id, server_id, retry)
                continue

            raise Exception('error while trying to fetch info: %s' %
                            grabInfo['error'])

        break

    if grabInfo['type'] == 'iframe':
        target = grabInfo['target']
        if target.startswith('//'):
            target = "%s:%s" % (url_info.scheme, target)

        return load_video_from_url(http.add_referer_url(target, url))
    elif grabInfo['type'] == 'direct':
        return __9anime_extract_direct(url, grabInfo)

    raise Exception('Unknown case, please report')
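
# Request trace for the retry loop above, assuming the server keeps
# answering with an error: attempt 1 uses the original ep_info_url,
# attempt 2 rebuilds it via __9anime_retry(..., "false"), attempt 3 via
# __9anime_retry(..., "true"), and a third error finally raises.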
    def f(url, content, referer=None):
        if debug:
            print url
            print content
            print compiled_regex.findall(content)
            raise

        try:
            regex_url = compiled_regex.findall(content)[match]
            regex_url = __relative_url(url, regex_url)
            if double_ref:
                video_url = utils.head_request(
                    http.add_referer_url(regex_url, url)).url
            else:
                video_url = __relative_url(regex_url, regex_url)
            return video_url
        except Exception as e:
            print "[*E*] Failed to load link: %s: %s" % (url, e)
            return None
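
# Note: with double_ref set, the candidate URL is HEAD-requested and the
# post-redirect .url is returned; otherwise the regex match is returned
# as-is (resolving regex_url against itself appears to be a no-op).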
def __9anime_extract_direct(refer_url, grabInfo):
    # grabInfo['grabber'] sometimes ends with "/?server=23", so append with "&" instead of "?"
    url_parameter_concat = "&" if "?" in grabInfo['grabber'] else "?"
    url = "%s%s%s" % (grabInfo['grabber'], url_parameter_concat, urllib.urlencode(grabInfo['params']))
    url = __relative_url(refer_url, url)
    resp = json.loads(http.send_request(http.add_referer_url(url, refer_url)).text)

    possible_error = resp['error'] if 'error' in resp else 'No-Error-Set'
    if 'data' not in resp:
        if possible_error == 'deleted':
            return None
        raise Exception('Failed to parse 9anime direct with error: %s' %
                        possible_error)

    if 'error' in resp and resp['error']:
        print '[*E*] Failed to parse 9anime direct but got data with ' \
              'error: %s' % resp['error']

    return __check_video_list(refer_url,
                              map(lambda x: (x['label'], x['file']),
                                  resp['data']))
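
# The "?"/"&" separator choice above in isolation, with a hypothetical
# grabber URL; a list of pairs keeps the urlencode output deterministic:
import urllib

_grabber = "https://grabber.example/api?server=23"
_params = [("id", "abc"), ("token", "xyz")]
_sep = "&" if "?" in _grabber else "?"
_full = "%s%s%s" % (_grabber, _sep, urllib.urlencode(_params))
# _full == "https://grabber.example/api?server=23&id=abc&token=xyz"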
    def playSource():
        playUrl = http.add_referer_url("%s&q=%s" % (url, label), referer)
        reqObj = http.send_request(playUrl)
        return VIDEO_RE.findall(reqObj.text)[0]
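
# Hypothetical context: the enclosing extractor binds url, label, referer
# and VIDEO_RE; playSource() re-requests the page with the chosen quality
# appended ("...&q=720") and returns the first direct video URL matched.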