# Example #1
def get_band_data_by_url(url):
    """Fetch *url* and extract the ``BandData`` JS object embedded in the page.

    The page assigns a JS object literal via ``var BandData = {...};``; it is
    decoded with demjson, which tolerates non-strict JSON.

    Raises:
        ValueError: if the page contains no ``BandData`` assignment.
    """
    body = load_url(url)
    # Raw string so the pattern source is unambiguous; re.S lets ``.*?``
    # span newlines inside the object literal.
    match = re.search(r"var BandData = ({.*?})[,;]\n", body, re.S)
    if match is None:
        # Fail loudly with context instead of an opaque AttributeError on
        # ``None.group(1)`` when the page layout changes.
        raise ValueError("BandData not found in page: %s" % url)
    return demjson.decode(match.group(1))
# Example #2
def get_band_data_by_url(url):
    """Download *url* and return the page's ``BandData`` object decoded via demjson."""
    page = load_url(url)
    # re.S makes ``.*?`` match across newlines inside the object literal.
    matched = re.search("var BandData = ({.*?})[,;]\n", page, re.S)
    raw_object = matched.group(1)
    return demjson.decode(raw_object)
# Example #3
def get_album_data_by_url(url):
    """Download *url* and return the merged ``TralbumData`` album dictionary.

    The ``current`` sub-object is strict JSON; the trailing key/value run
    (``is_preorder`` ...) is loose JS and is decoded with demjson, then
    merged into the result.
    """
    page = load_url(url)
    matched = re.search("var TralbumData = .*?current: ({.*}),\n.*?(is_preorder.*?)trackinfo ?:", page, re.S)
    album = json.loads(matched.group(1))
    extra = demjson.decode("{%s}" % (matched.group(2),))
    album.update(extra)
    return album
# Example #4
def get_album_data_by_url(url):
    """Fetch *url* and build the album dict from the embedded ``TralbumData``.

    Combines the strict-JSON ``current`` object with the loose JS key/value
    tail (decoded by demjson) into a single dictionary.
    """
    html = load_url(url)
    pattern = "var TralbumData = .*?current: ({.*}),\n.*?(is_preorder.*?)trackinfo ?:"
    found = re.search(pattern, html, re.S)
    result = json.loads(found.group(1))
    result.update(demjson.decode("{%s}" % (found.group(2), )))
    return result
def resolve(url):
    """Resolve *url* into a list of ``{'url', 'quality'}`` stream dicts, or None.

    Pulls the argument of the JWPlayer ``setup(...)`` call out of the page's
    script tag and decodes it with demjson (Python 2: 'string_escape' codec
    unescapes the embedded backslash sequences first).
    """
    page = util.request(url)
    found = re.search(r'<script[^\.]+?\.setup\(([^\)]+?)\);', page, re.I | re.S)
    if not found:
        return None
    setup = demjson.decode(found.group(1).decode('string_escape'))
    if 'sources' not in setup:
        return None
    return [{'url': src['file'], 'quality': src['label']}
            for src in setup['sources']]
def resolve(url):
    """Resolve a myvi.ru flash-player URL into a list of stream dicts, or None.

    Queries the myvi.ru Video/Get API for the player id embedded in *url*,
    then builds header-augmented stream URLs (UniversalUserID cookie plus
    User-Agent) for every entry in the first playlist.
    """
    jar = {}
    util.init_urllib(jar)
    video_id = re.search(r'.*player/flash/(?P<url>.+)', url).group('url')
    response = util.request('http://myvi.ru/player/api/Video/Get/%s?sig' % video_id)
    payload = demjson.decode(response)
    first_playlist = payload['sprutoData']['playlist'][0]
    # The UniversalUserID cookie is recovered from the pickled cookie jar.
    uuid = pickle.loads(
        util._cookie_jar.dump())['.myvi.ru']['/']['UniversalUserID']
    streams = []
    for entry in first_playlist['video']:
        stream = entry['url']
        stream += '|Cookie=UniversalUserID%3D' + urllib.parse.quote(
            uuid.value)
        stream += '&User-Agent=' + UA
        streams.append({'url': stream})
    return streams if streams else None
# Example #7
def resolve(url):
    # Resolve a stream page that wraps its player in an <iframe>: follow the
    # iframe, extract the JWPlayer ``setup(...)`` argument from the embedded
    # script, repair it into parseable JSON, and decode it with demjson.
    # Returns a list of {'url', 'subs', 'lang'} dicts (one per source/track
    # pair) or None when no setup block is found.
    realurl = re.search(r'<iframe src="([^"]+)".*', util.request(url),
                        re.I | re.S).group(1)
    data = re.search(r'<script[^\.]+?\.setup\((.+?)\);', util.request(realurl),
                     re.I | re.S)
    if data:
        # Python 2 only: the 'string_escape' codec unescapes \" sequences.
        data = data.group(1).decode('string_escape')
        # Order matters: strip JS function wrappers first, then quote any
        # remaining bare-word values so demjson can parse the result.
        data = re.sub(r'\w+\(([^\)]+?)\)', r'\1', data)  # Strip JS functions
        data = re.sub(r': *([^"][a-zA-Z]+)', r':"\1"',
                      data)  # Fix incorrect JSON
        data = demjson.decode(data)
        if 'sources' in data:
            result = []
            for source in data['sources']:
                if 'tracks' in data:
                    for track in data['tracks']:
                        result.append({
                            'url': source['file'],
                            'subs': track['file'],
                            'lang': ' %s subtitles' % track['label']
                        })
            # NOTE(review): when 'tracks' is absent this returns an empty
            # list instead of the plain sources -- confirm that is intended.
            return result
    return None
# Example #8
def _js_to_obj(code):
    """Decode the JavaScript object literal *code* into a Python object."""
    decoded = demjson.decode(code)
    return decoded
# Example #9
def topic1():
    """Scrape video links for a forum topic, trying a cascade of hosts.

    Reads the surrounding-scope ``content`` sequence of post fragments and
    tries each known host section in order -- Flash Player 720p, Letwatch
    720p, Vidto, Watchvideo, Letwatch, Watch Online - Flash -- stopping at
    the first section that yields videos.  Every video found is registered
    with ``h.add_dir_video``.  Python 2 code (print statements, ``xrange``,
    ``ur''`` literals).
    """
    videos = []
    # --- Attempt 1: Playwire embeds under "Flash Player 720p HD ..." ---
    try:
        # NOTE(review): list.index raises ValueError when the marker is
        # missing (it never returns -1), so the ``idx != -1`` guards below
        # are dead code; the except blocks are what actually handle absence.
        idx = content.index('Flash Player 720p HD Quality Online Links')

        if idx != -1:
            links_content = content[idx + 1].split('\n')
            print links_content
            for link_content in links_content:
                # Each line looks like "[URL=<href>]<name>[/URL]...": slice
                # the href between "[URL=" and "]" and the name up to "[".
                idx1 = link_content.find('[URL=')
                idx2 = link_content.find(']')
                url = link_content[idx1 + 5: idx2]

                idx3 = link_content.find('[', idx2)
                name = link_content[idx2 + 1: idx3]

                print name, url

                # Playwire embed: the JSON config URL is in the script tag's
                # data-config attribute.
                soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))
                json_url = dict(soup.find('script', {'src': '//cdn.playwire.com/bolt/js/embed.min.js'}).attrs)['data-config']

                json_data = json.loads(h.make_request(json_url, cookie_file, cookie_jar))
                poster = json_data['poster']
                src = json_data['src']
                # ``src`` points at an f4m-style manifest: join its <baseurl>
                # text with the <media url=...> attribute.
                soup = BeautifulSoup(h.make_request(src, cookie_file, cookie_jar))
                base_url = soup.find('baseurl').text
                media_node = soup.find('media')
                media_url = dict(media_node.attrs)['url']
                video_url = '%s/%s' % (base_url, media_url)

                videos.append({'url': video_url, 'thumbnail': poster, 'name': name})
                h.add_dir_video(addon_handle, name, video_url, poster, '')
    except Exception as e:
        # Best effort: reset and fall through to the next host section.
        videos = []
        links_content = ''
        print 'Flash Player', e

    print videos
    # --- Attempt 2: packed-eval player under "Letwatch 720p HD ..." ---
    if not videos:
        try:
            idx = content.index('Letwatch 720p HD Quality Online Links')

            if idx != -1:
                links_content = content[idx + 1].split('\n')
                print links_content
                for link_content in links_content:
                    idx1 = link_content.find('[URL=')
                    idx2 = link_content.find(']')
                    url = link_content[idx1 + 5: idx2]

                    idx3 = link_content.find('[', idx2)
                    name = link_content[idx2 + 1: idx3]

                    print name, url

                    # The player iframe is the second child <div> of the
                    # entry-content container.
                    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))
                    div = h.bs_find_with_class(soup, 'div', 'entry-content')
                    divs = div.findAll('div', recursive=False)
                    src = dict(divs[1].find('iframe').attrs)['src']

                    print src
                    soup = BeautifulSoup(h.make_request(src, cookie_file, cookie_jar))
                    scripts = soup.findAll('script')
                    # Captures the p/base/c/k arguments of an eval-packed
                    # ("P.A.C.K.E.R.") script: payload, radix, count, keywords.
                    rgx = re.compile(ur'.*}\(\'(.*)\',([0-9]+),([0-9]+),\'(.*)\'\.split.*')
                    for script in scripts:
                        if script.text.startswith('eval'):
                            groups = re.search(rgx, script.text).groups()
                            p = groups[0]
                            base = int(groups[1])
                            c = int(groups[2])
                            k = groups[3].split('|')

                            # Substitute each base-N token back with its
                            # keyword to undo the packing.
                            for x in reversed(xrange(0, c)):
                                if k[x]:
                                    p = re.sub(r'\b%s\b' % h.int2base(x, base), k[x], p)

                            # First statement holds the setup object; slice
                            # off the fixed-length prefix/suffix around it.
                            arr = p.split(';')
                            data_str = arr[0][26:-1]
                            data = demjson.decode(data_str.replace("\\", ""))
                            video_url = ''
                            video_type = ''
                            # Prefer the source labelled 'HD', else the first.
                            for source in data['sources']:
                                if not video_url:
                                    video_url = source['file']
                                    video_type = source['label']
                                else:
                                    if source['label'] == 'HD':
                                        video_url = source['file']
                                        video_type = source['label']

                            print video_type, video_url

                            poster = ''
                            videos.append({'url': video_url, 'thumbnail': poster, 'name': name})
                            h.add_dir_video(addon_handle, name, video_url, poster, '')
        except Exception as e:
            videos = []
            print 'LetWatch 720', e

    print videos
    # --- Attempt 3: "Vidto Link" section (follows meta-refresh redirects) ---
    if not videos:
        try:
            idx = content.index('Vidto Link')

            if idx != -1:
                links_content = content[idx + 1].split('\n')
                print links_content
                for link_content in links_content:
                    idx1 = link_content.find('[URL=')
                    idx2 = link_content.find(']')
                    url = link_content[idx1 + 5: idx2]

                    idx3 = link_content.find('[', idx2)
                    name = link_content[idx2 + 1: idx3]

                    print name, url

                    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))
                    # A single-child document is assumed to be a bare
                    # <meta http-equiv="refresh"> redirect page; follow it.
                    if len(soup.findChildren()) == 1:
                        meta = soup.find('meta', attrs={'http-equiv': 'refresh'})
                        if meta:
                            c = dict(meta.attrs)['content']
                            idx4 = c.find('URL=')
                            if idx4 != -1:
                                url = c[idx4 + 4:]
                                print url
                                soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))

                    div = soup.find('div', {'id': 'content'})
                    src = dict(div.find('table').find('iframe').attrs)['src']
                    print src

                    soup = BeautifulSoup(h.make_request(src, cookie_file, cookie_jar))
                    div = soup.find('body').find("div", {"id": "player_code"})
                    script = None
                    scripts = div.findAll('script')
                    # Same P.A.C.K.E.R. capture as the Letwatch section above.
                    rgx = re.compile(ur'.*}\(\'(.*)\',([0-9]+),([0-9]+),\'(.*)\'\.split.*')
                    for s in scripts:
                        if s.text.startswith('eval'):
                            script = s
                            break

                    # NOTE(review): raises AttributeError (caught below) if
                    # no eval-packed script was found and ``script`` is None.
                    groups = re.search(rgx, script.text).groups()
                    p = groups[0]
                    base = int(groups[1])
                    c = int(groups[2])
                    k = groups[3].split('|')

                    for x in reversed(xrange(0, c)):
                        if k[x]:
                            p = re.sub(r'\b%s\b' % h.int2base(x, base), k[x], p)

                    # Slice the JSON array that follows "hd:" out of the
                    # unpacked source.
                    idx5 = p.find('hd:[')
                    idx6 = p.find(']')
                    q = p[idx5 + 3:idx6 + 1]
                    j = demjson.decode(q)
                    print j

                    video_url = ''
                    size = 0
                    for _j in j:
                        # NOTE(review): ``_j['label'][:-1]`` is a string but
                        # ``size`` is an int; this Python 2 cross-type
                        # comparison is always True, so the LAST entry wins,
                        # not the largest -- int(s) was probably intended.
                        s = _j['label'][:-1]
                        if s > size:
                            size = s
                            video_url = _j['file']

                    print video_url
                    if video_url:
                        poster = ''
                        videos.append({'url': video_url, 'thumbnail': poster, 'name': name})
                        h.add_dir_video(addon_handle, name, video_url, poster, '')
                # Vidto is terminal: stop after processing its links.
                return
        except Exception as e:
            videos = []
            print 'Vidto', e

    print videos
    # --- Attempt 4: "Watchvideo Link" section ---
    if not videos:
        try:
            idx = content.index('Watchvideo Link')

            if idx != -1:
                links_content = content[idx + 1].split('\n')
                print links_content
                for link_content in links_content:
                    idx1 = link_content.find('[URL=')
                    idx2 = link_content.find(']')
                    url = link_content[idx1 + 5: idx2]

                    idx3 = link_content.find('[', idx2)
                    name = link_content[idx2 + 1: idx3]

                    print name, url

                    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))
                    # Follow a bare meta-refresh redirect page, as above.
                    if len(soup.findChildren()) == 1:
                        meta = soup.find('meta', attrs={'http-equiv': 'refresh'})
                        if meta:
                            c = dict(meta.attrs)['content']
                            idx4 = c.find('URL=')
                            if idx4 != -1:
                                url = c[idx4 + 4:]
                                print url
                                soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))

                    div = soup.find('div', {'id': 'content'})
                    src = dict(div.find('table').find('iframe').attrs)['src']
                    print src

                    soup = BeautifulSoup(h.make_request(src, cookie_file, cookie_jar))
                    script = soup.find('body').find('script', recursive=False)

                    # Slice the "sources: [...]" array out of the inline
                    # player script and decode it with demjson.
                    idx5 = script.text.find('sources: ')
                    idx6 = script.text.find(']')
                    j = demjson.decode(script.text[idx5 + 9:idx6 + 1])

                    # Take the (last) unlabelled source entry.
                    video_url = ''
                    for _j in j:
                        if 'label' not in _j:
                            video_url = _j['file']

                    if video_url:
                        poster = ''
                        videos.append({'url': video_url, 'thumbnail': poster, 'name': name})
                        h.add_dir_video(addon_handle, name, video_url, poster, '')
        except Exception as e:
            videos = []
            print 'Watchvideo', e

    print videos
    # --- Attempt 5: "Letwatch Link" section (meta-refresh + packed eval) ---
    if not videos:
        try:
            idx = content.index('Letwatch Link')

            if idx != -1:
                links_content = content[idx + 1].split('\n')
                print links_content
                for link_content in links_content:
                    idx1 = link_content.find('[URL=')
                    idx2 = link_content.find(']')
                    url = link_content[idx1 + 5: idx2]

                    idx3 = link_content.find('[', idx2)
                    name = link_content[idx2 + 1: idx3]

                    print name, url

                    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))
                    # Follow a bare meta-refresh redirect page, as above.
                    if len(soup.findChildren()) == 1:
                        meta = soup.find('meta', attrs={'http-equiv': 'refresh'})
                        if meta:
                            c = dict(meta.attrs)['content']
                            idx4 = c.find('URL=')
                            if idx4 != -1:
                                url = c[idx4 + 4:]
                                print url
                                soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))

                    div = soup.find('div', {'id': 'content'})
                    src = dict(div.find('table').find('iframe').attrs)['src']
                    print src

                    soup = BeautifulSoup(h.make_request(src, cookie_file, cookie_jar))
                    scripts = soup.findAll('script')
                    # Same P.A.C.K.E.R. unpack as Attempt 2.
                    rgx = re.compile(ur'.*}\(\'(.*)\',([0-9]+),([0-9]+),\'(.*)\'\.split.*')
                    for script in scripts:
                        if script.text.startswith('eval'):
                            groups = re.search(rgx, script.text).groups()
                            p = groups[0]
                            base = int(groups[1])
                            c = int(groups[2])
                            k = groups[3].split('|')

                            for x in reversed(xrange(0, c)):
                                if k[x]:
                                    p = re.sub(r'\b%s\b' % h.int2base(x, base), k[x], p)

                            arr = p.split(';')
                            data_str = arr[0][26:-1]
                            data = demjson.decode(data_str.replace("\\", ""))
                            video_url = ''
                            video_type = ''
                            # Prefer the source labelled 'HD', else the first.
                            for source in data['sources']:
                                if not video_url:
                                    video_url = source['file']
                                    video_type = source['label']
                                else:
                                    if source['label'] == 'HD':
                                        video_url = source['file']
                                        video_type = source['label']

                            print video_type, video_url

                            poster = ''
                            videos.append({'url': video_url, 'thumbnail': poster, 'name': name})
                            h.add_dir_video(addon_handle, name, video_url, poster, '')
        except Exception as e:
            videos = []
            print 'LetWatch', e

    print videos
    # --- Attempt 6: Playwire "zeus" embeds under "Watch Online - Flash" ---
    if not videos:
        try:
            idx = content.index('Watch Online - Flash')

            if idx != -1:
                links_content = content[idx + 1].split('\n')
                print links_content

                for link_content in links_content:
                    idx1 = link_content.find('[URL=')
                    idx2 = link_content.find(']')
                    url = link_content[idx1 + 5: idx2]

                    idx3 = link_content.find('[', idx2)
                    name = link_content[idx2 + 1: idx3]

                    print name, url

                    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))
                    script = soup.find('script', {'src': '//cdn.playwire.com/bolt/js/zeus/embed.js'})

                    if script:
                        data_config = dict(script.attrs)['data-config']
                        json_info = json.loads(h.make_request(data_config, cookie_file, cookie_jar))

                        poster = json_info['content']['poster']

                        f4m = json_info['content']['media']['f4m']

                        soup = BeautifulSoup(h.make_request(f4m, cookie_file, cookie_jar))
                        base_url = soup.find('baseurl').text

                        # Pick the <media> entry with the highest bitrate.
                        media_url = None
                        bitrate = 0
                        medias = soup.findAll('media')
                        for m in medias:
                            attrs = dict(m.attrs)
                            br = int(attrs['bitrate'])
                            if br > bitrate:
                                media_url = attrs['url']
                                bitrate = br

                        video_url = '%s/%s' % (base_url, media_url)
                        videos.append({'url': video_url, 'thumbnail': poster, 'name': name})
                        h.add_dir_video(addon_handle, name, video_url, poster, '')
        except Exception as e:
            videos = []
            print 'Watch Online - Flash', e

    print videos
    # All hosts failed: dump the raw topic content for debugging.
    if not videos:
        import pprint
        pprint.pprint(content)