Example 1
def episode():
    url = h.extract_var(args, 'url')

    name = h.extract_var(args, 'name')

    # Fetch the episode page and locate the video player block.
    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))

    div = h.bs_find_with_class(soup, 'div', 'video-player')

    # Find the inline <script> that carries the CryptoJS-encrypted stream data.
    scripts = div.findAll('script')
    script = None
    for _script in scripts:
        if 'CryptoJS' in _script.text:
            script = _script
            break

    url = ''
    if script:
        script_text = script.text
        _dailytoday = ''
        _subject = ''

        # Walk the parsed JavaScript AST and pick out the string literals
        # assigned to `dailytoday` and `subject`; the [1:-1] slices strip the
        # surrounding quotes from each value's ECMAScript source.
        parser = Parser()
        tree = parser.parse(script_text)
        for node in tree.children():
            ecma = node.to_ecma()
            if ecma.startswith('var dailytoday ='):
                _dailytoday = node.children()[0].children()[1].to_ecma()[1:-1]
            elif ecma.startswith('var subject ='):
                _subject = node.children()[0].children()[1].to_ecma()[1:-1]
            # elif "var bigmumbai = " not in ecma and "bigmumbai = " in ecma:
            #     print ecma

        if _dailytoday and _subject:
            # Both pieces found: decrypt the real stream URL.
            url = decrypt.decrypt_url(_dailytoday, _subject)
        else:
            # Fall back to the value assigned to `bigmumbai`: take the text
            # after its second occurrence, up to the next ';', and strip the
            # surrounding quotes.
            url = script_text.split('bigmumbai = ', 2)[2].split(';')[0][1:-1]

        print url
        # Plot and thumbnail come from the page's itemprop metadata.
        plot = h.bs_find_with_class(soup, 'div', 'vp-info').find('span', {'itemprop': 'description'}).text
        thumbnail = soup.find('div', {'itemprop': 'video'}).find('meta', {'itemprop': 'thumbnailUrl'})['content']
        h.add_dir_video(addon_handle, name, url, thumbnail, plot)
    else:
        # No CryptoJS script on the page: fall back to the embedded YouTube
        # iframe and hand the video id off to the YouTube addon for playback.
        iframe = div.find('iframe')
        if iframe:
            attrs = dict(iframe.attrs)
            youtube_url = attrs['src']
            print youtube_url
            video_id = urlparse.urlparse(youtube_url).path.replace('/embed/', '')
            url = 'plugin://plugin.video.youtube/play/?video_id=%s' % video_id
            h.add_dir_video(addon_handle, name, url, '', '')
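
The helpers pulled in through h are defined elsewhere in the addon. As a point of reference for reading these examples, h.bs_find_with_class(soup, tag, cls) is assumed to behave like a class-filtered find; a minimal stand-in could look like the sketch below (the name and signature come from the calls above, the body is a guess, not the addon's actual code).

def bs_find_with_class(soup, tag, cls):
    # Assumed behaviour: return the first <tag> whose class attribute
    # matches cls, e.g. bs_find_with_class(soup, 'div', 'video-player').
    return soup.find(tag, {'class': cls})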
Example 2
def show():
    url = h.extract_var(args, 'url')

    # The show's episode listing lives under its /video/ sub-path.
    url = '%svideo/' % url

    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))

    info_div = h.bs_find_with_class(soup, 'div', 'video-n-info-wrap')

    # Split the pagination links around the active page: numbered links before
    # the <li class="active"> are collected under 'prev', those after it under
    # 'next'; the [1:-1] slice drops the first and last <li>.
    pagination = h.bs_find_with_class(info_div, 'ul', 'pagination')
    pages = {
        'prev': [],
        'next': []
    }
    if pagination:
        page_type = 'prev'
        pages_li = pagination.findAll('li')[1:-1]
        for li in pages_li:
            attrs = dict(li.attrs)
            if 'class' in attrs and attrs['class'] == 'active':
                page_type = 'next'
            else:
                a = li.find('a')
                a_attrs = dict(a.attrs)
                pages[page_type].append({
                    'href': a_attrs['href'],
                    'page': a.text
                })

    for page in pages['prev']:
        h.add_dir(addon_handle, base_url, '<< Page %s' % page['page'], page['href'], 'show')

    # Each related video becomes an 'episode' directory item, using the
    # thumbnail from its <img> tag.
    related_div = h.bs_find_with_class(info_div, 'div', 'related-videos')
    ul = related_div.find('ul')
    for li in ul.findAll('li'):
        a = li.find('a')
        a_attrs = dict(a.attrs)
        href = a_attrs['href']
        # if href.endswith('-full-episode.html'):
        h.add_dir(addon_handle, base_url, a_attrs['title'], href, 'episode', dict(a.find('img').attrs)['src'])

    for page in pages['next']:
        h.add_dir(addon_handle, base_url, '>> Page %s' % page['page'], page['href'], 'show')
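
h.add_dir and h.add_dir_video are assumed to wrap Kodi's directory API: the first registers a browsable folder that routes back into the plugin, the second a playable video item. The sketch below shows that assumed shape with hypothetical bodies (the real helpers live in the addon's h module and may differ, for instance in how thumbnails are set across Kodi versions).

import urllib

import xbmcgui
import xbmcplugin

def add_dir(addon_handle, base_url, name, url, mode, thumbnail=''):
    # Assumed behaviour: folder item whose URL routes back into this plugin
    # with the name/url/mode carried in the query string.
    plugin_url = '%s?%s' % (base_url, urllib.urlencode({'name': name, 'url': url, 'mode': mode}))
    li = xbmcgui.ListItem(name, iconImage=thumbnail)  # older-style thumbnail handling
    xbmcplugin.addDirectoryItem(handle=addon_handle, url=plugin_url, listitem=li, isFolder=True)

def add_dir_video(addon_handle, name, url, thumbnail, plot):
    # Assumed behaviour: directly playable item carrying basic video metadata.
    li = xbmcgui.ListItem(name, iconImage=thumbnail)
    li.setInfo('video', {'title': name, 'plot': plot})
    li.setProperty('IsPlayable', 'true')
    xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=False)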
Example 3
def archive_shows():
    url = h.extract_var(args, 'url')

    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))

    ul = h.bs_find_with_class(soup, 'ul', 'archive-shows')

    # Each archived show becomes a 'show' directory item.
    for li in ul.findAll('li'):
        a = li.find('a')
        a_attrs = dict(a.attrs)
        h.add_dir(addon_handle, base_url, a_attrs['title'], a_attrs['href'], 'show')
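
None of these functions are invoked in the snippets themselves; in a Kodi addon of this style the entry point typically parses its query string and dispatches on a mode value, roughly like the hypothetical glue below (the addon's globals args, addon_handle, base_url, cookie_file and cookie_jar are set up elsewhere, and its actual routing may differ).

import sys
import urlparse

import xbmcplugin

base_url = sys.argv[0]
addon_handle = int(sys.argv[1])
args = urlparse.parse_qs(sys.argv[2][1:])

mode = args.get('mode', [None])[0]
if mode == 'episode':
    episode()
elif mode == 'show':
    show()
else:
    archive_shows()

xbmcplugin.endOfDirectory(addon_handle)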