def listing():
    """Populate the directory with one entry per movie in the listing.

    Reads the listing URL from the plugin args, fetches the parsed listing
    from the scraper, and adds a 'movie_servers' directory item per movie,
    appending the quality to the label when one is known.
    """
    listing_url = h.extract_var(args, 'url')
    for entry in f.get_listing(listing_url)['listing']:
        label = entry['title']
        quality = entry['quality']
        if quality is not None:
            label = '{0} ({1})'.format(label, quality)
        h.add_dir(addon_handle, base_url, label, entry['movie_id'],
                  'movie_servers', entry['image'], entry['image'])
def archive_shows():
    """List every show found in the page's archive-shows <ul>.

    Fetches the page named by the 'url' plugin arg, locates the
    <ul class="archive-shows"> element, and adds a 'show' directory
    entry per <li> anchor (title for the label, href for the target).
    """
    page_url = h.extract_var(args, 'url')
    markup = BeautifulSoup(h.make_request(page_url, cookie_file, cookie_jar))
    archive_list = h.bs_find_with_class(markup, 'ul', 'archive-shows')
    for item in archive_list.findAll('li'):
        anchor_attrs = dict(item.find('a').attrs)
        h.add_dir(addon_handle, base_url, anchor_attrs['title'],
                  anchor_attrs['href'], 'show')
def current_shows():
    """List the shows under the page's 'Current Shows' heading.

    Scans every <h2 class="bubble-title"> heading; on the first one whose
    text is exactly 'Current Shows', adds a 'show' directory entry per
    <li> in the heading's following <ul>, using the anchor's title/href
    and its <img> src as the thumbnail, then stops scanning.

    Improvements over the original: removed the commented-out sorting
    sketch (dead code) and the unused `h2 = None` initializer; flattened
    the match test into a guard `continue` to reduce nesting. Behavior
    is otherwise unchanged.
    """
    url = h.extract_var(args, 'url')
    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))
    for h2 in h.bs_find_all_with_class(soup, 'h2', 'bubble-title'):
        if h2.text != 'Current Shows':
            continue
        for li in h2.findNext('ul').findAll('li'):
            a = li.find('a')
            a_attrs = dict(a.attrs)
            img_src = dict(a.find('img').attrs)['src']
            h.add_dir(addon_handle, base_url, a_attrs['title'],
                      a_attrs['href'], 'show', img_src, img_src)
        # Only the first matching heading is processed, as before.
        break
def show():
    """List a show's episode pages plus prev/next pagination entries.

    Fetches '<show-url>video/', then emits, in order: '<< Page N' items
    for pages before the active one, the related-video episode items,
    and '>> Page N' items for pages after the active one.
    """
    url = h.extract_var(args, 'url')
    # Episode listings live under the show's 'video/' sub-path.
    url = '%svideo/' % (url)
    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))
    info_div = h.bs_find_with_class(soup, 'div', 'video-n-info-wrap')
    pagination = h.bs_find_with_class(info_div, 'ul', 'pagination')
    # Page links are bucketed relative to the currently-active page so
    # "previous" pages can be rendered above the episodes and "next"
    # pages below them.
    pages = {
        'prev': [],
        'next': []
    }
    if pagination:
        page_type = 'prev'
        # Skip the first and last <li> — presumably the prev/next arrow
        # controls rather than numbered pages (TODO confirm against markup).
        pages_li = pagination.findAll('li')[1:-1]
        for li in pages_li:
            attrs = dict(li.attrs)
            if 'class' in attrs and attrs['class'] == 'active':
                # The active page itself is not emitted; everything after
                # it goes into the 'next' bucket.
                page_type = 'next'
            else:
                a = li.find('a')
                a_attrs = dict(a.attrs)
                pages[page_type].append({
                    'href': a_attrs['href'],
                    'page': a.text
                })
    for page in pages['prev']:
        h.add_dir(addon_handle, base_url, '<< Page %s' % page['page'], page['href'], 'show')
    related_div = h.bs_find_with_class(info_div, 'div', 'related-videos')
    ul = related_div.find('ul')
    for li in ul.findAll('li'):
        a = li.find('a')
        a_attrs = dict(a.attrs)
        href = a_attrs['href']
        # NOTE(review): a filter restricting entries to full episodes was
        # deliberately disabled; currently every related video is listed.
        # if href.endswith('-full-episode.html'):
        h.add_dir(addon_handle, base_url, a_attrs['title'], href, 'episode', dict(a.find('img').attrs)['src'])
    for page in pages['next']:
        h.add_dir(addon_handle, base_url, '>> Page %s' % page['page'], page['href'], 'show')
def main_index():
    """Build the channel index by scraping the site's inline bootstrap script.

    Extracts three values from the page's inline (src-less) <script>:
    a session token ('var sn = ...'), a load-balancer id fetched from the
    script's AJAX URL, and a stream-URL template from createVideo(). It
    then downloads the channel list and emits one 'streams' directory
    entry per online channel (offline channels become inert video items).
    All offsets below are coupled to the exact marker strings — fragile
    by nature; they break if the site changes its script.
    """
    soup = BeautifulSoup(h.make_request(SITE_ROOT, cookie_file, cookie_jar))
    # Only inline scripts (no src attribute) can contain the bootstrap code.
    scripts = soup.find('body').findAll('script', {'src': None})
    script = None
    for s in scripts:
        if 'var sn = ' in s.text:
            script = s
    script_text = script.text
    # --- 1. session token: text between "var sn = '" and the closing "';" ---
    idx1 = script_text.find('var sn = ')
    idx2 = script_text[idx1:].find(';')
    # +10 skips "var sn = " plus the opening quote; -1 drops the closing quote.
    sn = script_text[idx1 + 10:idx1 + idx2 - 1]
    script_text = script_text[idx1 + idx2 - 1:]
    # --- 2. load-balancer id: fetch the $.ajax URL, value is after '=' ---
    idx1 = script_text.find('$.ajax({url:')
    idx2 = script_text[idx1:].find(',')
    # +14 skips "$.ajax({url:" plus the opening quote; -1 drops the closing one.
    lb_url = script_text[idx1 + 14:idx1 + idx2 - 1]
    lb_info = h.make_request(lb_url, cookie_file, cookie_jar)
    # Response looks like "key=value"; keep the value — TODO confirm format.
    lb = lb_info.split('=')[1]
    script_text = script_text[idx1 + idx2 - 1:]
    # --- 3. channel-list URL: quoted string after "data = " in showChannels ---
    idx1 = script_text.find('function showChannels (){')
    idx2 = script_text[idx1:].find('data = ')
    idx1 = idx1 + idx2
    idx2 = script_text[idx1:].find("'")
    idx1 = idx1 + idx2 + 1
    idx2 = script_text[idx1:].find("'")
    # The session token is appended as the URL's trailing parameter.
    u = script_text[idx1: idx1 + idx2] + sn
    # --- 4. stream-URL template: the "url = ..." expression in createVideo ---
    idx1 = script_text.find('function createVideo(')
    idx2 = script_text[idx1:].find('url = ')
    idx1 = idx1 + idx2 + 7
    idx2 = script_text[idx1:].find(";")
    # Splitting on '"' yields alternating literal/placeholder pieces; the
    # odd slots (1, 3, 5) are filled in per stream below.
    _s_u = script_text[idx1: idx1 + idx2 - 1].split('"')
    # The endpoint returns lenient (non-strict) JSON, hence demjson.
    channels_info = demjson.decode(h.make_request(u, cookie_file, cookie_jar))
    for channel_info in channels_info['channelsList']:
        caption = channel_info['caption']
        channel_name = channel_info['channelName'].strip('\n')
        name = '%s (%s)' % (caption, channel_name)
        if 'streamsList' in channel_info and len(channel_info['streamsList']) > 0:
            streams_info = []
            for stream in channel_info['streamsList']:
                # Fill the template copy: slot 1 = load balancer,
                # slot 3 = stream name, slot 5 = stream id.
                s_u = list(_s_u)
                s_u[1] = lb
                s_u[3] = stream['streamName']
                s_u[5] = str(stream['streamId'])
                stream_info = {
                    'stream_caption': stream['caption'],
                    'stream_url': ''.join(s_u),
                }
                streams_info.append(json.dumps(stream_info))
            h.add_dir(addon_handle, base_url, name, urllib.urlencode({'streams_info': streams_info}), 'streams')
        else:
            # offline
            h.add_dir_video(addon_handle, name, '', '', '')
def movie_servers():
    """List the available servers for the movie named by the 'url' arg.

    Each server becomes a 'movie' directory entry whose target encodes
    the server's two info fields as 'first,second'.
    """
    movie_url = h.extract_var(args, 'url')
    for server in f.get_movie_servers(movie_url):
        pair = server['info']
        target = '{0},{1}'.format(pair[0], pair[1])
        h.add_dir(addon_handle, base_url, server['title'], target, 'movie')
def menu_options():
    """List the sub-options of the menu named by the 'name' plugin arg.

    Each option becomes a 'listing' directory entry.
    """
    menu_name = h.extract_var(args, 'name')
    for entry in f.get_menu_options(menu_name):
        h.add_dir(addon_handle, base_url, entry['title'],
                  entry['url'], 'listing')
def main_index():
    """Build the root menu from the scraper's main-menu options.

    Options without a URL expand into a sub-menu ('menu_options');
    all others open directly as a 'listing'.
    """
    for entry in f.get_main_menu_options():
        mode = 'menu_options' if not entry['url'] else 'listing'
        h.add_dir(addon_handle, base_url, entry['title'], entry['url'], mode)
def main_index():
    """Build the root menu: current shows and archived shows."""
    entries = (
        ('Current Shows', 'CurrentShows'),
        ('Archive Shows', 'ArchiveShows'),
    )
    for label, mode in entries:
        h.add_dir(addon_handle, base_url, label, SHOWS_URL, mode)