def current_shows():
    """List the currently-airing shows.

    Fetches the page named by the 'url' request arg, locates the <h2>
    headed 'Shows', and adds one directory entry per show <li>.  Each
    entry's title is '<show name> (<air time>)' and it links to the
    show's /video/ page, using the show's image as both icon and thumb.
    """
    url = h.extract_var(args, 'url')
    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))
    for h2 in soup.findAll('h2'):
        if h2.text == 'Shows':
            # The show list is the <ul> inside the element immediately
            # following the 'Shows' heading.
            for li in h2.nextSibling.find('ul').findAll('li'):
                a = li.find('a')
                a_attrs = dict(a.attrs)
                title = '%s (%s)' % (
                    h.bs_find_with_class(a, 'div', 'zc-show-title').text,
                    h.bs_find_with_class(a, 'div', 'zc-air-time').text)
                img_src = dict(a.find('img').attrs)['src']
                h.add_dir(addon_handle, base_url, title,
                          '%s/video/' % a_attrs['href'], 'show', img_src,
                          img_src)
            # Only one 'Shows' section exists; stop once it is processed.
            break
def current_shows():
    """List the currently-airing shows.

    Fetches the page named by the 'url' request arg, locates the <h2>
    headed 'Shows', and adds one directory entry per show <li>.  Each
    entry's title is '<show name> (<air time>)' and it links to the
    show's /video/ page, using the show's image as both icon and thumb.
    """
    url = h.extract_var(args, 'url')
    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))
    for h2 in soup.findAll('h2'):
        if h2.text == 'Shows':
            # The show list is the <ul> inside the element immediately
            # following the 'Shows' heading.
            for li in h2.nextSibling.find('ul').findAll('li'):
                a = li.find('a')
                a_attrs = dict(a.attrs)
                title = '%s (%s)' % (
                    h.bs_find_with_class(a, 'div', 'zc-show-title').text,
                    h.bs_find_with_class(a, 'div', 'zc-air-time').text)
                img_src = dict(a.find('img').attrs)['src']
                h.add_dir(addon_handle, base_url, title,
                          '%s/video/' % a_attrs['href'], 'show', img_src,
                          img_src)
            # Only one 'Shows' section exists; stop once it is processed.
            break
def movie():
    """Resolve a single movie page into one playable video entry.

    Reads the page URL and display name from the request args, pulls the
    embed iframe out of the page's entry-content div, and hands its src
    to urlresolver; a successful resolve is added as a playable item.
    """
    page_url = h.extract_var(args, 'url')
    name = h.extract_var(args, 'name')
    page = BeautifulSoup(h.make_request(page_url, cookie_file, cookie_jar))
    content_div = h.bs_find_with_class(page, 'div', 'entry-content')
    iframe_src = dict(content_div.find('iframe').attrs)['src']
    resolved = urlresolver.resolve(iframe_src)
    if resolved:
        # No thumbnail is available at this point, hence the empty strings.
        h.add_dir_video(addon_handle, name, resolved, '', '')
def movies_list():
    """Render one page of the movie index, plus a 'Next' pager entry.

    Adds a 'movie' directory item for every item-post on the page (title
    and link from the entry-title anchor, thumbnail from the post image),
    then, if the loop-nav block offers a next-page link, appends a
    'Next >> (<pages>)' item that re-enters movies_list.
    """
    page_url = h.extract_var(args, 'url')
    page = BeautifulSoup(h.make_request(page_url, cookie_file, cookie_jar))
    loop_div = h.bs_find_with_class(page, 'div', 'loop-content')
    for post in h.bs_find_all_with_class(loop_div, 'div', 'item-post'):
        poster = dict(post.find('img').attrs)['src']
        anchor = h.bs_find_with_class(post, 'h2', 'entry-title').find('a')
        h.add_dir(addon_handle, base_url, anchor.text,
                  dict(anchor.attrs)['href'], 'movie', thumbnail=poster)
    nav = h.bs_find_with_class(page, 'div', 'loop-nav')
    if nav:
        next_link = h.bs_find_with_class(nav, 'a', 'nextpostslink')
        if next_link:
            pages = h.bs_find_with_class(nav, 'span', 'pages').text
            h.add_dir(addon_handle, base_url, 'Next >> (%s)' % pages,
                      dict(next_link.attrs)['href'], 'movies_list')
def current_shows():
    """List the currently-airing shows.

    Fetches the page named by the 'url' request arg, locates the <h2>
    headed 'Shows', and adds one directory entry per show <li>.  Each
    entry's title is '<show name> (<air time>)' and it links to the
    show's /video/ page, using the show's image as both icon and thumb.
    """
    url = h.extract_var(args, 'url')
    soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar))
    for h2 in soup.findAll('h2'):
        if h2.text == 'Shows':
            # The show list is the <ul> inside the element immediately
            # following the 'Shows' heading.
            for li in h2.nextSibling.find('ul').findAll('li'):
                a = li.find('a')
                a_attrs = dict(a.attrs)
                title = '%s (%s)' % (
                    h.bs_find_with_class(a, 'div', 'zc-show-title').text,
                    h.bs_find_with_class(a, 'div', 'zc-air-time').text)
                img_src = dict(a.find('img').attrs)['src']
                h.add_dir(addon_handle, base_url, title,
                          '%s/video/' % a_attrs['href'], 'show', img_src,
                          img_src)
            # Only one 'Shows' section exists; stop once it is processed.
            break
def links():
    """Resolve a JSON-encoded list of host links into playable entries.

    The 'elem_id' request arg carries JSON of the form
    [{'url': ..., 'name': ...}, ...].  Each URL is fetched and reduced to
    a direct embed URL, which is then handed to urlresolver; every link
    that resolves is added as a playable video item.
    """
    import urlresolver
    links_info = json.loads(h.extract_var(args, 'elem_id'))
    for link in links_info:
        url = link['url']
        print url
        resp = None
        try:
            resp = h.make_request(url, cookie_file, cookie_jar)
            soup = BeautifulSoup(resp)
            if len(soup.findChildren()) == 1:
                # A single-child document is assumed to be a bare
                # <meta http-equiv="refresh"> redirect stub; follow it and
                # take the iframe inside div#content as the real embed URL.
                meta = soup.find('meta', attrs={'http-equiv': 'refresh'})
                if meta:
                    c = dict(meta.attrs)['content']
                    idx4 = c.find('URL=')
                    if idx4 != -1:
                        _url = c[idx4 + 4:]
                        soup = BeautifulSoup(h.make_request(_url, cookie_file, cookie_jar))
                        div = soup.find('div', {'id': 'content'})
                        url = dict(div.find('table').find('iframe').attrs)['src']
            else:
                # Normal article page: the embed iframe sits in the second
                # direct-child <div> of the entry-content div.
                div = h.bs_find_with_class(soup, 'div', 'entry-content')
                if div:
                    divs = div.findAll('div', recursive=False)
                    url = dict(divs[1].find('iframe').attrs)['src']
        except urllib2.HTTPError as e:
            # Hack. Avast blocks first url. Only for WatchVideo currently
            if e.code == 403 and e.msg == 'Malicious content':
                # Rebuild a direct embed URL on the host's mirror domain
                # from the blocked redirector's ?id= query parameter.
                up = urlparse.urlparse(url)
                id = urlparse.parse_qs(up.query)['id'][0]
                # f flags that a known mirror pattern matched and the URL
                # was rewritten, so the fetch is worth retrying.
                f = 0
                if up.path == '/idowatch.php':
                    url = 'http://vidfile.xyz/embed-%s-1280x720.html' % id
                    f = 1
                elif up.path == '/watchvideo.php':
                    url = 'http://watchvideo2.us/embed-%s-1280x720.html' % id
                    f = 1
                elif up.path == '/playu.php':
                    url = 'http://playu.me/embed-%s-1280x720.html' % id
                    f = 1
                if f:
                    resp = h.make_request(url, cookie_file, cookie_jar)
        # Only attempt resolution when the (possibly rewritten) URL was
        # actually fetchable; other exception types propagate to the caller.
        if resp:
            video_url = urlresolver.resolve(url)
            if video_url:
                h.add_dir_video(addon_handle, link['name'], video_url, '', '')
def topic1(): videos = [] try: idx = content.index('Flash Player 720p HD Quality Online Links') if idx != -1: links_content = content[idx + 1].split('\n') print links_content for link_content in links_content: idx1 = link_content.find('[URL=') idx2 = link_content.find(']') url = link_content[idx1 + 5: idx2] idx3 = link_content.find('[', idx2) name = link_content[idx2 + 1: idx3] print name, url soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar)) json_url = dict(soup.find('script', {'src': '//cdn.playwire.com/bolt/js/embed.min.js'}).attrs)['data-config'] json_data = json.loads(h.make_request(json_url, cookie_file, cookie_jar)) poster = json_data['poster'] src = json_data['src'] soup = BeautifulSoup(h.make_request(src, cookie_file, cookie_jar)) base_url = soup.find('baseurl').text media_node = soup.find('media') media_url = dict(media_node.attrs)['url'] video_url = '%s/%s' % (base_url, media_url) videos.append({'url': video_url, 'thumbnail': poster, 'name': name}) h.add_dir_video(addon_handle, name, video_url, poster, '') except Exception as e: videos = [] links_content = '' print 'Flash Player', e print videos if not videos: try: idx = content.index('Letwatch 720p HD Quality Online Links') if idx != -1: links_content = content[idx + 1].split('\n') print links_content for link_content in links_content: idx1 = link_content.find('[URL=') idx2 = link_content.find(']') url = link_content[idx1 + 5: idx2] idx3 = link_content.find('[', idx2) name = link_content[idx2 + 1: idx3] print name, url soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar)) div = h.bs_find_with_class(soup, 'div', 'entry-content') divs = div.findAll('div', recursive=False) src = dict(divs[1].find('iframe').attrs)['src'] print src soup = BeautifulSoup(h.make_request(src, cookie_file, cookie_jar)) scripts = soup.findAll('script') rgx = re.compile(ur'.*}\(\'(.*)\',([0-9]+),([0-9]+),\'(.*)\'\.split.*') for script in scripts: if script.text.startswith('eval'): groups = 
re.search(rgx, script.text).groups() p = groups[0] base = int(groups[1]) c = int(groups[2]) k = groups[3].split('|') for x in reversed(xrange(0, c)): if k[x]: p = re.sub(r'\b%s\b' % h.int2base(x, base), k[x], p) arr = p.split(';') data_str = arr[0][26:-1] data = demjson.decode(data_str.replace("\\", "")) video_url = '' video_type = '' for source in data['sources']: if not video_url: video_url = source['file'] video_type = source['label'] else: if source['label'] == 'HD': video_url = source['file'] video_type = source['label'] print video_type, video_url poster = '' videos.append({'url': video_url, 'thumbnail': poster, 'name': name}) h.add_dir_video(addon_handle, name, video_url, poster, '') except Exception as e: videos = [] print 'LetWatch 720', e print videos if not videos: try: idx = content.index('Vidto Link') if idx != -1: links_content = content[idx + 1].split('\n') print links_content for link_content in links_content: idx1 = link_content.find('[URL=') idx2 = link_content.find(']') url = link_content[idx1 + 5: idx2] idx3 = link_content.find('[', idx2) name = link_content[idx2 + 1: idx3] print name, url soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar)) if len(soup.findChildren()) == 1: meta = soup.find('meta', attrs={'http-equiv': 'refresh'}) if meta: c = dict(meta.attrs)['content'] idx4 = c.find('URL=') if idx4 != -1: url = c[idx4 + 4:] print url soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar)) div = soup.find('div', {'id': 'content'}) src = dict(div.find('table').find('iframe').attrs)['src'] print src soup = BeautifulSoup(h.make_request(src, cookie_file, cookie_jar)) div = soup.find('body').find("div", {"id": "player_code"}) script = None scripts = div.findAll('script') rgx = re.compile(ur'.*}\(\'(.*)\',([0-9]+),([0-9]+),\'(.*)\'\.split.*') for s in scripts: if s.text.startswith('eval'): script = s break groups = re.search(rgx, script.text).groups() p = groups[0] base = int(groups[1]) c = int(groups[2]) k = 
groups[3].split('|') for x in reversed(xrange(0, c)): if k[x]: p = re.sub(r'\b%s\b' % h.int2base(x, base), k[x], p) idx5 = p.find('hd:[') idx6 = p.find(']') q = p[idx5 + 3:idx6 + 1] j = demjson.decode(q) print j video_url = '' size = 0 for _j in j: s = _j['label'][:-1] if s > size: size = s video_url = _j['file'] print video_url if video_url: poster = '' videos.append({'url': video_url, 'thumbnail': poster, 'name': name}) h.add_dir_video(addon_handle, name, video_url, poster, '') return except Exception as e: videos = [] print 'Vidto', e print videos if not videos: try: idx = content.index('Watchvideo Link') if idx != -1: links_content = content[idx + 1].split('\n') print links_content for link_content in links_content: idx1 = link_content.find('[URL=') idx2 = link_content.find(']') url = link_content[idx1 + 5: idx2] idx3 = link_content.find('[', idx2) name = link_content[idx2 + 1: idx3] print name, url soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar)) if len(soup.findChildren()) == 1: meta = soup.find('meta', attrs={'http-equiv': 'refresh'}) if meta: c = dict(meta.attrs)['content'] idx4 = c.find('URL=') if idx4 != -1: url = c[idx4 + 4:] print url soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar)) div = soup.find('div', {'id': 'content'}) src = dict(div.find('table').find('iframe').attrs)['src'] print src soup = BeautifulSoup(h.make_request(src, cookie_file, cookie_jar)) script = soup.find('body').find('script', recursive=False) idx5 = script.text.find('sources: ') idx6 = script.text.find(']') j = demjson.decode(script.text[idx5 + 9:idx6 + 1]) video_url = '' for _j in j: if 'label' not in _j: video_url = _j['file'] if video_url: poster = '' videos.append({'url': video_url, 'thumbnail': poster, 'name': name}) h.add_dir_video(addon_handle, name, video_url, poster, '') except Exception as e: videos = [] print 'Watchvideo', e print videos if not videos: try: idx = content.index('Letwatch Link') if idx != -1: links_content = content[idx 
+ 1].split('\n') print links_content for link_content in links_content: idx1 = link_content.find('[URL=') idx2 = link_content.find(']') url = link_content[idx1 + 5: idx2] idx3 = link_content.find('[', idx2) name = link_content[idx2 + 1: idx3] print name, url soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar)) if len(soup.findChildren()) == 1: meta = soup.find('meta', attrs={'http-equiv': 'refresh'}) if meta: c = dict(meta.attrs)['content'] idx4 = c.find('URL=') if idx4 != -1: url = c[idx4 + 4:] print url soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar)) div = soup.find('div', {'id': 'content'}) src = dict(div.find('table').find('iframe').attrs)['src'] print src soup = BeautifulSoup(h.make_request(src, cookie_file, cookie_jar)) scripts = soup.findAll('script') rgx = re.compile(ur'.*}\(\'(.*)\',([0-9]+),([0-9]+),\'(.*)\'\.split.*') for script in scripts: if script.text.startswith('eval'): groups = re.search(rgx, script.text).groups() p = groups[0] base = int(groups[1]) c = int(groups[2]) k = groups[3].split('|') for x in reversed(xrange(0, c)): if k[x]: p = re.sub(r'\b%s\b' % h.int2base(x, base), k[x], p) arr = p.split(';') data_str = arr[0][26:-1] data = demjson.decode(data_str.replace("\\", "")) video_url = '' video_type = '' for source in data['sources']: if not video_url: video_url = source['file'] video_type = source['label'] else: if source['label'] == 'HD': video_url = source['file'] video_type = source['label'] print video_type, video_url poster = '' videos.append({'url': video_url, 'thumbnail': poster, 'name': name}) h.add_dir_video(addon_handle, name, video_url, poster, '') except Exception as e: videos = [] print 'LetWatch', e print videos if not videos: try: idx = content.index('Watch Online - Flash') if idx != -1: links_content = content[idx + 1].split('\n') print links_content for link_content in links_content: idx1 = link_content.find('[URL=') idx2 = link_content.find(']') url = link_content[idx1 + 5: idx2] idx3 = 
link_content.find('[', idx2) name = link_content[idx2 + 1: idx3] print name, url soup = BeautifulSoup(h.make_request(url, cookie_file, cookie_jar)) script = soup.find('script', {'src': '//cdn.playwire.com/bolt/js/zeus/embed.js'}) if script: data_config = dict(script.attrs)['data-config'] json_info = json.loads(h.make_request(data_config, cookie_file, cookie_jar)) poster = json_info['content']['poster'] f4m = json_info['content']['media']['f4m'] soup = BeautifulSoup(h.make_request(f4m, cookie_file, cookie_jar)) base_url = soup.find('baseurl').text media_url = None bitrate = 0 medias = soup.findAll('media') for m in medias: attrs = dict(m.attrs) br = int(attrs['bitrate']) if br > bitrate: media_url = attrs['url'] bitrate = br video_url = '%s/%s' % (base_url, media_url) videos.append({'url': video_url, 'thumbnail': poster, 'name': name}) h.add_dir_video(addon_handle, name, video_url, poster, '') except Exception as e: videos = [] print 'Watch Online - Flash', e print videos if not videos: import pprint pprint.pprint(content)