def list_qualities():
    """Collect the available (display, bitrate) pairs for the current video.

    RTMP metadata (a <meta> block carrying system-bitrate entries) is
    preferred; otherwise the m3u manifest is fetched and its playlists
    listed.  Returns a list of (display, bitrate) tuples, or shows an
    error dialog when the feed marks the video as an exception.
    """
    exception = False
    video_url = _common.args.url
    bitrates = []
    video_data = _connection.getURL(video_url)
    video_tree = BeautifulSoup(video_data, 'html.parser')
    video_rtmp = video_tree.meta
    if video_rtmp is not None:
        # RTMP renditions: one entry per child carrying system-bitrate.
        for video_index in video_rtmp:
            bitrate = int(video_index['system-bitrate'])
            # NOTE(review): display is NOT divided by 1024 here, unlike the
            # m3u branch below -- confirm whether that asymmetry is intended.
            display = int(bitrate)
            bitrates.append((display, bitrate))
    else:
        # No RTMP metadata: request the m3u manifest instead.
        video_data = _connection.getURL(video_url + '&manifest=m3u')
        video_tree = BeautifulSoup(video_data, 'html.parser')
        if video_tree.find('param', attrs = {'name' : 'isException', 'value' : 'true'}) is None:
            video_url2 = video_tree.seq.find_all('video')[0]
            video_url3 = video_url2['src']
            video_data2 = _connection.getURL(video_url3)
            video_url4 = _m3u8.parse(video_data2)
            for video_index in video_url4.get('playlists'):
                bitrate = int(video_index.get('stream_info')['bandwidth'])
                # codecs is fetched but unused in this function.
                try:
                    codecs = video_index.get('stream_info')['codecs']
                except:
                    codecs = ''
                display = int(bitrate) / 1024
                bitrates.append((display, bitrate))
        else:
            exception = True
    if not exception:
        return bitrates
    else:
        # The feed flagged this video as unplayable; surface its message.
        _common.show_exception(video_tree.ref['title'], video_tree.ref['abstract'])
def play_uri(video_uri=_common.args.url, video_referer='www.vh1.com'):
    """Resolve a VH1 video URI to a stacked RTMP URL and start playback.

    Follows the player redirect to the config XML, expands the mrss feed
    template, then picks the best rendition per segment (capped by the
    quality setting) and stacks the segment URLs for XBMC.
    """
    swfUrl = _connection.getRedirect(BASE2 + video_uri, referer=video_referer)
    configurl = urllib.unquote_plus(
        swfUrl.split('CONFIG_URL=')[1].split('&')[0])
    configxml = _connection.getURL(configurl)
    video_tree = BeautifulSoup(configxml)
    feed = video_tree.player.feed
    try:
        # NOTE(review): .replace('&', '&') is a no-op -- presumably it was
        # meant to decode '&amp;' entities; confirm against upstream source.
        mrssurl = feed.string.replace('{uri}', video_uri).replace(
            '{ref}', 'None').replace('&', '&').strip()
        mrssxml = _connection.getURL(mrssurl)
        mrsstree = BeautifulSoup(mrssxml)
    except:
        # Fall back to treating the feed element itself as the tree.
        mrsstree = feed
    segmenturls = mrsstree.find_all('media:content')
    stacked_url = 'stack://'
    for segment in segmenturls:
        surl = segment['url']
        videos = _connection.getURL(surl)
        videos = BeautifulSoup(videos).find_all('rendition')
        hbitrate = -1
        sbitrate = int(_addoncompat.get_setting('quality'))
        # Highest bitrate not above the configured quality wins.
        for video in videos:
            bitrate = int(video['bitrate'])
            if bitrate > hbitrate and bitrate <= sbitrate:
                hbitrate = bitrate
                rtmpdata = video.src.string
        rtmpurl = MP4URL + rtmpdata.split('viacomvh1strm')[2]
        # Commas must be doubled inside a stack:// component.
        stacked_url += rtmpurl.replace(',', ',,') + ' , '
    finalurl = stacked_url[:-3]
    xbmcplugin.setResolvedUrl(pluginHandle, True, xbmcgui.ListItem(path=finalurl))
def seasons(season_url = _common.args.url):
    """Build season/clip directory entries for a show page.

    Looks for a 'full episodes' link and a clips link on the show page and
    adds matching directories; South Park gets one entry per season button.

    Fixes: removed a leftover debug ``print season_url`` statement, and
    renamed the local ``seasons`` list which shadowed this function.
    """
    season_data = _connection.getURL(season_url)
    season_tree = BeautifulSoup(season_data, 'html.parser', parse_only = SoupStrainer('div'))
    season_menu = season_tree.find('a', text = re.compile('full episodes', re.IGNORECASE))
    season_menu2 = season_tree.find('a', href = re.compile('(?<!stand-up)/(video|clips)'))
    if season_menu is not None:
        season_url2 = season_menu['href']
        if 'http' not in season_url2:
            season_url2 = season_url + season_url2
        if 'South Park' in _common.args.name:
            season_data2 = _connection.getURL(season_url2)
            # Renamed from 'seasons' to avoid shadowing this function.
            season_buttons = BeautifulSoup(season_data2, 'html5lib').find_all('a', class_='seasonbtn')
            if season_buttons:
                for season in season_buttons:
                    try:
                        display = 'Season %s' % str(int(season.string))
                    except:
                        # Non-numeric buttons are specials.
                        display = 'Special %s' % season.string
                    _common.add_directory(display, SITE, 'episodes', season['href'])
        else:
            _common.add_directory('Full Episodes', SITE, 'episodes', season_url2)
    elif 'episode' in season_url:
        _common.add_directory('Full Episodes', SITE, 'episodes', season_url)
    if season_menu2 is not None:
        season_url3 = season_menu2['href']
        if 'http' not in season_url3:
            season_url3 = season_url + season_url3
        _common.add_directory('Clips', SITE, 'episodes', season_url3)
    _common.set_view('seasons')
def play_video(video_url = _common.args.url):
    """Resolve a segmented video into a stacked URL and start playback.

    For each <segment> in the feed, picks the highest-bitrate file at or
    under the configured quality (preferring 'hd' on ties), falling back
    to the first 'hd' file when nothing qualifies.
    """
    stack_url = 'stack://'
    hbitrate = -1
    sbitrate = int(_addoncompat.get_setting('quality')) * 1024
    closedcaption = None
    video_data = _connection.getURL(video_url)
    video_tree = BeautifulSoup(video_data, 'html.parser')
    video_segments = video_tree.find_all('segment')
    for video_segment in video_segments:
        seg_url = VIDEOINFO % video_segment['id']
        seg_data = _connection.getURL(seg_url)
        seg_menu = BeautifulSoup(seg_data).find_all('file')
        hbitrate = -1
        file_url = None
        for video_index in seg_menu:
            try:
                bitrate = int(video_index['bitrate'])
                type = video_index['type']
                if bitrate > hbitrate and bitrate <= sbitrate:
                    hbitrate = bitrate
                    file_url = video_index.string
                elif bitrate == hbitrate and bitrate <= sbitrate and type == 'hd' :
                    # On equal bitrate, prefer the HD variant.
                    file_url = video_index.string
            except:
                # Entries without bitrate/type attributes are skipped.
                pass
        if file_url is None:
            # Nothing under the quality cap: fall back to the first HD file.
            file_url = BeautifulSoup(seg_data).find_all('file', type = 'hd')[0].string
        # Commas must be doubled inside a stack:// component.
        stack_url += file_url.replace(',', ',,') + ' , '
    finalurl = stack_url[:-3]
    xbmcplugin.setResolvedUrl(pluginHandle, True, xbmcgui.ListItem(path = finalurl))
def seasons(collection_ids=_common.args.url):
    """Add Episodes and Clips folders for each collection id.

    collection_ids may be a single id or a comma-separated list; for a
    list the folder labels include the collection title.

    Fix: removed two leftover debug ``print`` statements (each also
    re-parsed the response a second time for no reason); the sibling
    implementation of this function elsewhere in the file has none.
    """
    # Episode folders first.
    for collection_id in collection_ids.split(","):
        if "," not in collection_ids:
            season_url = SEASONSEPISODES
        else:
            season_url = SEASONSEPISODESEXTRA
        season_data = _connection.getURL(season_url % collection_id)
        season_tree = BeautifulSoup(season_data, "html.parser")
        episode_count = int(season_tree.episodes["totalitems"])
        if episode_count > 0:
            if "," not in collection_ids:
                display = "Episodes"
            else:
                display = "Episodes - %s" % season_tree.episode["collectiontitle"]
            _common.add_directory(display, SITE, "episodes", FULLEPISODES % collection_id)
    # Then clip folders.
    for collection_id in collection_ids.split(","):
        if "," not in collection_ids:
            seasonclips_url = SEASONSCLIPS
        else:
            seasonclips_url = SEASONSCLIPSEXTRA
        season_data2 = _connection.getURL(seasonclips_url % collection_id)
        season_tree2 = BeautifulSoup(season_data2, "html.parser")
        episode_count = int(season_tree2.episodes["totalitems"])
        if episode_count > 0:
            if "," not in collection_ids:
                display = "Clips"
            else:
                display = "Clips - %s" % season_tree2.episode["collectiontitle"]
            _common.add_directory(display, SITE, "episodes", CLIPS % collection_id)
    _common.set_view("seasons")
def list_qualities(BASE, video_url = _common.args.url, media_base = VIDEOURL):
    """Return (display, bitrate) pairs common to every segment of a video.

    Resolves the player redirect to the mediagen feed (unless video_url is
    already a feed), then intersects the rendition bitrates across all
    segments so only rates available for the whole video are offered.
    """
    if media_base not in video_url:
        video_url = media_base + video_url
    bitrates = []
    if 'feed' not in video_url:
        swf_url = _connection.getRedirect(video_url, header = {'Referer' : BASE})
        params = dict(item.split("=") for item in swf_url.split('?')[1].split("&"))
        uri = urllib.unquote_plus(params['uri'])
        config_url = urllib.unquote_plus(params['CONFIG_URL'])
        config_data = _connection.getURL(config_url, header = {'Referer' : video_url, 'X-Forwarded-For' : '12.13.14.15'})
        feed_url = BeautifulSoup(config_data, 'html.parser', parse_only = SoupStrainer('feed')).feed.string
        # NOTE(review): .replace('&', '&') is a no-op -- presumably meant to
        # decode '&amp;' entities; confirm against upstream source.
        feed_url = feed_url.replace('{uri}', uri).replace('&', '&').replace('{device}', DEVICE).replace('{ref}', 'None').strip()
    else:
        feed_url = video_url
    feed_data = _connection.getURL(feed_url)
    video_tree = BeautifulSoup(feed_data, 'html.parser', parse_only = SoupStrainer('media:group'))
    video_segments = video_tree.find_all('media:content')
    srates = []
    for video_segment in video_segments:
        video_url3 = video_segment['url'].replace('{device}', DEVICE)
        video_data3 = _connection.getURL(video_url3, header = {'X-Forwarded-For' : '12.13.14.15'})
        video_menu = BeautifulSoup(video_data3).findAll('rendition')
        # Keep only rates seen in every segment so far (set intersection).
        orates = srates
        srates = []
        for video_index in video_menu:
            bitrate = int(video_index['bitrate'])
            srates.append((bitrate, bitrate))
        if orates != []:
            srates = list(set(srates).intersection(orates))
    bitrates = srates
    return bitrates
def seasons(season_urls=_common.args.url):
    """Add 'Full Episodes' and 'Clips' folders for each show URL.

    season_urls may be a single show URL or a comma-separated list; with a
    list, each clips folder is labelled with its show URL.
    """
    multiple = ',' in season_urls
    for show in season_urls.split(','):
        quoted = urllib.quote_plus(show)
        # Probe with a one-item range just to learn the total count.
        probe = _connection.getURL(FULLEPISODES % quoted + '&range=0-1')
        try:
            total = int(simplejson.loads(probe)['totalResults'])
        except:
            total = 0
        if total > 0:
            full_url = FULLEPISODES % quoted + '&range=0-' + str(total)
            _common.add_directory('Full Episodes', SITE, 'episodes', full_url)
        probe = _connection.getURL(CLIPS % quoted + '&range=0-1')
        try:
            clip_total = int(simplejson.loads(probe)['totalResults'])
        except:
            clip_total = 0
        if clip_total > 0:
            clips_url = CLIPS % quoted + '&range=0-' + str(clip_total)
            if multiple:
                _common.add_directory('Clips %s' % show, SITE, 'episodes', clips_url)
            else:
                _common.add_directory('Clips', SITE, 'episodes', clips_url)
    _common.set_view('seasons')
def seasons(collection_ids = _common.args.url):
    """Add Episode and Clip folders for one or more collection ids.

    With a comma-separated list of ids, the 'extra' URL templates are used
    and each folder label carries the collection title.
    """
    multi = ',' in collection_ids
    id_list = collection_ids.split(',')
    # Episode folders first, then clip folders, preserving menu order.
    for cid in id_list:
        url_template = SEASONSEPISODESEXTRA if multi else SEASONSEPISODES
        listing = _connection.getURL(url_template % cid)
        tree = BeautifulSoup(listing, 'html.parser')
        if int(tree.episodes['totalitems']) > 0:
            if multi:
                label = 'Episodes - %s' % tree.episode['collectiontitle']
            else:
                label = 'Episodes'
            _common.add_directory(label, SITE, 'episodes', FULLEPISODES % cid)
    for cid in id_list:
        url_template = SEASONSCLIPSEXTRA if multi else SEASONSCLIPS
        listing = _connection.getURL(url_template % cid)
        tree = BeautifulSoup(listing, 'html.parser')
        if int(tree.episodes['totalitems']) > 0:
            if multi:
                label = 'Clips - %s' % tree.episode['collectiontitle']
            else:
                label = 'Clips'
            _common.add_directory(label, SITE, 'episodes', CLIPS % cid)
    _common.set_view('seasons')
def seasons(season_urls = _common.args.url):
    """List seasons for a show, using section ids when available.

    Fetches the show's /video page; if the page exposes
    video.section_ids, each section becomes a titled season folder,
    otherwise season <option>-style elements drive classic episode/clip
    folders.
    """
    root_url = season_urls
    # Normalize to the '<show>/video' listing page.
    if season_urls[-1:] == '/':
        season_urls = season_urls + 'video'
    else:
        season_urls = season_urls + '/video'
    season_data = _connection.getURL(season_urls)
    show_id = re.compile('video.settings.show_id = (.*);').findall(season_data)[0]
    section_ids = re.compile('video.section_ids = \[(.*)\];').findall(season_data)[0]
    if section_ids:
        for section in section_ids.split(','):
            season_url = SEASONS % section
            season_data2 = _connection.getURL(season_url)
            try:
                season_title = simplejson.loads(season_data2)['result']['title']
                _common.add_directory(season_title, SITE, 'episodes', FULLEPISODES % (section, show_id))
            except:
                # Sections without a parsable title are skipped silently.
                pass
    else:
        # Classic layout: one folder per season value, episodes then clips.
        show_tree = BeautifulSoup(season_data, 'html5lib')
        season_menu = show_tree.find_all(attrs = {'name' : 'season'})
        for season_item in season_menu:
            season_url = root_url + 'season/%s/videos/episodes' % season_item['value']
            _common.add_directory('Season ' + season_item['value'], SITE, 'episodesClassic', season_url)
        for season_item in season_menu:
            season_url = root_url + 'season/%s/videos/clips' % season_item['value']
            _common.add_directory('Clips Season ' + season_item['value'], SITE, 'episodesClassic', season_url)
    _common.set_view('seasons')
def seasons(collection_ids = _common.args.url):
    """Add Episodes and Clips folders for each collection id.

    collection_ids may be a single id or a comma-separated list; for a
    list the folder labels include the collection title.

    Fix: removed two leftover debug ``print`` statements (each also
    re-parsed the response a second time for no reason); the sibling
    implementation of this function elsewhere in the file has none.
    """
    # Episode folders first.
    for collection_id in collection_ids.split(','):
        if ',' not in collection_ids:
            season_url = SEASONSEPISODES
        else:
            season_url = SEASONSEPISODESEXTRA
        season_data = _connection.getURL(season_url % collection_id)
        season_tree = BeautifulSoup(season_data, 'html.parser')
        episode_count = int(season_tree.episodes['totalitems'])
        if episode_count > 0:
            if ',' not in collection_ids:
                display = 'Episodes'
            else:
                display = 'Episodes - %s' % season_tree.episode['collectiontitle']
            _common.add_directory(display, SITE, 'episodes', FULLEPISODES % collection_id)
    # Then clip folders.
    for collection_id in collection_ids.split(','):
        if ',' not in collection_ids:
            seasonclips_url = SEASONSCLIPS
        else:
            seasonclips_url = SEASONSCLIPSEXTRA
        season_data2 = _connection.getURL(seasonclips_url % collection_id)
        season_tree2 = BeautifulSoup(season_data2, 'html.parser')
        episode_count = int(season_tree2.episodes['totalitems'])
        if episode_count > 0:
            if ',' not in collection_ids:
                display = 'Clips'
            else:
                display = 'Clips - %s' % season_tree2.episode['collectiontitle']
            _common.add_directory(display, SITE, 'episodes', CLIPS % collection_id)
    _common.set_view('seasons')
def list_qualities(video_url=_common.args.url):
    """Return (display, bitrate) pairs for the clip referenced by a page.

    Pages carrying a clipId param are resolved through the legacy SMIL
    renditions; otherwise the <video src> is treated as an HLS master
    playlist and audio-only 'mp4a.40.2' variants are skipped.
    """
    qualities = []
    page_data = _connection.getURL(video_url)
    page_tree = BeautifulSoup(page_data, 'html.parser')
    stream_src = page_tree.video['src']
    clip_param = page_tree.video.find('param', attrs={'name': 'clipId'})
    if clip_param is not None:
        # Legacy path: resolve the clip id to its SMIL renditions.
        clip_page = _connection.getURL(VIDEOPAGE % clip_param['value'])
        clip_tree = BeautifulSoup(clip_page, 'html.parser')
        smil_data = _connection.getURL(SMIL_BASE + clip_tree.clipurl.string)
        smil_tree = BeautifulSoup(smil_data, 'html.parser')
        for rendition in smil_tree.find_all('video'):
            rate = int(rendition['system-bitrate'])
            qualities.append((int(rate) / 1024, rate))
    else:
        # HLS path: enumerate every non-audio-only variant.
        master_data = _connection.getURL(stream_src)
        master = _m3u8.parse(master_data)
        for variant in master.get('playlists'):
            try:
                codecs = variant.get('stream_info')['codecs']
            except:
                codecs = ''
            if codecs != 'mp4a.40.2':
                rate = int(variant.get('stream_info')['bandwidth'])
                qualities.append((int(rate) / 1024, rate))
    return qualities
def list_qualities(video_url = _common.args.url):
    """Return available (display, bitrate) pairs for a clip.

    Two layouts are handled: pages with a clipId param (legacy SMIL
    renditions) and pages whose <video src> points at an HLS master
    playlist, where audio-only 'mp4a.40.2' variants are skipped.
    """
    bitrates = []
    video_data = _connection.getURL(video_url)
    smil_tree = BeautifulSoup(video_data, 'html.parser')
    video_url2 = smil_tree.video['src']
    clip_id = smil_tree.video.find('param', attrs = {'name' : 'clipId'})
    if clip_id is not None:
        # Legacy path: resolve the clip id to its SMIL renditions.
        clip_id = clip_id['value']
        video_url = VIDEOPAGE % clip_id
        video_data = _connection.getURL(video_url)
        video_tree = BeautifulSoup(video_data, 'html.parser')
        clip_url = SMIL_BASE + video_tree.clipurl.string
        smil_data = _connection.getURL(clip_url)
        smil_tree = BeautifulSoup(smil_data, 'html.parser')
        video_url2 = smil_tree.find_all('video')
        for video_index in video_url2:
            bitrate = int(video_index['system-bitrate'])
            display = int(bitrate)/1024
            bitrates.append((display, bitrate))
    else:
        # HLS path: enumerate every non-audio-only variant.
        m3u_master_data = _connection.getURL(video_url2)
        m3u_master = _m3u8.parse(m3u_master_data)
        for video_index in m3u_master.get('playlists'):
            try:
                codecs = video_index.get('stream_info')['codecs']
            except:
                codecs = ''
            if codecs != 'mp4a.40.2':
                bitrate = int(video_index.get('stream_info')['bandwidth'])
                display = int(bitrate)/1024
                bitrates.append((display, bitrate))
    return bitrates
def play_uri(video_uri = _common.args.url, video_referer = 'www.vh1.com'):
    """Resolve a VH1 video URI to a stacked RTMP URL and start playback.

    Follows the player redirect to the config XML, expands the mrss feed
    template, then picks the best rendition per segment (capped by the
    quality setting) and stacks the segment URLs for XBMC.
    """
    swfUrl = _connection.getRedirect(BASE2 + video_uri, referer = video_referer)
    configurl = urllib.unquote_plus(swfUrl.split('CONFIG_URL=')[1].split('&')[0])
    configxml = _connection.getURL(configurl)
    video_tree = BeautifulSoup(configxml)
    feed = video_tree.player.feed
    try:
        # NOTE(review): .replace('&', '&') is a no-op -- presumably it was
        # meant to decode '&amp;' entities; confirm against upstream source.
        mrssurl = feed.string.replace('{uri}', video_uri).replace('{ref}', 'None').replace('&', '&').strip()
        mrssxml = _connection.getURL(mrssurl)
        mrsstree = BeautifulSoup(mrssxml)
    except:
        # Fall back to treating the feed element itself as the tree.
        mrsstree = feed
    segmenturls = mrsstree.find_all('media:content')
    stacked_url = 'stack://'
    for segment in segmenturls:
        surl = segment['url']
        videos = _connection.getURL(surl)
        videos = BeautifulSoup(videos).find_all('rendition')
        hbitrate = -1
        sbitrate = int(_addoncompat.get_setting('quality'))
        # Highest bitrate not above the configured quality wins.
        for video in videos:
            bitrate = int(video['bitrate'])
            if bitrate > hbitrate and bitrate <= sbitrate:
                hbitrate = bitrate
                rtmpdata = video.src.string
        rtmpurl = MP4URL + rtmpdata.split('viacomvh1strm')[2]
        # Commas must be doubled inside a stack:// component.
        stacked_url += rtmpurl.replace(',',',,') + ' , '
    finalurl = stacked_url[:-3]
    xbmcplugin.setResolvedUrl(pluginHandle, True, xbmcgui.ListItem(path = finalurl))
def seasons(season_url=_common.args.url):
    """Add 'Full Episodes' and 'Clips' folders sized by the listing count.

    Probes each listing with a single row to read data-numfound, then adds
    a folder whose URL requests exactly that many rows.
    """
    page = _connection.getURL(FULLEPISODES % season_url + '&start=0&rows=1',
                              forwardheader='12.13.14.15')
    try:
        found = int(BeautifulSoup(page).find(
            'section', class_='video-content-list')['data-numfound'])
    except:
        found = 0
    if found > 0:
        full_url = FULLEPISODES % season_url + '&start=0&rows=' + str(found)
        _common.add_directory('Full Episodes', SITE, 'episodes', full_url)
    page = _connection.getURL(CLIPS % season_url + '&start=0&rows=1',
                              forwardheader='12.13.14.15')
    try:
        found = int(BeautifulSoup(page).find(
            'section', class_='video-content-list')['data-numfound'])
    except:
        found = 0
    if found > 0:
        clips_url = CLIPS % season_url + '&start=0&rows=' + str(found)
        _common.add_directory('Clips', SITE, 'episodes', clips_url)
    _common.set_view('seasons')
def episodes(episode_url=_common.args.url):
    """List clips (following 'next' pagination) or full episodes per season.

    Clip listings recurse through the 'next' link; full-episode pages with
    a season menu are expanded one season at a time via SEASONURL.
    """
    episode_data = _connection.getURL(episode_url)
    # Strip JS string-concatenation artifacts ('+' joins) before parsing.
    episode_tree = BeautifulSoup(episode_data.replace('\'+\'', ''), 'html.parser')
    if _common.args.name == 'Clips':
        if episode_tree.find('a', class_='next') is not None:
            add_clips(episode_tree)
            # Recurse into the next page; failures just stop pagination.
            try:
                episodes(
                    episode_url.split('?')[0] +
                    episode_tree.find('a', class_='next')['href'])
            except:
                pass
        else:
            add_clips(episode_tree)
    else:
        if episode_tree.find('a', class_='season_menu') is not None:
            show_id = re.compile('var showId = "(.+?)";').findall(
                episode_data)[0]
            episode_id = re.compile('var episodeId = "(.+?)";').findall(
                episode_data)[0]
            episode_menu = episode_tree.find_all('a', class_='season')
            for episode_item in episode_menu:
                episode_data2 = _connection.getURL(
                    SEASONURL % (show_id, episode_item['id'], episode_id))
                episode_tree2 = BeautifulSoup(episode_data2)
                # The link text is expected to look like 'Season N'.
                add_fullepisodes(episode_tree2, episode_item.text.split(' ')[1])
        else:
            add_fullepisodes(episode_tree)
    _common.set_view('episodes')
def play_video(video_url=_common.args.url):
    """Resolve a show page into a stacked RTMP URL and start playback.

    Scrapes the player URI from the page, follows the config redirect to
    the mediagen feed, picks the best rendition per segment (capped by the
    quality setting), stacks the segment URLs, and optionally converts and
    attaches per-segment subtitles.
    """
    video_url9 = 'stack://'
    sbitrate = int(_addoncompat.get_setting('quality'))
    closedcaption = None
    video_data = _connection.getURL(video_url, forwardheader='12.13.14.15')
    # The player URI lives either in the og:video meta tag or a JS variable.
    try:
        video_url2 = re.compile(
            '<meta content="http://media.mtvnservices.com/fb/(.+?).swf" property="og:video"/>'
        ).findall(video_data)[0]
    except:
        video_url2 = re.compile("NICK.unlock.uri = '(.+?)';").findall(
            video_data)[0]
    video_url3 = _connection.getRedirect('http://media.mtvnservices.com/fb/' +
                                         video_url2 + '.swf', referer=BASE)
    video_url4 = urllib.unquote_plus(
        video_url3.split('CONFIG_URL=')[1].split('&')[0]).strip()
    video_data2 = _connection.getURL(
        video_url4,
        referer='http://media.mtvnservices.com/fb/' + video_url2 + '.swf')
    video_tree = BeautifulSoup(video_data2)
    # NOTE(review): .replace('&', '&') is a no-op -- presumably meant to
    # decode '&amp;' entities; confirm against upstream source.
    video_url5 = video_tree.feed.string.replace('{uri}', video_url2).replace(
        '&', '&').replace('{type}', 'network')
    video_data3 = _connection.getURL(video_url5)
    video_tree2 = BeautifulSoup(video_data3)
    video_segments = video_tree2.find_all('media:content')
    for video_segment in video_segments:
        hbitrate = -1
        video_url6 = video_segment['url']
        video_data4 = _connection.getURL(video_url6)
        video_menu = BeautifulSoup(video_data4).find_all('rendition')
        # Highest bitrate not exceeding the configured quality wins.
        for video_index in video_menu:
            bitrate = int(video_index['bitrate'])
            if bitrate > hbitrate and bitrate <= sbitrate:
                hbitrate = bitrate
                video_url7 = video_index.find('src').string
        video_url8 = video_url7 + ' swfurl=' + video_url3.split(
            '?')[0] + ' pageUrl=' + BASE + ' swfvfy=true'
        # Commas must be doubled inside a stack:// component.
        video_url9 += video_url8.replace(',', ',,') + ' , '
    finalurl = video_url9[:-3]
    try:
        closedcaption = video_tree2.find_all('media:text')
    except:
        pass
    if (_addoncompat.get_setting('enablesubtitles') == 'true') and (closedcaption is not None):
        convert_subtitles(closedcaption)
    xbmcplugin.setResolvedUrl(pluginHandle, True,
                              xbmcgui.ListItem(path=finalurl))
    if (_addoncompat.get_setting('enablesubtitles') == 'true') and (closedcaption is not None):
        # Wait for playback to begin before attaching subtitle files.
        while not xbmc.Player().isPlaying():
            xbmc.sleep(100)
        for count in range(1, len(closedcaption)):
            xbmc.Player().setSubtitles(
                os.path.join(_common.CACHEPATH, 'subtitle-%s.srt' % str(count)))
            # NOTE(review): reconstructed indentation -- this wait loop may
            # belong inside or after the for loop in the original; verify.
            while xbmc.Player().isPlaying():
                xbmc.sleep(10)
def onPlayBackEnded( self ):
    """Fired by XBMC when playback of a segment stops.

    Once the final segment has ended, deactivates the handler and, when a
    local HTTP server was started, asks it to shut down.
    """
    print("**************************** End Event *****************************")
    if self._counter != self._segments:
        return
    print("**************************** End Event -- Stopping Server *****************************")
    self.is_active = False
    if self._localHTTPServer:
        _connection.getURL('http://localhost:12345/stop', connectiontype = 0)
def episodes_from_html(episode_url=_common.args.url, page=1):
    """Add episodes by analysing the HTML of the page.

    Fix: fetch the page and initialise episode_tree unconditionally --
    previously both happened only when page == 1, so the recursive calls
    made for later pages hit unbound locals (NameError on
    episode_data/episode_tree).  The sibling episodes() implementation in
    this file already uses this ordering.
    """
    episode_data = _connection.getURL(episode_url)
    episode_tree = None
    if page == 1:
        # On the first page, try to redirect to the real feed URL.
        try:
            episode_url = re.compile("var .*Showcase.* = '(.*)'").findall(
                episode_data)[0]
            if 'http' not in episode_url:
                episode_url = BASE + episode_url
            episode_data = _connection.getURL(episode_url)
        except:
            try:
                episode_tree = BeautifulSoup(episode_data, 'html5lib')
                episode_url = episode_tree.find('div', class_='content')['data-feed']
                episode_data = _connection.getURL(episode_url)
                episode_tree = BeautifulSoup(episode_data, 'html5lib')
            except:
                pass
    if episode_tree is None:
        episode_tree = BeautifulSoup(episode_data, 'html5lib')
    if 'Clips' in _common.args.name:
        if 'southpark' in episode_url:
            add_clips_southpark(episode_tree)
        else:
            next = episode_tree.find('a', class_=re.compile('next'))
            add_video(episode_tree)
            if next is not None:
                try:
                    # The next link is a plain href or a loadContent() onclick.
                    if 'href' in next.attrs:
                        nexturl = next['href'].replace(' ', '+')
                    else:
                        nexturl = next['onclick'].split(';')[0].replace(
                            "loadContent('", "").replace("')", "")
                    if 'http' not in nexturl:
                        nexturl = BASE + nexturl
                    if page < int(_addoncompat.get_setting('maxpages')):
                        episodes_from_html(nexturl, page + 1)
                except:
                    pass
    else:
        if 'southpark' in episode_url:
            add_fullepisodes_southpark(episode_tree)
        else:
            next = episode_tree.find('a', class_=re.compile('next'))
            add_video(episode_tree, False)
            if next is not None:
                try:
                    nexturl = next['href']
                    if nexturl[0] == '?':
                        nexturl = episode_url.split('?')[0] + nexturl
                    elif 'http' not in nexturl:
                        nexturl = BASE + nexturl
                    if page < int(_addoncompat.get_setting('maxpages')):
                        episodes_from_html(nexturl, page + 1)
                except:
                    pass
def episodes(episode_url = _common.args.url, page = 1):
    """List episodes or clips, following 'next' pagination recursively.

    On the first page the real feed URL is discovered either from a
    'Showcase' JS variable or from the content div's data-feed attribute.
    Show-specific feeds (colbertnation, dailyshow, southpark) get their own
    parsers; everything else goes through add_video.
    """
    episode_data = _connection.getURL(episode_url)
    episode_tree = None
    if page == 1:
        try:
            episode_url = re.compile("var .*Showcase.* = '(.*)'").findall(episode_data)[0]
            if 'http' not in episode_url:
                episode_url = BASE + episode_url
            episode_data = _connection.getURL(episode_url)
        except:
            try:
                episode_tree = BeautifulSoup(episode_data, 'html5lib')
                episode_url = episode_tree.find('div', class_ = 'content')['data-feed']
                episode_data = _connection.getURL(episode_url)
                episode_tree = BeautifulSoup(episode_data, 'html5lib')
            except:
                pass
    if episode_tree is None:
        episode_tree = BeautifulSoup(episode_data, 'html5lib')
    if 'Clips' in _common.args.name :
        if 'colbertnation' in episode_url:
            add_clips_colbertnation(episode_tree)
        elif 'southpark' in episode_url:
            add_clips_southpark(episode_tree)
        else:
            next = episode_tree.find('a', class_ = re.compile('next'))
            add_video(episode_tree)
            if next is not None:
                try:
                    # The next link is a plain href or a loadContent() onclick.
                    if 'href' in next.attrs:
                        nexturl = next['href'].replace(' ', '+')
                    else:
                        nexturl = next['onclick'].split(';')[0].replace("loadContent('", "").replace("')", "")
                    if 'http' not in nexturl:
                        nexturl = BASE + nexturl
                    if page < int(_addoncompat.get_setting('maxpages')):
                        episodes(nexturl, page + 1)
                except:
                    pass
    else:
        if ('colbertnation' in episode_url) or ('dailyshow' in episode_url):
            add_fullepisodes_colbertnation(episode_tree)
        elif 'southpark' in episode_url:
            add_fullepisodes_southpark(episode_tree)
        else:
            next = episode_tree.find('a', class_ = re.compile('next'))
            add_video(episode_tree, True)
            if next is not None:
                try:
                    nexturl = next['href']
                    if nexturl[0] == '?':
                        nexturl = episode_url.split('?')[0] + nexturl
                    elif 'http' not in nexturl:
                        nexturl = BASE + nexturl
                    # NOTE(review): no maxpages guard here, unlike the Clips
                    # branch above -- confirm whether that is intentional.
                    episodes(nexturl, page + 1)
                except:
                    pass
    _common.set_view('episodes')
def play_video(guid = _common.args.url):
    """Resolve a video guid to a playable URL and start playback.

    Prefers the iPad (HLS) stream: picks the highest-bandwidth variant at
    or under the configured quality, skipping audio-only 'mp4a.40.5'
    variants, with the lowest acceptable variant as a fallback.  If the
    iPad path fails, falls back to the Flash RTMP stream.  Optionally
    converts and attaches SAMI subtitles.
    """
    video_url = VIDEO % guid
    hbitrate = -1
    lbitrate = -1
    sbitrate = int(_addoncompat.get_setting('quality')) * 1024
    closedcaption = None
    video_url2 = None
    finalurl = ''
    video_data = _connection.getURL(video_url)
    video_menu = simplejson.loads(video_data)['items']
    video_item = video_menu[0]
    try:
        closedcaption = video_item['captions']['sami']['url']
    except:
        pass
    if (_addoncompat.get_setting('enablesubtitles') == 'true') and (closedcaption is not None) and (closedcaption != ''):
        convert_subtitles(closedcaption.replace(' ', '+'))
    try:
        # Preferred path: iPad HLS master playlist.
        ipad_url = video_item['videos']['ipad']['url']
        video_data2 = _connection.getURL(ipad_url + '?format=json')
        video_url3 = simplejson.loads(video_data2)['url']
        video_data3 = _connection.getURL(video_url3)
        video_url4 = _m3u8.parse(video_data3)
        uri = None
        for video_index in video_url4.get('playlists'):
            try:
                codecs = video_index.get('stream_info')['codecs']
            except:
                codecs = ''
            if codecs != 'mp4a.40.5':
                bitrate = int(video_index.get('stream_info')['bandwidth'])
                # Track the lowest variant as a fallback choice.
                if bitrate < lbitrate or lbitrate == -1:
                    lbitrate = bitrate
                    luri = video_index.get('uri')
                if bitrate > hbitrate and bitrate <= sbitrate:
                    hbitrate = bitrate
                    uri = video_index.get('uri')
        if uri is None:
            uri = luri
        # Variant URIs are relative to the master playlist location.
        finalurl = video_url3.rsplit('/', 1)[0] + '/' + uri
    except:
        # Fallback path: Flash RTMP stream.
        flash_url = video_item['videos']['flash']['url']
        video_data2 = _connection.getURL(flash_url + '?format=json')
        video_url3 = simplejson.loads(video_data2)['url']
        if '.mp4' in video_url3:
            base_url, playpath_url = video_url3.split('mp4:')
            playpath_url = ' playpath=mp4:' + playpath_url
        elif 'flv' in video_url3:
            base_url, playpath_url = video_url3.split('flv:')
            playpath_url = ' playpath=' + playpath_url.replace('.flv','')
        finalurl = base_url + playpath_url + '?player= swfurl=' + SWFURL % guid + ' swfvfy=true'
    xbmcplugin.setResolvedUrl(pluginHandle, True, xbmcgui.ListItem(path = finalurl))
    if (_addoncompat.get_setting('enablesubtitles') == 'true') and (closedcaption is not None) and (closedcaption != ''):
        # Wait for playback to begin, then attach the converted subtitles.
        while not xbmc.Player().isPlaying():
            xbmc.sleep(100)
        xbmc.Player().setSubtitles(_common.SUBTITLESMI)
def episodes_json(SITE):
    """List episodes from a JSON feed.

    _common.args.url is '<master name>#<feed url>'.  Each feed item is
    turned into a video entry with duration, air date, season/episode
    numbers, and plot, falling back to sentinel values (-1/None) when a
    field is missing or unparsable.
    """
    episode_url = _common.args.url
    master_name = episode_url.split('#')[0]
    episode_url = episode_url.split('#')[1]
    episode_data = _connection.getURL(episode_url)
    episode_menu = simplejson.loads(episode_data)
    for episode_item in episode_menu:
        url = episode_item['episodeID']
        try:
            episode_duration = episode_item['length']
        except:
            episode_duration = -1
        try:
            episode_airdate = _common.format_date(episode_item['airDate'].split('on ')[1],'%B %d, %Y')
        except:
            episode_airdate = -1
        try:
            episode_plot = episode_item['summary']
        except:
            episode_plot = episode_item['shortdescription']
        episode_name = episode_item['title']
        if episode_name == master_name:
            # Item titled like the show itself: fetch its real headline.
            video_url = EPISODE % url
            video_data = _connection.getURL(video_url)
            video_tree = BeautifulSoup(video_data, 'html.parser')
            episode_name = video_tree.headline.string
        elif episode_name == "":
            episode_name = episode_plot
        # identifier looks like 'Season N, Episode M' (formats vary).
        try:
            season_number = int(episode_item['identifier'].split(',')[0].split(' ')[1])
        except:
            season_number = -1
        try:
            episode_number = int(episode_item['identifier'].split(', ')[1].split(' ')[1].replace(' Episode ', ''))
        except:
            try:
                episode_number = int(episode_item['identifier'].split(', ')[1].split(' ')[1])
            except:
                episode_number = -1
        try:
            episode_thumb = episode_item['640x360_jpg']
        except:
            episode_thumb = None
        # Build the plugin callback URL for this episode.
        u = sys.argv[0]
        u += '?url="' + urllib.quote_plus(url) + '"'
        u += '&mode="' + SITE + '"'
        u += '&sitemode="play_video"'
        infoLabels={ 'title' : episode_name,
                     'durationinseconds' : episode_duration,
                     'season' : season_number,
                     'episode' : episode_number,
                     'plot' : episode_plot,
                     'premiered' : episode_airdate }
        _common.add_video(u, episode_name, episode_thumb, infoLabels = infoLabels, quality_mode = 'list_qualities')
    _common.set_view('episodes')
def seasons(season_url = _common.args.url):
    """List playlist-taxonomy entries as season folders."""
    page_html = _connection.getURL(season_url)
    div_tree = BeautifulSoup(page_html, 'html.parser', parse_only = SoupStrainer('div'))
    source_id = div_tree.find('div', id = 'TPVideoPlaylistTaxonomyContainer')['source']
    raw = _connection.getURL(PLAYLIST % source_id)
    # Strip the JSONP wrapper so the payload parses as plain JSON.
    raw = raw.replace('$pdk.NBCplayer.ShowPlayerTaxonomy.GetList(', '').replace(');', '')
    taxonomy = simplejson.loads(raw)
    for entry in taxonomy['playlistTaxonomy']:
        ref = entry['reference']
        _common.add_directory(ref['name'], SITE, 'episodes', FEED % ref['feed'])
    _common.set_view('seasons')
def episodes(episode_url = _common.args.url):
    """Add every video from a paginated listing page."""
    listing_html = _connection.getURL(episode_url)
    add_videos(BeautifulSoup(listing_html))
    # 'new Paginator(total, current)' reveals how many pages exist.
    pages = re.compile('new Paginator\((.+?),(.+?)\)').findall(listing_html)
    if pages:
        total_pages = int(pages[0][0])
        current_page = int(pages[0][1])
        if total_pages > 1:
            for index in range(1, total_pages):
                listing_html = _connection.getURL(episode_url + '/' + str(index) + '/')
                add_videos(BeautifulSoup(listing_html))
    _common.set_view('episodes')
def seasons(season_url = _common.args.url):
    """List playlist-taxonomy entries as season folders (best effort)."""
    page_html = _connection.getURL(season_url)
    div_tree = BeautifulSoup(page_html, 'html.parser', parse_only = SoupStrainer('div'))
    source_id = div_tree.find('div', id = 'TPVideoPlaylistTaxonomyContainer')['source']
    raw = _connection.getURL(PLAYLIST % source_id)
    # Strip the JSONP wrapper so the payload parses as plain JSON.
    raw = raw.replace('$pdk.NBCplayer.ShowPlayerTaxonomy.GetList(', '').replace(');', '')
    taxonomy = simplejson.loads(raw)
    try:
        for entry in taxonomy['playlistTaxonomy']:
            ref = entry['reference']
            _common.add_directory(ref['name'], SITE, 'episodes', FEED % ref['feed'])
    except:
        pass
    _common.set_view('seasons')
def episodes(episode_url=_common.args.url):
    """List clips (paged by tens) or full episodes (paged via links).

    Clip listings compute the page count from the result total; full
    episodes pass a season number parsed from the directory name to
    add_fullepisodes when available.
    """
    episode_data = _connection.getURL(episode_url)
    episode_tree = BeautifulSoup(episode_data)
    if 'Video Clips' in _common.args.name:
        episode_url2 = episode_tree.find('div', class_='v_content')['data-url']
        if episode_tree.find('div', class_='pagination') is not None:
            # Result text ends with the total count; 10 clips per page.
            episode_count = int(
                episode_tree.find('div',
                                  class_='result').text.rsplit(' ', 1)[1].strip())
            episode_items, episode_rest = divmod(episode_count, 10)
            if episode_rest > 0:
                episode_items = episode_items + 1
            # Honor the user's page cap.
            if episode_items > int(_addoncompat.get_setting('maxpages')):
                episode_items = int(_addoncompat.get_setting('maxpages'))
            for episode_item in range(episode_items):
                episode_data2 = _connection.getURL(episode_url2 + '?page=' + str(episode_item + 1))
                episode_tree2 = BeautifulSoup(episode_data2)
                add_clips(episode_tree2)
        else:
            episode_data2 = _connection.getURL(episode_url2 + '?page=1')
            episode_tree2 = BeautifulSoup(episode_data2)
            add_clips(episode_tree2)
    else:
        # Directory name may be 'Season N' or just a number; fall back to no season.
        try:
            add_fullepisodes(episode_tree, int(_common.args.name.split(' ')[1]))
        except:
            try:
                add_fullepisodes(episode_tree, int(_common.args.name))
            except:
                add_fullepisodes(episode_tree)
        if episode_tree.find('div', class_='pagination') is not None:
            episode_items2 = episode_tree.find(
                'div', class_='pagination').find_all('a')
            for episode_item2 in episode_items2:
                if (episode_item2.text != 'Next'):
                    episode_data3 = _connection.getURL(episode_item2['href'])
                    episode_tree3 = BeautifulSoup(episode_data3)
                    try:
                        add_fullepisodes(episode_tree3, int(_common.args.name.split(' ')[1]))
                    except:
                        try:
                            add_fullepisodes(episode_tree3, int(_common.args.name))
                        except:
                            add_fullepisodes(episode_tree3)
    _common.set_view('episodes')
def seasons(season_url = _common.args.url):
    """Add 'Full Episodes' and 'Clips' folders sized by the feed's count."""
    probe = _connection.getURL(FULLEPISODES % season_url,
                               header = {'X-Forwarded-For' : '12.13.14.15'})
    try:
        total = int(simplejson.loads(probe)['meta']['count'])
    except:
        total = 0
    if total > 0:
        full_url = FULLEPISODES % season_url + '&start=0&rows=' + str(total)
        _common.add_directory('Full Episodes', SITE, 'episodes', full_url)
    probe = _connection.getURL(CLIPS % season_url,
                               header = {'X-Forwarded-For' : '12.13.14.15'})
    try:
        total = int(simplejson.loads(probe)['meta']['count'])
    except:
        total = 0
    if total > 0:
        clips_url = CLIPS % season_url + '&start=0&rows=' + str(total)
        _common.add_directory('Clips', SITE, 'episodes', clips_url)
    _common.set_view('seasons')
def episodes(episode_url = _common.args.url):
    """List full episodes or clips for a show, caching thumbnails locally.

    episode_url is '<show id>#<fullep flag>'.  Thumbnails are downloaded
    into the add-on cache unless XBMC's texture DB already references them.
    """
    # Start with a clean thumbnail cache for this listing.
    try:
        shutil.rmtree(os.path.join(_common.CACHEPATH,'thumbs'))
    except:
        pass
    episode_data = _connection.getURL(VIDEOLIST % episode_url.split('#')[0])
    episode_menu = simplejson.loads(episode_data)['videos']
    os.mkdir(os.path.join(_common.CACHEPATH,'thumbs'))
    for episode_item in episode_menu:
        # Keep only items matching the requested full-episode/clip flag.
        if int(episode_item['fullep']) == int(episode_url.split('#')[1]):
            show_name = episode_item['series_name']
            url = episode_item['guid']
            episode_duration = int(episode_item['duration_secs'])
            episode_plot = episode_item['description_long']
            episode_name = episode_item['title']
            season_number = int(episode_item['season'])
            episode_thumb = episode_item['large_thumbnail']
            thumb_file = episode_thumb.split('/')[-1]
            thumb_path = os.path.join(_common.CACHEPATH, 'thumbs', thumb_file)
            dbpath = xbmc.translatePath(_database.DBPATH)
            thumbcount = 0
            # Skip the download when XBMC's texture cache already has it.
            for name in glob.glob(os.path.join(dbpath, 'textures[0-9]*.db')):
                thumbcount = thumbcount + _database.execute_command(
                    'select count(1) from texture where url = ?',
                    [thumb_path,], fetchone = True, dbfile = name)[0]
            if thumbcount == 0:
                thumb_data = _connection.getURL(episode_thumb)
                file = open(thumb_path, 'wb')
                file.write(thumb_data)
                file.close()
            try:
                # 'episode' concatenates season and episode numbers; strip
                # the leading season digits to get the episode number.
                episode_number = int(episode_item['episode'][len(str(season_number)):])
            except:
                episode_number = -1
            try:
                episode_airdate = _common.format_date(episode_item['airdate'],'%Y-%b-%d', '%d.%m.%Y')
            except:
                episode_airdate = -1
            # Build the plugin callback URL for this episode.
            u = sys.argv[0]
            u += '?url="' + urllib.quote_plus(url) + '"'
            u += '&mode="' + SITE + '"'
            u += '&sitemode="play_video"'
            infoLabels={ 'title' : episode_name,
                         'durationinseconds' : episode_duration,
                         'season' : season_number,
                         'episode' : episode_number,
                         'plot' : episode_plot,
                         'premiered' : episode_airdate,
                         'tvshowtitle': show_name }
            _common.add_video(u, episode_name, thumb_path, infoLabels = infoLabels)
    _common.set_view('episodes')
def play_video(SITE):
    """Resolve an HLS variant playlist and play the selected bitrate.

    Picks either the explicitly requested quality (_common.args.quality) or,
    by default, the highest bitrate at or below the configured quality
    setting, skipping audio-only variants (codec 'mp4a.40.2').
    """
    video_url = _common.args.url
    # A quality may have been chosen from a quality-selection listing;
    # absent attribute means "use the configured setting".
    try:
        qbitrate = _common.args.quality
    except:
        qbitrate = None
    hbitrate = -1
    lbitrate = -1
    sbitrate = int(_addoncompat.get_setting('quality'))
    video_data = _connection.getURL(video_url)
    # The page embeds the authorised playlist URL in inline JavaScript.
    smil_url = re.compile("window.video_auth_playlist_url = '(.*)'").findall(
        video_data)[0]
    smil_data = _connection.getURL(smil_url + '&manifest=m3u')
    video_tree2 = BeautifulSoup(smil_data)
    video_url3 = video_tree2.video['src']
    video_data3 = _connection.getURL(video_url3)
    video_url4 = _m3u8.parse(video_data3)
    video_url5 = None
    for video_index in video_url4.get('playlists'):
        bitrate = int(video_index.get('stream_info')['bandwidth'])
        if qbitrate is None:
            try:
                codecs = video_index.get('stream_info')['codecs']
            except:
                codecs = ''
            # Track the lowest non-audio-only variant as a fallback in case
            # nothing fits under the quality cap.
            if (bitrate < lbitrate or lbitrate == -1) and 'mp4a.40.2' != codecs:
                lbitrate = bitrate
                lvideo_url5 = video_index.get('uri')
            # Best variant not exceeding the configured cap (setting is in
            # kbit/s, manifest bandwidth in bit/s).
            if bitrate > hbitrate and bitrate <= (
                    sbitrate * 1000) and codecs != 'mp4a.40.2':
                hbitrate = bitrate
                video_url5 = video_index.get('uri')
        elif bitrate == qbitrate:
            video_url5 = video_index.get('uri')
    # NOTE(review): if the playlist list is empty, lvideo_url5 is unbound
    # here and this raises NameError — confirm upstream always returns
    # at least one video variant.
    if video_url5 is None:
        video_url5 = lvideo_url5
    # Variant URIs are relative to the master playlist location.
    finalurl = video_url3.rsplit('/', 1)[0] + '/' + video_url5
    item = xbmcgui.ListItem(path=finalurl)
    if qbitrate is not None:
        item.setThumbnailImage(_common.args.thumb)
        item.setInfo(
            'Video', {
                'title': _common.args.name,
                'season': _common.args.season_number,
                'episode': _common.args.episode_number,
                'TVShowTitle': _common.args.show_title
            })
    xbmcplugin.setResolvedUrl(pluginHandle, True, item)
def episodes(SITE):
    """List videos on a page and follow 'new Paginator(total, current)' paging."""
    episode_url = _common.args.url
    episode_data = _connection.getURL(episode_url)
    episode_tree = BeautifulSoup(episode_data)
    add_videos(episode_tree, SITE)
    # The page embeds its pagination state in a JS constructor call.
    # Raw string avoids the invalid '\(' escape warning of the original.
    pagedata = re.compile(r"new Paginator\((.+?),(.+?)\)").findall(episode_data)
    if pagedata:
        total = int(pagedata[0][0])
        if total > 1:
            # Page 0 was already rendered above; fetch the remaining pages.
            for page in range(1, total):
                episode_data = _connection.getURL(episode_url + "/" + str(page) + "/")
                episode_tree = BeautifulSoup(episode_data)
                add_videos(episode_tree, SITE)
    _common.set_view("episodes")
def play_video(video_url=_common.args.url):
    """Resolve a video page (RTMP <meta> smil or HLS manifest) and play it."""
    hbitrate = -1
    sbitrate = int(_addoncompat.get_setting('quality')) * 1024
    closedcaption = None
    video_data = _connection.getURL(video_url)
    video_tree = BeautifulSoup(video_data, 'html.parser')
    video_rtmp = video_tree.meta
    if video_rtmp is not None:
        # RTMP smil: pick the highest bitrate not above the quality setting.
        base_url = video_rtmp['base']
        video_url2 = video_tree.switch.find_all('video')
        for video_index in video_url2:
            bitrate = int(video_index['system-bitrate'])
            if bitrate > hbitrate and bitrate <= sbitrate:
                hbitrate = bitrate
                playpath_url = video_index['src']
                if '.mp4' in playpath_url:
                    playpath_url = 'mp4:' + playpath_url
                else:
                    playpath_url = playpath_url.replace('.flv', '')
                finalurl = base_url + ' playpath=' + playpath_url + ' swfurl=' + SWFURL + ' swfvfy=true'
    else:
        video_data = _connection.getURL(video_url + '&manifest=m3u')
        video_tree = BeautifulSoup(video_data)
        try:
            closedcaption = video_tree.textstream['src']
        except:
            pass
        if (_addoncompat.get_setting('enablesubtitles') == 'true') and (closedcaption is not None):
            convert_subtitles(closedcaption)
        # BUG FIX: this assignment was commented out, leaving video_url2
        # undefined in the HLS branch and raising NameError on the next
        # statement (compare the intact sibling implementation).
        video_url2 = video_tree.seq.find_all('video')[0]
        video_url3 = video_url2['src']
        video_url4 = video_url3.split('/')[-1]
        video_data2 = _connection.getURL(video_url3)
        video_url5 = _m3u8.parse(video_data2)
        # Highest variant bitrate not above the configured quality cap.
        for video_index in video_url5.get('playlists'):
            bitrate = int(video_index.get('stream_info')['bandwidth'])
            if bitrate > hbitrate and bitrate <= sbitrate:
                hbitrate = bitrate
                finalurl = video_url3.replace(video_url4, video_index.get('uri'))
    xbmcplugin.setResolvedUrl(pluginHandle, True, xbmcgui.ListItem(path=finalurl))
    if (_addoncompat.get_setting('enablesubtitles') == 'true') and (closedcaption is not None):
        # Wait for playback to start before attaching the converted subtitles.
        while not xbmc.Player().isPlaying():
            xbmc.sleep(100)
        xbmc.Player().setSubtitles(_common.SUBTITLE)
def play_video(video_url=_common.args.url):
    """Resolve a video page (RTMP <meta> smil or HLS manifest) and play it.

    Chooses the highest bitrate at or below the configured quality setting
    and optionally attaches converted closed captions.
    """
    hbitrate = -1
    # Quality setting is kbit/s; manifest bitrates are bit/s.
    sbitrate = int(_addoncompat.get_setting('quality')) * 1024
    closedcaption = None
    video_data = _connection.getURL(video_url)
    video_tree = BeautifulSoup(video_data, 'html.parser')
    video_rtmp = video_tree.meta
    if video_rtmp is not None:
        # RTMP smil variant: the <meta base=...> holds the RTMP server URL.
        base_url = video_rtmp['base']
        video_url2 = video_tree.switch.find_all('video')
        for video_index in video_url2:
            bitrate = int(video_index['system-bitrate'])
            if bitrate > hbitrate and bitrate <= sbitrate:
                hbitrate = bitrate
                playpath_url = video_index['src']
                # rtmpdump convention: mp4 playpaths need an 'mp4:' prefix,
                # flv playpaths drop their extension.
                if '.mp4' in playpath_url:
                    playpath_url = 'mp4:' + playpath_url
                else:
                    playpath_url = playpath_url.replace('.flv', '')
                finalurl = base_url + ' playpath=' + playpath_url + ' swfurl=' + SWFURL + ' swfvfy=true'
    else:
        # HLS variant: re-request as an m3u manifest wrapper.
        video_data = _connection.getURL(video_url + '&manifest=m3u')
        video_tree = BeautifulSoup(video_data)
        try:
            closedcaption = video_tree.textstream['src']
        except:
            pass
        if (_addoncompat.get_setting('enablesubtitles') == 'true') and (
                closedcaption is not None):
            convert_subtitles(closedcaption)
        video_url2 = video_tree.seq.find_all('video')[0]
        video_url3 = video_url2['src']
        # Last path component is swapped for the chosen variant URI below.
        video_url4 = video_url3.split('/')[-1]
        video_data2 = _connection.getURL(video_url3)
        video_url5 = _m3u8.parse(video_data2)
        for video_index in video_url5.get('playlists'):
            bitrate = int(video_index.get('stream_info')['bandwidth'])
            if bitrate > hbitrate and bitrate <= sbitrate:
                hbitrate = bitrate
                finalurl = video_url3.replace(video_url4, video_index.get('uri'))
    xbmcplugin.setResolvedUrl(pluginHandle, True, xbmcgui.ListItem(path=finalurl))
    if (_addoncompat.get_setting('enablesubtitles') == 'true') and (
            closedcaption is not None):
        # Wait for playback to actually start before attaching subtitles.
        while not xbmc.Player().isPlaying():
            xbmc.sleep(100)
        xbmc.Player().setSubtitles(_common.SUBTITLE)
def play_video(SITE):
    """Resolve an HLS variant playlist and play the selected bitrate.

    Picks either the explicitly requested quality (_common.args.quality) or,
    by default, the highest bitrate at or below the configured quality
    setting, skipping audio-only variants (codec 'mp4a.40.2').
    """
    video_url = _common.args.url
    # A quality may have been chosen from a quality-selection listing;
    # absent attribute means "use the configured setting".
    try:
        qbitrate = _common.args.quality
    except:
        qbitrate = None
    hbitrate = -1
    lbitrate = -1
    sbitrate = int(_addoncompat.get_setting("quality"))
    video_data = _connection.getURL(video_url)
    # The page embeds the authorised playlist URL in inline JavaScript.
    smil_url = re.compile("window.video_auth_playlist_url = '(.*)'").findall(video_data)[0]
    smil_data = _connection.getURL(smil_url + "&manifest=m3u")
    video_tree2 = BeautifulSoup(smil_data)
    video_url3 = video_tree2.video["src"]
    video_data3 = _connection.getURL(video_url3)
    video_url4 = _m3u8.parse(video_data3)
    video_url5 = None
    for video_index in video_url4.get("playlists"):
        bitrate = int(video_index.get("stream_info")["bandwidth"])
        if qbitrate is None:
            try:
                codecs = video_index.get("stream_info")["codecs"]
            except:
                codecs = ""
            # Track the lowest non-audio-only variant as a fallback in case
            # nothing fits under the quality cap.
            if (bitrate < lbitrate or lbitrate == -1) and "mp4a.40.2" != codecs:
                lbitrate = bitrate
                lvideo_url5 = video_index.get("uri")
            # Best variant not exceeding the cap (setting in kbit/s,
            # manifest bandwidth in bit/s).
            if bitrate > hbitrate and bitrate <= (sbitrate * 1000) and codecs != "mp4a.40.2":
                hbitrate = bitrate
                video_url5 = video_index.get("uri")
        elif bitrate == qbitrate:
            video_url5 = video_index.get("uri")
    # NOTE(review): if the playlist list is empty, lvideo_url5 is unbound
    # here and this raises NameError — confirm the manifest always has
    # at least one video variant.
    if video_url5 is None:
        video_url5 = lvideo_url5
    # Variant URIs are relative to the master playlist location.
    finalurl = video_url3.rsplit("/", 1)[0] + "/" + video_url5
    item = xbmcgui.ListItem(path=finalurl)
    if qbitrate is not None:
        item.setThumbnailImage(_common.args.thumb)
        item.setInfo(
            "Video",
            {
                "title": _common.args.name,
                "season": _common.args.season_number,
                "episode": _common.args.episode_number,
                "TVShowTitle": _common.args.show_title,
            },
        )
    xbmcplugin.setResolvedUrl(pluginHandle, True, item)
def masterlist():
    """Return (name, site, mode, url) tuples for shows with free videos."""
    def _free_count(count):
        # 'video' may be a single dict or a list of per-access-level dicts
        # (the original handled this with duplicated try/except blocks);
        # only the access-level-0 (free) entry contributes to the count.
        video = count['video']
        if isinstance(video, list):
            video = video[0]
        if int(video['@accesslevel']) == 0:
            return int(video['$'])
        return 0
    master_db = []
    master_data = _connection.getURL(SHOWS)
    master_menu = simplejson.loads(master_data)['shows']['show']
    for master_item in master_menu:
        fullepisodes = 0
        clips = 0
        if (int(master_item['clips']['count']['@total']) +
                int(master_item['fullepisodes']['count']['@total'])) > 0:
            if int(master_item['clips']['count']['@total']) > 0:
                clips = _free_count(master_item['clips']['count'])
            if int(master_item['fullepisodes']['count']['@total']) > 0:
                fullepisodes = _free_count(master_item['fullepisodes']['count'])
            if (fullepisodes + clips) > 0:
                master_name = master_item['title']
                season_url = master_item['@id']
                master_db.append((master_name, SITE, 'seasons', season_url))
    return master_db
def convert_subtitles(closedcaption):
    """Convert a TTML-style caption document to SRT at _common.SUBTITLE.

    Consecutive <p> elements sharing start AND end times are merged into
    one cue; malformed lines are skipped.
    """
    str_output = ''
    subtitle_data = _connection.getURL(closedcaption, connectiontype=0)
    subtitle_data = BeautifulSoup(subtitle_data, 'html.parser',
                                  parse_only=SoupStrainer('div'))
    lines = subtitle_data.find_all('p')
    i = 0
    last_start_time = ''
    last_end_time = ''
    for line in lines:
        try:
            if line is not None:
                sub = clean_subs(_common.smart_utf8(line))
                # TTML uses '.' before milliseconds; SRT wants ','.
                start_time = _common.smart_utf8(line['begin'].replace('.', ','))
                end_time = _common.smart_utf8(line['end'].replace('.', ','))
                if start_time != last_start_time and end_time != last_end_time:
                    # New cue: emit index and timing header.
                    str_output += '\n' + str(
                        i + 1
                    ) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n'
                    i = i + 1
                    last_end_time = end_time
                    last_start_time = start_time
                else:
                    # Same timing as the previous cue: append the text to it.
                    str_output += sub + '\n\n'
        except Exception:
            # Best-effort: skip malformed caption lines rather than aborting.
            pass
    # Context manager guarantees the file is closed (and 'file' no longer
    # shadows the builtin); the unused 'srt_output' variable was removed.
    with open(_common.SUBTITLE, 'w') as subtitle_file:
        subtitle_file.write(str_output)
def rootlist():
    """Add a show entry for every show that has at least one free video."""
    def _free_count(count):
        # 'video' may be a single dict or a list of per-access-level dicts
        # (the original handled this with duplicated try/except blocks);
        # only the access-level-0 (free) entry contributes to the count.
        video = count['video']
        if isinstance(video, list):
            video = video[0]
        if int(video['@accesslevel']) == 0:
            return int(video['$'])
        return 0
    root_data = _connection.getURL(SHOWS)
    root_menu = simplejson.loads(root_data)['shows']['show']
    for root_item in root_menu:
        fullepisodes = 0
        clips = 0
        if (int(root_item['clips']['count']['@total']) +
                int(root_item['fullepisodes']['count']['@total'])) > 0:
            if int(root_item['clips']['count']['@total']) > 0:
                clips = _free_count(root_item['clips']['count'])
            if int(root_item['fullepisodes']['count']['@total']) > 0:
                fullepisodes = _free_count(root_item['fullepisodes']['count'])
            if (fullepisodes + clips) > 0:
                root_name = root_item['title']
                season_url = root_item['@id']
                _common.add_show(root_name, SITE, 'seasons', season_url)
    _common.set_view('tvshows')
def seasons(SITE, BRANDID):
    """List full-episode ('lf') and clip ('sf') season directories for a show."""
    xbmcplugin.addSortMethod(pluginHandle, xbmcplugin.SORT_METHOD_LABEL)
    season_url = _common.args.url
    season_menu = []
    season_numbers = []
    clip_numbers = []
    def _add_season(season_item, seen, label):
        # Add one directory per distinct season id; entries without a usable
        # season id are skipped (best-effort, as in the original).
        try:
            season_id = season_item['season']['@id']
            if season_id not in seen:
                seen.append(season_id)
                list_url = (VIDEOLIST % BRANDID + '001/' + season_item['@type'] +
                            '/' + season_url + '/' + season_id + '/-1/-1/-1')
                _common.add_directory(label + season_id, SITE, 'episodes', list_url)
        except Exception:
            pass
    season_url2 = VIDEOLIST % BRANDID + '001/-1/' + season_url + '/-1/-1/-1/-1'
    season_data = _connection.getURL(season_url2)
    season_data2 = simplejson.loads(season_data)['videos']
    season_count = int(season_data2['@count'])
    # A single result is returned as a dict, multiple as a list.
    if season_count > 1:
        season_menu = season_data2['video']
    elif season_count == 1:
        season_menu.append(dict(season_data2['video']))
    for season_item in season_menu:
        if int(season_item['@accesslevel']) == 0:
            if season_item['@type'] == 'lf':
                _add_season(season_item, season_numbers, 'Season ')
            elif season_item['@type'] == 'sf':
                _add_season(season_item, clip_numbers, 'Season Clips ')
    _common.set_view('seasons')
def episodes(episode_url = _common.args.url):
    """List free (access level 0) videos from a JSON video listing.

    Falls through several airdate/number/thumbnail formats since the feed
    returns single items as dicts and multiples as lists.
    """
    episode_menu = []
    episode_data = _connection.getURL(episode_url)
    episode_data2 = simplejson.loads(episode_data)['videos']
    episode_count = int(episode_data2['@count'])
    # A single result is returned as a dict, multiple as a list.
    if episode_count > 1:
        episode_menu = episode_data2['video']
    elif episode_count == 1:
        episode_menu.append(dict(episode_data2['video']))
    for episode_item in episode_menu:
        if int(episode_item['@accesslevel']) == 0:
            highest_height = -1
            episode_name = episode_item['title']
            # Duration field is in milliseconds; converted to minutes here.
            episode_duration = int(episode_item['duration']['$'])/60000
            season_number = episode_item['season']['@id']
            episode_id = episode_item['@id']
            episode_type = episode_item['@type']
            try:
                episode_description = _common.replace_signs(episode_item['longdescription'])
            except:
                episode_description = _common.replace_signs(episode_item['description'])
            # Airdate may be a single string or a list of strings.
            try:
                episode_airdate = episode_item['airdates']['airdate'].rsplit(' ',1)[0]
                episode_airdate = _common.format_date(episode_airdate,'%a, %d %b %Y %H:%M:%S', '%d.%m.%Y')
            except:
                try:
                    episode_airdate = episode_item['airdates']['airdate'][0].rsplit(' ',1)[0]
                    episode_airdate = _common.format_date(episode_airdate,'%a, %d %b %Y %H:%M:%S', '%d.%m.%Y')
                except:
                    episode_airdate = None
            try:
                episode_number = episode_item['number']
            except:
                episode_number = None
            # Pick the largest thumbnail; a missing '@width' falls back to a
            # type-based guess, and a single-thumbnail dict is used directly.
            try:
                for episode_picture in episode_item['thumbnails']['thumbnail']:
                    try:
                        picture_height = int(episode_picture['@width'])
                    except:
                        if episode_picture['@type'] == 'source-16x9':
                            picture_height = 720
                        else:
                            picture_height = 0
                    if picture_height > highest_height:
                        highest_height = picture_height
                        episode_thumb = episode_picture['$']
            except:
                episode_thumb = episode_item['thumbnails']['thumbnail']['$']
            u = sys.argv[0]
            u += '?url="' + urllib.quote_plus(episode_id) + '#' + urllib.quote_plus(episode_type) + '"'
            u += '&mode="' + SITE + '"'
            u += '&sitemode="play_video"'
            infoLabels={'title' : episode_name, 'plot' : episode_description, 'premiered' : episode_airdate, 'duration' : episode_duration, 'episode' : episode_number, 'season' : season_number }
            _common.add_video(u, episode_name, episode_thumb, infoLabels = infoLabels)
    _common.set_view('episodes')
def convert_subtitles(closedcaption):
    """Fetch a caption document, clean it, and write it to _common.SUBTITLESMI."""
    subtitle_data = _connection.getURL(closedcaption, connectiontype = 0)
    subtitle_data = clean_subs(_common.smart_utf8(subtitle_data))
    # Context manager guarantees the file is closed; also removes the
    # unused 'str_output' local and the 'file' builtin shadow.
    with open(_common.SUBTITLESMI, 'w') as subtitle_file:
        subtitle_file.write(subtitle_data)
def episodes(episode_url = _common.args.url):
    """List episodes from a thePlatform-style feed ('entries' JSON)."""
    episode_data = _connection.getURL(episode_url)
    episode_menu = simplejson.loads(episode_data)['entries']
    for i, episode_item in enumerate(episode_menu):
        url = episode_item['media$content'][0]['plfile$url']
        episode_duration = int(episode_item['media$content'][0]['plfile$duration'])
        episode_plot = episode_item['description']
        # pubDate is epoch milliseconds.
        episode_airdate = _common.format_date(epoch = episode_item['pubDate']/1000)
        episode_name = episode_item['title']
        # Season/episode keys are indexed per entry ('pl1$season', 'pl2$season', ...).
        try:
            season_number = int(episode_item['pl' + str(i + 1) + '$season'][0])
        except Exception:
            season_number = -1
        try:
            episode_number = int(episode_item['pl' + str(i + 1) + '$episode'][0])
        except Exception:
            episode_number = -1
        try:
            episode_thumb = episode_item['plmedia$defaultThumbnailUrl']
        except Exception:
            episode_thumb = None
        u = sys.argv[0]
        u += '?url="' + urllib.quote_plus(url) + '"'
        u += '&mode="' + SITE + '"'
        u += '&sitemode="play_video"'
        infoLabels = {
            'title' : episode_name,
            'durationinseconds' : episode_duration,
            'season' : season_number,
            'episode' : episode_number,
            'plot' : episode_plot,
            'premiered' : episode_airdate
        }
        _common.add_video(u, episode_name, episode_thumb, infoLabels = infoLabels)
    _common.set_view('episodes')
def episodes(episode_url = _common.args.url):
    """Build the clip listing from the site's JSON search results."""
    raw = _connection.getURL(episode_url, header = {'X-Forwarded-For' : '12.13.14.15'})
    for clip in simplejson.loads(raw)['results']:
        try:
            show_name = clip['seriesTitle']
        except:
            show_name = ''
        episode_name = clip['title']
        url = 'http://legacy.nick.com/videos/clip/%s.html' % clip['urlKey']
        episode_plot = clip['description']
        image = clip['images'][0]['assets'][0]['path']
        episode_thumb = 'http://nick.com' + image
        try:
            episode_duration = _common.format_seconds(clip['duration'])
        except:
            episode_duration = -1
        u = sys.argv[0]
        u += '?url="' + urllib.quote_plus(url) + '"'
        u += '&mode="' + SITE + '"'
        u += '&sitemode="play_video"'
        infoLabels = {
            'title' : episode_name,
            'plot' : episode_plot,
            'durationinseconds' : episode_duration,
            'tvshowtitle' : show_name
        }
        _common.add_video(u, episode_name, episode_thumb,
                          infoLabels = infoLabels, quality_mode = 'list_qualities')
    _common.set_view('episodes')
def convert_subtitles(closedcaption):
    """Convert a caption document to SRT at _common.SUBTITLE.

    Timestamps look like 'HH:MM:SS:mmm'; the last ':' is turned into the
    ',' separator SRT expects. Lines without a parsable end time are skipped.
    """
    str_output = ''
    subtitle_data = _connection.getURL(closedcaption, connectiontype=0)
    subtitle_data = BeautifulSoup(subtitle_data, 'html.parser',
                                  parse_only=SoupStrainer('div'))
    lines = subtitle_data.find_all('p')
    for i, line in enumerate(lines):
        if line is not None:
            sub = clean_subs(_common.smart_utf8(line))
            start_time_rest, start_time_msec = line['begin'].rsplit(':', 1)
            start_time = _common.smart_utf8(start_time_rest + ',' + start_time_msec)
            try:
                end_time_rest, end_time_msec = line['end'].rsplit(':', 1)
                end_time = _common.smart_utf8(end_time_rest + ',' + end_time_msec)
            except Exception:
                # No usable end time: drop the cue entirely.
                continue
            str_output += str(
                i + 1
            ) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
    # Context manager guarantees the file is closed ('file' previously
    # shadowed the builtin and leaked on write errors).
    with open(_common.SUBTITLE, 'w') as subtitle_file:
        subtitle_file.write(str_output)
def seasons(season_url = _common.args.url):
    """List full-episode ('lf') and clip ('sf') season directories for a show."""
    xbmcplugin.addSortMethod(pluginHandle, xbmcplugin.SORT_METHOD_LABEL)
    season_menu = []
    season_numbers = []
    clip_numbers = []
    def _add_season(season_item, seen, label):
        # Add one directory per distinct season id; entries without a usable
        # season id are skipped (best-effort, as in the original).
        try:
            season_id = season_item['season']['@id']
            if season_id not in seen:
                seen.append(season_id)
                list_url = (VIDEOLIST + season_item['@type'] + '/' + season_url +
                            '/' + season_id + '/-1/-1/-1')
                _common.add_directory(label + season_id, SITE, 'episodes', list_url)
        except Exception:
            pass
    season_url2 = VIDEOLIST + '-1/' + season_url + '/-1/-1/-1/-1'
    season_data = _connection.getURL(season_url2)
    season_data2 = simplejson.loads(season_data)['videos']
    season_count = int(season_data2['@count'])
    # A single result is returned as a dict, multiple as a list.
    if season_count > 1:
        season_menu = season_data2['video']
    elif season_count == 1:
        season_menu.append(dict(season_data2['video']))
    for season_item in season_menu:
        if int(season_item['@accesslevel']) == 0:
            if season_item['@type'] == 'lf':
                _add_season(season_item, season_numbers, 'Season ')
            elif season_item['@type'] == 'sf':
                _add_season(season_item, clip_numbers, 'Season Clips ')
    _common.set_view('seasons')
def convert_subtitles(closedcaption):
    """Convert a caption document to SRT at SUBTITLE; returns True on completion.

    NOTE(review): the hour field is decremented by one — the source feed
    apparently uses a 1-based (or offset) hour; confirm against real data.
    """
    str_output = ''
    subtitle_data = _connection.getURL(closedcaption, connectiontype=0)
    subtitle_data = BeautifulSoup(subtitle_data, 'html.parser',
                                  parse_only=SoupStrainer('div'))
    lines = subtitle_data.find_all('p')
    for i, line in enumerate(lines):
        if line is not None:
            sub = clean_subs(_common.smart_utf8(line))
            start_time_hours, start_time_rest = line['begin'].split(':', 1)
            start_time_hours = '%02d' % (int(start_time_hours) - 1)
            # '.' before milliseconds becomes ',' for SRT.
            start_time = _common.smart_utf8(start_time_hours + ':' +
                                            start_time_rest.replace('.', ','))
            end_time_hours, end_time_rest = line['end'].split(':', 1)
            end_time_hours = '%02d' % (int(end_time_hours) - 1)
            end_time = _common.smart_utf8(end_time_hours + ':' +
                                          end_time_rest.replace('.', ','))
            str_output += str(
                i + 1
            ) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
    # Context manager guarantees the file is closed ('file' previously
    # shadowed the builtin and leaked on write errors).
    with open(SUBTITLE, 'w') as subtitle_file:
        subtitle_file.write(str_output)
    return True
def episodes(episode_url = _common.args.url):
    """Add a video entry for every feed item that carries video data."""
    listing = simplejson.loads(_connection.getURL(episode_url))
    for item in listing['items']:
        if not item['videos']:
            continue
        url = item['guid']
        episode_name = item['title']
        episode_plot = item['description']
        episode_airdate = _common.format_date(item['airdate'],
                                              '%Y-%m-%d %H:%M:%S', '%d.%m.%Y')
        # 'length' is reported in milliseconds.
        episode_duration = int(item['videos'].itervalues().next()['length']) / 1000
        # Prefer the 16x9 mezzanine, then 4x3, then the generic image.
        try:
            episode_thumb = item['images']['kids-mezzannine-16x9']['url']
        except:
            try:
                episode_thumb = item['images']['kids-mezzannine-4x3']['url']
            except:
                episode_thumb = item['images']['mezzanine']['url']
        u = sys.argv[0]
        u += '?url="' + urllib.quote_plus(url) + '"'
        u += '&mode="' + SITE + '"'
        u += '&sitemode="play_video"'
        infoLabels = {
            'title' : episode_name,
            'durationinseconds' : episode_duration,
            'plot' : episode_plot,
            'premiered' : episode_airdate
        }
        _common.add_video(u, episode_name, episode_thumb, infoLabels = infoLabels)
    _common.set_view('episodes')
def episodes():
    """List videos returned by the site's AJAX video-browser module."""
    query = {
        'video_browser_action': 'filter',
        'params[type]': 'all',
        'params[filter]': _common.args.url,
        'params[page]': '1',
        'params[post_id]': '71306',
        'module_id_base': 'rb-video-browser'
    }
    response = _connection.getURL(VIDEOURL, query)
    # The JSON payload wraps an HTML fragment containing the listing.
    markup = simplejson.loads(response)['html']['date']
    for entry in BeautifulSoup(markup).find_all('li'):
        thumb_tag = entry.a.img
        episode_name = thumb_tag['title']
        episode_plot = thumb_tag['alt'].replace('/n', ' ')
        episode_thumb = thumb_tag['src']
        url = entry.a['href']
        u = sys.argv[0]
        u += '?url="' + urllib.quote_plus(url) + '"'
        u += '&mode="' + SITE + '"'
        u += '&sitemode="play_video"'
        infoLabels = {'title': episode_name, 'plot': episode_plot}
        _common.add_video(u, episode_name, episode_thumb, infoLabels=infoLabels)
    _common.set_view('episodes')
def play(video_uri = _common.args.url):
    """Resolve an on-site page to its canonical mtvn URI, then hand off playback."""
    if BASE in video_uri:
        # On-site pages embed the canonical URI in a meta tag.
        page = _connection.getURL(video_uri)
        video_uri = BeautifulSoup(page).find('meta', {'name' : 'mtvn_uri'})['content']
    _main_viacom.play_video(BASE, video_uri)
def play_video(video_url=_common.args.url):
    """Resolve a Brightcove-hosted video and play its largest rendition."""
    stored_size = 0
    video_data = _connection.getURL(video_url)
    video_tree = BeautifulSoup(video_data, 'html5lib')
    # Brightcove player parameters embedded in the page's <param> tags.
    video_player_key = video_tree.find('param', attrs={'name': 'playerKey'})['value']
    video_content_id = video_tree.find('param', attrs={'name': '@videoPlayer'})['value']
    video_player_id = video_tree.find('param', attrs={'name': 'playerID'})['value']
    renditions = get_episode_info(video_player_key, video_content_id, video_url,
                                  video_player_id)
    # Default to the full-length FLV; replaced below by the biggest rendition.
    video_url2 = renditions['programmedContent']['videoPlayer']['mediaDTO'][
        'FLVFullLengthURL']
    for item in sorted(renditions['programmedContent']['videoPlayer']
                       ['mediaDTO']['renditions'],
                       key=lambda item: item['frameHeight'],
                       reverse=False):
        stream_size = item['size']
        # NOTE(review): stored_size keeps the raw item['size'] value, not
        # int(stream_size); if 'size' is a string this comparison misbehaves
        # after the first iteration — confirm the field's type.
        if (int(stream_size) > stored_size):
            video_url2 = item['defaultURL']
            stored_size = stream_size
    # Rebuild the '&'-packed rendition URL into 'app?query playpath=path'
    # form for the RTMP input; fall back to the two-part variant.
    try:
        finalurl = video_url2.split('&', 2)[0] + '?' + video_url2.split(
            '&', 2)[2] + ' playpath=' + video_url2.split('&', 2)[1]
    except:
        finalurl = video_url2.split(
            '&', 1)[0] + ' playpath=' + video_url2.split('&', 1)[1]
    xbmcplugin.setResolvedUrl(pluginHandle, True, xbmcgui.ListItem(path=finalurl))
def add_master_shows(url, doubles = None, master_db = None):
    """Collect shows from the A-Z listing pages, recursing through paging.

    Returns (master_db, doubles). BUG FIX: the accumulators previously used
    mutable default arguments (doubles=[], master_db=[]), so results leaked
    between independent top-level calls; they now default to fresh lists.
    """
    if doubles is None:
        doubles = []
    if master_db is None:
        master_db = []
    master_dict = {}
    # Strip a trailing ' (Season N)' suffix from show titles; compiled once
    # instead of per item, and as a raw string.
    season = re.compile(r' \(Season \d+\)')
    for i in range(ord('a') - 1, ord('z') + 1):
        # First iteration uses the main SHOWS page, then one page per letter.
        if i < ord('a'):
            url = SHOWS
        else:
            url = SHOWSAZ % chr(i)
        master_data = _connection.getURL(url)
        master_tree = BeautifulSoup(master_data, 'html5lib')
        master_menu = master_tree.find_all('a', attrs = {'data-report' : 'SHOWS_HUB:SHOWS_AZ:SHOW'})
        for master_item in master_menu:
            master_name = season.sub('', master_item.text).strip()
            season_url = master_item['href'].replace('series.jhtml', 'video.jhtml?sort=descend')
            if BASE not in season_url:
                season_url = BASE + season_url
            # Deduplicate by the URL prefix preceding any 'season' segment.
            dedup_key = season_url.split('season')[0].replace('video.jhtml?sort=descend', '')
            if dedup_key not in doubles:
                tvdb_name = _common.get_show_data(master_name, SITE, 'seasons')[-1]
                # A show split across several pages accumulates a
                # comma-separated list of listing URLs.
                if tvdb_name not in master_dict.keys():
                    master_dict[tvdb_name] = season_url
                else:
                    master_dict[tvdb_name] = master_dict[tvdb_name] + ',' + season_url
                doubles.append(dedup_key)
    for master_name, season_url in master_dict.iteritems():
        master_db.append((master_name, SITE, 'seasons', season_url))
    # Follow the pager of the last fetched tree ('next' renamed so it no
    # longer shadows the builtin).
    next_link = master_tree.find('a', class_ = 'page-next')
    if next_link:
        master_db, doubles = add_master_shows(BASE + next_link['href'], doubles, master_db)
    return master_db, doubles
def episodes(episode_url=_common.args.url):
    """Populate the episode listing from the site's HTML-fragment response."""
    listing_json = _connection.getURL(EPISODES + episode_url)
    fragment = simplejson.loads(listing_json)['list_html']
    video_list = BeautifulSoup(fragment, 'html.parser').find('ul', id='videoList_ul')
    if video_list:
        for entry in video_list.find_all('li', recursive=False):
            # The <li> id is 'videoX<id>'; the description/title elements
            # reuse the bare id as a suffix.
            vid = entry['id'][6:]
            episode_thumb = entry.img['src']
            episode_name = entry.span.string
            episode_plot = entry.find(id='viddesc_' + vid).string
            show_name = entry.find(id='vidtitle_' + vid).string
            u = sys.argv[0]
            u += '?url="' + urllib.quote_plus(vid) + '"'
            u += '&mode="' + SITE + '"'
            u += '&sitemode="play_video"'
            infoLabels = {
                'title': episode_name,
                'plot': episode_plot,
                'tvshowtitle': show_name
            }
            _common.add_video(u, episode_name, episode_thumb, infoLabels=infoLabels)
    _common.set_view('episodes')
def convert_subtitles(video_guid):
    """Convert a JSON caption feed to SRT at SUBTITLE; returns True on completion."""
    str_output = ''
    subtitle_data = _connection.getURL(CLOSEDCAPTION % video_guid, connectiontype=0)
    subtitle_data = simplejson.loads(subtitle_data)
    for i, subtitle_line in enumerate(subtitle_data):
        if subtitle_line is not None:
            sub = _common.smart_utf8(subtitle_line['metadata']['Text'])
            # Times arrive as 'seconds.fraction'; convert to HH:MM:SS and keep
            # the first two fractional digits as the SRT millisecond field.
            # NOTE(review): assumes a fractional part is always present —
            # confirm against the feed.
            start_time = _common.smart_utf8(str(
                subtitle_line['startTime'])).split('.')
            start_minutes, start_seconds = divmod(int(start_time[0]), 60)
            start_hours, start_minutes = divmod(start_minutes, 60)
            start_time = '%02d:%02d:%02d,%02d' % (start_hours, start_minutes,
                                                  start_seconds,
                                                  int(start_time[1][0:2]))
            end_time = _common.smart_utf8(str(
                subtitle_line['endTime'])).split('.')
            end_minutes, end_seconds = divmod(int(end_time[0]), 60)
            end_hours, end_minutes = divmod(end_minutes, 60)
            end_time = '%02d:%02d:%02d,%02d' % (
                end_hours, end_minutes, end_seconds, int(end_time[1][0:2]))
            str_output += str(
                i + 1
            ) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
    # Context manager guarantees the file is closed ('file' previously
    # shadowed the builtin and leaked on write errors).
    with open(SUBTITLE, 'w') as subtitle_file:
        subtitle_file.write(str_output)
    return True
def play_video(video_url=_common.args.url):
    """Resolve the 'limelight700' RTMP stream for a video and start playback."""
    meta = _connection.getURL(VIDEOURL % video_url.split('/')[-1])
    stream_uri = simplejson.loads(meta)['videos']['limelight700']['uri']
    # Split the URI at the mp4 playpath so it can be handed to the RTMP input.
    parts = stream_uri.split('mp4:')
    final_url = parts[0] + ' playpath=mp4:' + parts[1]
    xbmcplugin.setResolvedUrl(pluginHandle, True, xbmcgui.ListItem(path=final_url))
def episodes(episode_url=_common.args.url):
    """Scrape the <article> tiles on an episode listing page."""
    page = _connection.getURL(episode_url, forwardheader='12.13.14.15')
    for tile in BeautifulSoup(page).find_all('article'):
        show_name = tile.find('p', class_='show-name').text
        episode_name = tile.find('p', class_='short-title').text
        url = BASE + tile.find('a')['href']
        episode_plot = _common.replace_signs(
            tile.find('p', class_='description').text)
        try:
            episode_thumb = tile.find('img', class_='thumbnail')['src']
        except:
            episode_thumb = None
        # Duration is parsed but (as in the original) not forwarded to Kodi.
        try:
            duration = tile.find('span', class_='duration').text.replace(
                'Duration:', '')
        except:
            duration = None
        u = sys.argv[0]
        u += '?url="' + urllib.quote_plus(url) + '"'
        u += '&mode="' + SITE + '"'
        u += '&sitemode="play_video"'
        infoLabels = {
            'title': episode_name,
            'plot': episode_plot,
            'tvshowtitle': show_name
        }
        _common.add_video(u, episode_name, episode_thumb, infoLabels=infoLabels)
    _common.set_view('episodes')
def list_qualities(video_url = _common.args.url):
    """Extract the media URI from a video page and delegate quality listing."""
    page = _connection.getURL(video_url, header = {'X-Forwarded-For' : '12.13.14.15'})
    try:
        # Regular pages expose the player SWF via an og:video meta tag.
        media_uri = re.findall('<meta content="http://media.mtvnservices.com/fb/(.+?).swf" property="og:video"/>', page)[0]
    except:
        # Locked pages expose it through the NICK.unlock JS variable instead.
        media_uri = re.findall("NICK.unlock.uri = '(.+?)';", page)[0]
    return _main_viacom.list_qualities(BASE, media_uri, media_base = BASE2)
def episodes(SITE):
    """List free (access level 0) videos from a JSON video listing.

    Falls through several airdate/number/thumbnail formats since the feed
    returns single items as dicts and multiples as lists.
    """
    episode_menu = []
    episode_data = _connection.getURL(_common.args.url)
    episode_data2 = simplejson.loads(episode_data)['videos']
    episode_count = int(episode_data2['@count'])
    # A single result is returned as a dict, multiple as a list.
    if episode_count > 1:
        episode_menu = episode_data2['video']
    elif episode_count == 1:
        episode_menu.append(dict(episode_data2['video']))
    for episode_item in episode_menu:
        if int(episode_item['@accesslevel']) == 0:
            highest_height = -1
            episode_name = episode_item['title']
            # Duration field is in milliseconds; converted to seconds here.
            episode_duration = int(episode_item['duration']['$']) / 1000
            season_number = episode_item['season']['@id']
            episode_id = episode_item['@id']
            episode_type = episode_item['@type']
            try:
                episode_description = _common.replace_signs(episode_item['longdescription'])
            except:
                episode_description = _common.replace_signs(episode_item['description'])
            # Airdate may be a single string or a list of strings.
            try:
                episode_airdate = episode_item['airdates']['airdate'].rsplit(' ',1)[0]
                episode_airdate = _common.format_date(episode_airdate,'%a, %d %b %Y %H:%M:%S', '%d.%m.%Y')
            except:
                try:
                    episode_airdate = episode_item['airdates']['airdate'][0].rsplit(' ',1)[0]
                    episode_airdate = _common.format_date(episode_airdate,'%a, %d %b %Y %H:%M:%S', '%d.%m.%Y')
                except:
                    episode_airdate = -1
            try:
                episode_number = episode_item['number']
            except:
                episode_number = -1
            # Pick the largest thumbnail; a missing '@width' falls back to a
            # type-based guess, and a single-thumbnail dict is used directly.
            try:
                for episode_picture in episode_item['thumbnails']['thumbnail']:
                    try:
                        picture_height = int(episode_picture['@width'])
                    except:
                        if episode_picture['@type'] == 'source-16x9':
                            picture_height = 720
                        else:
                            picture_height = 0
                    if picture_height > highest_height:
                        highest_height = picture_height
                        episode_thumb = episode_picture['$']
            except:
                episode_thumb = episode_item['thumbnails']['thumbnail']['$']
            u = sys.argv[0]
            u += '?url="' + urllib.quote_plus(episode_id) + '#' + urllib.quote_plus(episode_type) + '"'
            u += '&mode="' + SITE + '"'
            u += '&sitemode="play_video"'
            infoLabels={'title' : episode_name, 'plot' : episode_description, 'premiered' : episode_airdate, 'durationinseconds' : episode_duration, 'episode' : episode_number, 'season' : season_number }
            _common.add_video(u, episode_name, episode_thumb, infoLabels = infoLabels)
    _common.set_view('episodes')
def list_qualities(video_uri = _common.args.url):
    """Resolve an on-site page to its canonical mtvn URI and list qualities."""
    if BASE in video_uri:
        # On-site pages embed the canonical URI in a meta tag.
        page = _connection.getURL(video_uri)
        video_uri = BeautifulSoup(page).find('meta', {'name' : 'mtvn_uri'})['content']
    return _main_viacom.list_qualities(BASE, video_uri)
def masterlist(SITE, BRANDID):
    """Return (name, site, mode, url) tuples for brand shows with free videos."""
    def _free_count(count):
        # 'video' may be a single dict or a list of per-access-level dicts
        # (the original handled this with duplicated try/except blocks);
        # only the access-level-0 (free) entry contributes to the count.
        video = count['video']
        if isinstance(video, list):
            video = video[0]
        if int(video['@accesslevel']) == 0:
            return int(video['$'])
        return 0
    master_db = []
    master_data = _connection.getURL(SHOWS % BRANDID)
    master_menu = simplejson.loads(master_data)['shows']['show']
    for master_item in master_menu:
        fullepisodes = 0
        clips = 0
        if (int(master_item['clips']['count']['@total']) +
                int(master_item['fullepisodes']['count']['@total'])) > 0:
            if int(master_item['clips']['count']['@total']) > 0:
                clips = _free_count(master_item['clips']['count'])
            if int(master_item['fullepisodes']['count']['@total']) > 0:
                fullepisodes = _free_count(master_item['fullepisodes']['count'])
            if (fullepisodes + clips) > 0:
                master_name = master_item['title'].strip()
                season_url = master_item['@id']
                master_db.append((master_name, SITE, 'seasons', season_url))
    return master_db
def masterlist():
    """Return (name, site, mode, url) tuples for all non-[AD] collections.

    The season url packs the collection id plus '#tveepisodes=' and '#clips='
    suffixes listing '-N' season numbers ('*' for unnumbered clip seasons).
    """
    master_db = []
    master_data = _connection.getURL(SHOWS)
    master_tree = BeautifulSoup(master_data, 'html.parser')
    master_menu = master_tree.allcollections.find_all('collection')
    for master_item in master_menu:
        master_name = _common.smart_utf8(master_item['name'])
        if '[AD]' not in master_name:
            # NOTE(review): result was never used ('tvdb_name' local removed);
            # the call is kept in case get_show_data caches show metadata —
            # confirm, then drop if it is side-effect free. The unused
            # 'master_dict' local was removed as well.
            _common.get_show_data(master_name, SITE, 'seasons')
            season_url = master_item['id']
            season_url = season_url + '#tveepisodes='
            try:
                for season in master_item.tveepisodes.find_all('season'):
                    season_url = season_url + '-' + season['number']
            except Exception:
                pass
            season_url = season_url + '#clips='
            try:
                for season in master_item.clips.find_all('season'):
                    if season['number'] != '':
                        season_url = season_url + '-' + season['number']
                    else:
                        season_url = season_url + '-' + '*'
            except Exception:
                pass
            master_db.append((master_name, SITE, 'seasons', season_url))
    return master_db
def convert_subtitles(closedcaption, durations = None):
    """Write one SRT file per caption segment to the addon cache directory.

    closedcaption is an iterable of elements with a 'src' attribute, one per
    video segment; output files are named 'subtitle-<n>.srt'. The 'durations'
    parameter is kept for interface compatibility but unused (its previous
    mutable default '[]' was a latent shared-state hazard).
    """
    str_output = ''
    j = 0
    count = 0
    for closedcaption_url in closedcaption:
        count = count + 1
        subtitle_data = _connection.getURL(closedcaption_url['src'], connectiontype=0)
        subtitle_data = BeautifulSoup(subtitle_data, 'html.parser',
                                      parse_only=SoupStrainer('div'))
        lines = subtitle_data.find_all('p')
        for i, line in enumerate(lines):
            if line is not None:
                sub = clean_subs(_common.smart_utf8(line))
                # Timestamps carry a trailing character that is dropped;
                # '.' before milliseconds becomes ',' for SRT.
                start_time = _common.smart_utf8(line['begin'][:-1].replace(
                    '.', ','))
                end_time = _common.smart_utf8(line['end'][:-1].replace(
                    '.', ','))
                # Cue numbering continues across segments via j.
                str_output += str(
                    j + i + 1
                ) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
        # NOTE(review): like the original, this references the inner loop's
        # 'i' after the loop — a segment with no <p> lines raises NameError
        # (or reuses a stale i); confirm segments are never empty.
        j = j + i + 1
        # Context manager guarantees the file is closed ('file' previously
        # shadowed the builtin and leaked on write errors).
        with open(os.path.join(_common.CACHEPATH,
                               'subtitle-%s.srt' % int(count)), 'w') as subtitle_file:
            subtitle_file.write(str_output)
        str_output = ''