def episodes_json(SITE):
    episode_url = common.args.url
    master_name = episode_url.split('#')[0]
    episode_url = episode_url.split('#')[1]
    episode_data = connection.getURL(episode_url)
    episode_menu = simplejson.loads(episode_data)
    for episode_item in episode_menu:
        url = episode_item['episodeID']
        try:
            episode_duration = episode_item['length']
        except:
            episode_duration = -1
        try:
            episode_airdate = common.format_date(episode_item['airDate'].split('on ')[1], '%B %d, %Y')
        except:
            episode_airdate = -1
        try:
            episode_plot = episode_item['summary']
        except:
            episode_plot = episode_item['shortdescription']
        episode_name = episode_item['title']
        if episode_name == master_name:
            # Feed titles that just repeat the show name are replaced with the
            # headline scraped from the episode page.
            video_url = EPISODE % url
            video_data = connection.getURL(video_url)
            video_tree = BeautifulSoup(video_data, 'html.parser')
            episode_name = video_tree.headline.string
        elif episode_name == "":
            episode_name = episode_plot
        try:
            season_number = int(episode_item['identifier'].split(',')[0].split(' ')[1])
        except:
            season_number = -1
        try:
            episode_number = int(episode_item['identifier'].split(', ')[1].split(' ')[1].replace(' Episode ', ''))
        except:
            try:
                episode_number = int(episode_item['identifier'].split(', ')[1].split(' ')[1])
            except:
                episode_number = -1
        if episode_number > 100:
            # Treat implausibly large episode numbers as bogus and re-derive
            # them from the share URL redirect.
            episode_number = int(re.compile(r'episode-(\d*)').findall(connection.getRedirect(episode_item['shareURL']))[0])
        try:
            episode_thumb = episode_item['640x360_jpg']
        except:
            episode_thumb = None
        u = sys.argv[0]
        u += '?url="' + urllib.quote_plus(url) + '"'
        u += '&mode="' + SITE + '"'
        u += '&sitemode="play_video"'
        infoLabels = {
            'title': episode_name,
            'durationinseconds': episode_duration,
            'season': season_number,
            'episode': episode_number,
            'plot': episode_plot,
            'premiered': episode_airdate
        }
        common.add_video(u, episode_name, episode_thumb, infoLabels=infoLabels, quality_mode='list_qualities')
    common.set_view('episodes')
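
# The season/episode parsing above assumes 'identifier' strings of the form
# "Season <n>, Episode <m>" -- an inference from the split logic, not a
# documented feed contract. A hypothetical value parses like this:
#
#   identifier = 'Season 3, Episode 12'
#   season_number = int(identifier.split(',')[0].split(' ')[1])    # -> 3
#   episode_number = int(identifier.split(', ')[1].split(' ')[1])  # -> 12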
def list_qualities(BASE, video_url=common.args.url, media_base=VIDEOURL):
    bitrates = []
    if media_base not in video_url:
        video_url = media_base + video_url
    exception = False
    if 'feed' not in video_url:
        # Resolve the player redirect and pull the uri/config parameters from
        # its query string, then build the feed URL from the config template.
        swf_url = connection.getRedirect(video_url, header={'Referer': BASE})
        params = dict(item.split("=") for item in swf_url.split('?')[1].split("&"))
        uri = urllib.unquote_plus(params['uri'])
        config_url = urllib.unquote_plus(params['CONFIG_URL'].replace('Other', DEVICE))
        config_data = connection.getURL(config_url, header={'Referer': video_url, 'X-Forwarded-For': '12.13.14.15'})
        config_tree = BeautifulSoup(config_data, 'html.parser')
        if not config_tree.error:
            feed_url = config_tree.feed.string
            feed_url = feed_url.replace('{uri}', uri).replace('&amp;', '&').replace('{device}', DEVICE).replace('{ref}', 'None').replace('{type}', 'normal').strip()
        else:
            exception = True
            error_text = config_tree.error.string.split('/')[-1].split('_')
            common.show_exception(error_text[1], error_text[2])
    else:
        feed_url = video_url
    if not exception:
        feed_data = connection.getURL(feed_url)
        video_tree = BeautifulSoup(feed_data, 'html.parser', parse_only=SoupStrainer('media:group'))
        video_segments = video_tree.find_all('media:content')
        video_segment = video_segments[0]
        video_url3 = video_segment['url'].replace('{device}', DEVICE)
        video_data3 = connection.getURL(video_url3, header={'X-Forwarded-For': '12.13.14.15'})
        video_tree3 = BeautifulSoup(video_data3, 'html.parser')
        video_menu = video_tree3.find('src').string
        # Parse the HLS master playlist and collect one (kbps, bandwidth) pair
        # per variant stream.
        m3u_master_data = connection.getURL(video_menu, savecookie=True)
        m3u_master = m3u8.parse(m3u_master_data)
        for video_index in m3u_master.get('playlists'):
            bitrate = int(video_index.get('stream_info')['bandwidth'])
            display = int(bitrate) / 1024
            bitrates.append((display, bitrate))
    return bitrates
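
# Hypothetical usage sketch (not part of the add-on): one way the
# (kbps, bandwidth) pairs returned by list_qualities() could be offered for
# manual selection. xbmcgui.Dialog().select() is standard Kodi API; the helper
# itself and the idea that the chosen bandwidth is passed on as
# common.args.quality are assumptions for illustration only.
def _example_pick_quality(bitrates):
    import xbmcgui
    choices = sorted(bitrates, reverse=True)
    labels = ['%d kbps' % display for display, bandwidth in choices]
    index = xbmcgui.Dialog().select('Select quality', labels)
    if index < 0:
        return None
    return choices[index][1]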
def play_video(BASE, video_uri=common.args.url, media_base=VIDEOURL):
    video_url = media_base + video_uri
    try:
        qbitrate = common.args.quality
    except:
        qbitrate = None
    video_url2 = 'stack://'
    closedcaption = []
    exception = False
    queue = PriorityQueue()
    segments = []
    if 'feed' in video_uri:
        feed_url = video_uri
    else:
        # Resolve the player redirect, then fetch the device-specific config
        # to build the feed URL.
        swf_url = connection.getRedirect(video_url, header={'Referer': BASE})
        params = dict(item.split("=") for item in swf_url.split('?')[1].split("&"))
        uri = urllib.unquote_plus(params['uri'])
        config_url = urllib.unquote_plus(params['CONFIG_URL'].replace('Other', DEVICE))
        config_data = connection.getURL(config_url, header={'Referer': video_url, 'X-Forwarded-For': '12.13.14.15'})
        config_tree = BeautifulSoup(config_data, 'html.parser')
        if not config_tree.error:
            feed_url = config_tree.feed.string
            uri = urllib.quote_plus(uri)
            feed_url = feed_url.replace('{uri}', uri).replace('&amp;', '&').replace('{device}', DEVICE).replace('{ref}', 'None').replace('{type}', 'network').strip()
        else:
            exception = True
            error_text = config_tree.error.string.split('/')[-1].split('_')
            if error_text[1] == 'loc':
                params = dict(item.split("=") for item in config_url.split('?')[-1].split('&'))
                common.show_exception('Geo', params['geo'])
    if not exception:
        feed_data = connection.getURL(feed_url, header={'X-Forwarded-For': '12.13.14.15'})
        video_tree = BeautifulSoup(feed_data, 'html.parser', parse_only=SoupStrainer('media:group'))
        video_segments = video_tree.find_all('media:content')
        if not video_segments:
            video_tree = BeautifulSoup(feed_data, 'html.parser')
            common.show_exception(video_tree.find('meta', property="og:site_name")['content'], video_tree.find('meta', property="og:url")['content'])
            exception = True
        # Resolve every segment on its own thread; the PriorityQueue yields the
        # results in segment order so the stack:// URL is assembled in sequence.
        threads = []
        for i, video_item in enumerate(video_segments):
            try:
                threads.append(Thread(get_videos, queue, i, video_item, qbitrate, False))
            except Exception, e:
                print "Exception: ", e
        [i.start() for i in threads]
        [i.join() for i in threads]
        while not queue.empty():
            video_data2 = queue.get()
            video_url2 += video_data2[1] + ' , '
            segments.append(video_data2[2])
            closedcaption.append((video_data2[3], int(video_data2[0])))
        player._segments_array = segments
        finalurl = video_url2[:-3]
        time.sleep(20)
        if (addon.getSetting('enablesubtitles') == 'true') and closedcaption and detect_format is not None:
            convert_subtitles(closedcaption)
            player._subtitles_Enabled = True
        item = xbmcgui.ListItem(path=finalurl)
        if player._localHTTPServer:
            filestring = 'XBMC.RunScript(' + os.path.join(ustvpaths.LIBPATH, 'proxy.py') + ', 12345)'
            xbmc.executebuiltin(filestring)
            finalurl = video_url2[:-3]
            #localhttpserver = True
            time.sleep(20)
            queue.task_done()
        try:
            item.setThumbnailImage(common.args.thumb)
        except:
            pass
        try:
            item.setInfo('Video', {
                'title': common.args.name,
                'season': common.args.season_number,
                'episode': common.args.episode_number,
                'TVShowTitle': common.args.show_title
            })
        except:
            pass
        xbmcplugin.setResolvedUrl(pluginHandle, True, item)
        while player.is_active:
            player.sleep(250)
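
# The Thread(get_videos, queue, i, video_item, qbitrate, False) calls above do
# not match threading.Thread's own constructor (which takes target= and args=),
# so they assume a thin wrapper defined elsewhere in the add-on. A minimal
# sketch of such a wrapper, under that assumption:
import threading

class Thread(threading.Thread):
    """Run target(*args) on start(); lets callers pass arguments positionally."""
    def __init__(self, target, *args):
        threading.Thread.__init__(self)
        self._target = target
        self._args = args

    def run(self):
        self._target(*self._args)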
def episodes_json(SITE, episode_url=common.args.url):
    episodes = []
    master_name = episode_url.split('#')[0]
    episode_url = episode_url.split('#')[1]
    episode_data = connection.getURL(episode_url)
    episode_menu = simplejson.loads(episode_data)
    for episode_item in episode_menu:
        url = episode_item['episodeID']
        try:
            episode_duration = episode_item['length']
        except:
            episode_duration = -1
        try:
            episode_airdate = common.format_date(episode_item['airDate'].split('on ')[1], '%B %d, %Y')
        except:
            episode_airdate = -1
        try:
            episode_plot = episode_item['summary']
        except:
            episode_plot = episode_item['shortdescription']
        episode_name = episode_item['title']
        if episode_name == master_name:
            video_url = EPISODE % url
            video_data = connection.getURL(video_url)
            video_tree = BeautifulSoup(video_data, 'html.parser')
            episode_name = video_tree.headline.string
        elif episode_name == "":
            episode_name = episode_plot
        try:
            season_number = int(episode_item['identifier'].split(',')[0].split(' ')[1])
        except:
            season_number = -1
        try:
            episode_number = int(episode_item['identifier'].split(', ')[1].split(' ')[1].replace(' Episode ', ''))
        except:
            try:
                episode_number = int(episode_item['identifier'].split(', ')[1].split(' ')[1])
            except:
                episode_number = -1
        if episode_number > 100:
            # Implausibly large episode numbers are re-derived from the share
            # URL redirect or, failing that, from the show page markup.
            try:
                episode_number = int(re.compile(r'episode-(\d*)').findall(connection.getRedirect(episode_item['shareURL']))[0])
            except:
                try:
                    web_data = connection.getURL(episode_item['shareURL'])
                    web_tree = BeautifulSoup(web_data, 'html.parser')
                    episode_number = web_tree.find('h2', text=episode_name).findNext(itemprop='episodeNumber').string
                    season_number = web_tree.find('h2', text=episode_name).findNext(itemprop='seasonNumber').string
                except:
                    pass
        try:
            episode_thumb = episode_item['640x360_jpg']
        except:
            episode_thumb = None
        episode_mpaa = episode_item['rating']
        try:
            episode_type = episode_item['type']
        except:
            episode_type = None
        if 'Movie' in master_name:
            type = 'Movie'
        elif episode_type == 1:
            type = 'Full Episode'
        else:
            type = 'Clips'
        if type != 'Movie':
            show_title = master_name
        else:
            show_title = None
        try:
            episode_year = episode_item['year']
        except:
            episode_year = None
        try:
            episode_actors = episode_item['actors'].split(',')
        except:
            episode_actors = []
        u = sys.argv[0]
        u += '?url="' + urllib.quote_plus(url) + '"'
        u += '&mode="' + SITE + '"'
        u += '&sitemode="play_video"'
        infoLabels = {
            'title': episode_name,
            'durationinseconds': episode_duration,
            'season': season_number,
            'episode': episode_number,
            'plot': episode_plot,
            'premiered': episode_airdate,
            'year': episode_year,
            'mpaa': episode_mpaa,
            'TVShowTitle': show_title,
            'cast': episode_actors
        }
        episodes.append((u, episode_name, episode_thumb, infoLabels, 'list_qualities', False, type))
    return episodes
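
# Hypothetical caller sketch: unlike the episodes_json(SITE) variant earlier in
# this file, this version returns tuples instead of adding directory items
# itself. One plausible way a menu builder could consume them, reusing the
# common.add_video()/common.set_view() calls seen above (the tuple order is
# taken from episodes.append(); the loop itself is illustrative only):
def _example_build_episode_listing(SITE):
    for u, name, thumb, infoLabels, quality_mode, playlist, type_ in episodes_json(SITE):
        common.add_video(u, name, thumb, infoLabels=infoLabels, quality_mode=quality_mode)
    common.set_view('episodes')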