Example #1
0
def list_qualities(M3UURL = None):
	"""Build the list of available bitrates for the video behind common.args.url.

	M3UURL: optional printf-style template used to build a time-limited HLS
	master URL for auth-entitled videos (filled with a caption-derived id
	and an epoch-second window).
	Returns a list of (label, bitrate) tuples, or shows an exception dialog
	when the SMIL feed reports an error.
	"""
	exception = False
	video_url = common.args.url
	video_data = connection.getURL(video_url)
	if 'link.theplatform.com' not in video_url:
		# Page URL: scrape the embedded player to locate the SMIL feed link.
		video_tree =  BeautifulSoup(video_data, 'html.parser')
		try:
			player_url = 'http:' + video_tree.find('div', class_ = 'video-player-wrapper').iframe['src']
		except:
			# Alternate page layout: player URL lives on the pdk-player div.
			player_url = 'http:' + video_tree.find('div', id = 'pdk-player')['data-src']
		player_data = connection.getURL(player_url)
		player_tree = BeautifulSoup(player_data, 'html.parser')
		video_url = player_tree.find('link', type = "application/smil+xml")['href']
		video_url = video_url + '&format=SCRIPT'
		
		script_data = connection.getURL(video_url)
		script_menu = simplejson.loads(script_data)
		if script_menu['pl1$entitlement'] != 'auth':
			bitrates,exception = smil_bitrates(video_url)
		else:
			# Auth-gated video: derive an id from the caption file name and
			# build a master m3u8 URL valid for a 60-second window.
			captions = script_menu['captions'][0]['src']
			id = re.compile('([0-9]+.[0-9]+.*).tt').findall(captions)[0]
			td = (datetime.datetime.utcnow()- datetime.datetime(1970,1,1))
			unow = int((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6)
			master_url = M3UURL % (id, str(unow), str(unow+60))
			bitrates = m3u_bitrates(master_url)
			return bitrates
			#need to set captions on player
	else:
		bitrates,exception = smil_bitrates(video_url)
	if  not exception:
		return bitrates
	else:
		# NOTE(review): video_tree is only bound in the scrape branch above;
		# reaching here from the direct theplatform branch with exception set
		# would raise NameError -- confirm intended.
		common.show_exception(video_tree.ref['title'], video_tree.ref['abstract'])
def seasons(collection_ids = common.args.url):
	"""Add 'Episodes' and 'Clips' directory entries for every collection id.

	collection_ids may be a single id or a comma-separated list; with a
	multi-id list the EXTRA url templates are used and the collection title
	is appended to each label.
	"""
	multi = ',' in collection_ids
	id_list = collection_ids.split(',')
	# First pass: full-episode collections.
	for cid in id_list:
		if multi:
			url_template = SEASONSEPISODESEXTRA
		else:
			url_template = SEASONSEPISODES
		page = connection.getURL(url_template % cid)
		tree = BeautifulSoup(page, 'html.parser')
		if int(tree.episodes['totalitems']) > 0:
			if multi:
				label = 'Episodes - %s' % tree.episode['collectiontitle']
			else:
				label = 'Episodes'
			common.add_directory(label, SITE, 'episodes', FULLEPISODES % cid)
	# Second pass: clip collections.
	for cid in id_list:
		if multi:
			url_template = SEASONSCLIPSEXTRA
		else:
			url_template = SEASONSCLIPS
		page = connection.getURL(url_template % cid)
		tree = BeautifulSoup(page, 'html.parser')
		if int(tree.episodes['totalitems']) > 0:
			if multi:
				label = 'Clips - %s' % tree.episode['collectiontitle']
			else:
				label = 'Clips'
			common.add_directory(label, SITE, 'episodes', CLIPS % cid)
	common.set_view('seasons')
Example #3
0
def select_quailty(guid = common.args.url):
	"""List selectable bitrates for the video identified by guid.

	NOTE: the 'quailty' typo in the name is kept -- callers reference this
	function by name (sitemode strings).
	RTMP preference reads bitrates straight from the 'flash' variants;
	otherwise the iPhone HLS master playlist is fetched and parsed.
	Returns a list of (label, bitrate) tuples.
	"""
	video_url =  VIDEO % guid
	sbitrate = int(addon.getSetting('quality')) * 1024
	closedcaption = None
	video_url2 = None
	video_data = connection.getURL(video_url)
	video_menu = simplejson.loads(video_data)['items']
	video_item = video_menu[0] 
	bitrates = []
	if addon.getSetting('preffered_stream_type') == 'RTMP':
		for video in video_item['videos']['flash'].itervalues():
			try:
				bitrate = video['bitrate']
				bitrates.append((bitrate,bitrate))
			except:
				# Some flash variants carry no bitrate entry; skip them.
				pass
	else:
		# HLS path: the iphone url redirects (via a json wrapper) to the
		# real m3u8 master playlist.
		ipad_url = video_item['videos']['iphone']['url']
		video_data2 = connection.getURL(ipad_url + '?format=json')
		video_url3 = simplejson.loads(video_data2)['url']
		video_data3 = connection.getURL(video_url3)
		video_url4 = m3u8.parse(video_data3)
		uri = None
		for video_index in video_url4.get('playlists'):
			try:
				codecs =  video_index.get('stream_info')['codecs']
			except:
				codecs = ''
			# mp4a.40.5 (HE-AAC) entries are excluded -- presumably
			# audio-only renditions; confirm against the feed.
			if  codecs != 'mp4a.40.5':
				bitrate = int(video_index.get('stream_info')['bandwidth'])
				bitrates.append((int(bitrate) / 1024 , bitrate))
	return bitrates
Example #4
0
def login(url):
    if addon.getSetting('cbs_use_login') == 'true':
        username = addon.getSetting('cbs_username')
        password = addon.getSetting('cbs_password')
        #Get token
        data = connection.getURL(url)
        token = re.compile("authToken = '(.*?)';").findall(data)[0]
        login_values = values = {
            'j_username': username,
            'j_password': password,
            '_remember_me': '1',
            'tk_trp': token
        }
        login_response = connection.getURL(LOGIN_URL,
                                           login_values,
                                           savecookie=True)
        response = simplejson.loads(login_response)
        if response['success'] == False:
            print 'Login failed', response
            try:
                msg = response['message']
            except:
                msg = response['messages']
            common.show_exception(NAME, msg)
            return False
        else:
            return True
Example #5
0
def masterlist():
    """Build the master show list as (name, SITE, 'seasons', url) tuples.

    Merges the SHOWS, ORIGINALS and MOVIES API feeds (de-duplicated by
    show_id), then scrapes the site's show drop-down for any shows the
    feeds missed.
    """
    master_db = []
    master_dict = {}
    dupes = []
    for master_url in (SHOWS, ORIGINALS, MOVIES):
        master_data = connection.getURL(master_url)
        master_menu = simplejson.loads(master_data)["result"]["data"]
        for master_item in master_menu:
            show_id = master_item["show_id"]
            if show_id not in dupes:
                dupes.append(show_id)
                master_name = master_item["title"]
                # Prefer the explicit 'Watch' navigation link when it is not
                # already a video link; otherwise derive '<link>/video'.
                if (
                    master_item["navigationItemLink"]
                    and "video" not in master_item["navigationItemLink"][0]["link"]
                    and master_item["navigationItemLink"][0]["title"] == "Watch"
                ):
                    season_url = master_item["navigationItemLink"][0]["link"]
                else:
                    if master_item["link"][-1:] == "/":
                        season_url = master_item["link"] + "video"
                    else:
                        season_url = master_item["link"] + "/video"
                # Feed links may be relative; anchor them to the site base.
                if BASE not in season_url:
                    season_url = BASE + season_url
                master_dict[master_name] = season_url
                # check for missing shows
    web_data = connection.getURL(BASE)
    web_tree = BeautifulSoup(web_data, "html.parser")
    for item in web_tree.find("div", id="show-drop-down").find_all("a"):
        if item.text not in master_dict:
            master_db.append((item.text, SITE, "seasons", BASE + item["href"] + "video"))
    for master_name, season_url in master_dict.iteritems():
        master_db.append((master_name, SITE, "seasons", season_url))
    return master_db
def seasons(SITE, FULLEPISODES, CLIPS, FULLEPISODESWEB = None):
	"""Add 'Full Episodes' and 'Clips' directories for each show in common.args.url.

	FULLEPISODES / CLIPS are printf-style API url templates; FULLEPISODESWEB
	is an optional website fallback used when the API reports no episodes.
	"""
	season_urls = common.args.url
	for season_url in season_urls.split(','):
		# Probe with range=0-1 just to read totalResults cheaply.
		season_data = connection.getURL(FULLEPISODES % urllib.quote_plus(season_url) + '&range=0-1')
		try:
			season_menu = int(simplejson.loads(season_data)['totalResults'])
		except:
			season_menu = 0
		if season_menu > 0:
			season_url2 = FULLEPISODES % urllib.quote_plus(season_url) + '&range=0-' + str(season_menu)
			common.add_directory('Full Episodes',  SITE, 'episodes', season_url2)
		elif FULLEPISODESWEB:
			# Website fallback: only list when there are more episode
			# teasers than auth-locked ones.
			show = season_url.split('/')[-1].replace(' ', '')
			web_data = connection.getURL(FULLEPISODESWEB % show)
			web_tree = BeautifulSoup(web_data, 'html.parser')
			all = len(web_tree.find_all('div', class_ = 'view-mode-vid_teaser_show_episode'))
			auth = len(web_tree.find_all('div', class_ = 'tve-video-auth'))
			if all > auth:
				common.add_directory('Full Episodes',  SITE, 'webepisodes', FULLEPISODESWEB % show)
		season_data2 = connection.getURL(CLIPS % urllib.quote_plus(season_url) + '&range=0-1')
		try:
			season_menu2 = int(simplejson.loads(season_data2)['totalResults'])
		except:
			season_menu2 = 0
		if season_menu2 > 0:
			season_url3 = CLIPS % urllib.quote_plus(season_url) + '&range=0-' + str(season_menu2)
			# Disambiguate the label when multiple shows are listed at once.
			if ',' in season_urls:
				common.add_directory('Clips %s'%season_url,  SITE, 'episodes', season_url3)
			else:
				common.add_directory('Clips',  SITE, 'episodes', season_url3)
	common.set_view('seasons')
def seasons(collection_ids=common.args.url):
    """Return (label, SITE, 'episodes', url, -1, -1) tuples for each collection.

    collection_ids may be a single id or a comma-separated list; with a
    multi-id list the EXTRA url templates are used and the collection title
    is appended to each label.
    """
    listing = []
    multi = ',' in collection_ids
    ids = collection_ids.split(',')
    # Full-episode collections first.
    for cid in ids:
        template = SEASONSEPISODESEXTRA if multi else SEASONSEPISODES
        tree = BeautifulSoup(connection.getURL(template % cid), 'html.parser')
        if int(tree.episodes['totalitems']) > 0:
            if multi:
                label = 'Episodes - %s' % tree.episode['collectiontitle']
            else:
                label = 'Episodes'
            listing.append((label, SITE, 'episodes',
                            FULLEPISODES % cid, -1, -1))
    # Then clip collections.
    for cid in ids:
        template = SEASONSCLIPSEXTRA if multi else SEASONSCLIPS
        tree = BeautifulSoup(connection.getURL(template % cid), 'html.parser')
        if int(tree.episodes['totalitems']) > 0:
            if multi:
                label = 'Clips - %s' % tree.episode['collectiontitle']
            else:
                label = 'Clips'
            listing.append((label, SITE, 'episodes', CLIPS % cid, -1, -1))

    return listing
Example #8
0
def play_video(episode_url=common.args.url):
    episode_data = connection.getURL(APIBASE + 'episode-details?episode_id=' +
                                     episode_url)
    episode_json = json.loads(episode_data)
    video_url = VIDEOURL % episode_json['data']['Episode']['FullEpisode']['PID']
    print video_url
    video_data = connection.getURL(video_url)
    video_tree = BeautifulSoup(video_data)
    finalurl = video_tree.video['src']
    item = xbmcgui.ListItem(path=finalurl)
    try:
        item.setThumbnailImage(common.args.thumb)
    except:
        pass
    try:
        item.setInfo(
            'Video', {
                'title': common.args.name,
                'season': common.args.season_number,
                'episode': common.args.episode_number,
                'TVShowTitle': common.args.show_title
            })
    except:
        pass
    xbmcplugin.setResolvedUrl(pluginHandle, True, item)
Example #9
0
def masterlist():
	"""Build the master show list as (name, SITE, 'seasons', url) tuples.

	Merges the SHOWS, ORIGINALS and MOVIES API feeds (de-duplicated by
	show_id), then scrapes the site's show drop-down for any shows the
	feeds missed.
	"""
	master_db = []
	master_dict = {}
	dupes = []
	for master_url in (SHOWS, ORIGINALS, MOVIES):
		master_data = connection.getURL(master_url)
		master_menu = simplejson.loads(master_data)['result']['data']
		for master_item in master_menu:
			show_id = master_item['show_id']
			if show_id not in dupes:
				dupes.append(show_id)
				master_name = master_item['title']
				# Prefer the explicit 'Watch' navigation link when it is not
				# already a video link; otherwise derive '<link>/video'.
				if master_item['navigationItemLink'] and 'video' not in master_item['navigationItemLink'][0]['link'] and master_item['navigationItemLink'][0]['title'] == 'Watch':
					season_url = master_item['navigationItemLink'][0]['link']
				else:
					if master_item['link'][-1:] == '/':
						season_url = master_item['link'] + 'video'
					else:
						season_url =  master_item['link'] + '/video'
				# Feed links may be relative; anchor them to the site base.
				if BASE not in season_url:
					season_url = BASE + season_url
				master_dict[master_name] = season_url
	#check for missing shows
	web_data = connection.getURL(BASE)
	web_tree = BeautifulSoup(web_data, 'html.parser')
	for item in web_tree.find('div', id='show-drop-down').find_all('a'):
		if item.text not in master_dict:
			master_db.append((item.text , SITE, 'seasons', BASE + item['href'] + 'video'))
	for master_name, season_url in master_dict.iteritems():
		master_db.append((master_name, SITE, 'seasons', season_url))
	return master_db
Example #10
0
def episodes(filter=common.args.url):
    """List full episodes for one season of a show.

    filter: 'show_id|season_number' string. NOTE: the parameter name
    shadows the builtin filter(); kept for caller compatibility.
    Only episodes whose FullEpisode carries a PID (i.e. playable) are
    listed. Returns (plugin_url, name, thumb, infoLabels, None, False,
    'Full Episode') tuples.
    """
    episodes = []
    show_id, season_num = filter.split('|')

    season_data = connection.getURL(APIBASE + "show-details?show_id=" +
                                    show_id)
    season_json = json.loads(season_data)
    for season_item in season_json['data']['Show']['Seasons']:
        if season_item['Number'] == season_num:
            for episode_item in season_item['Episodes']:
                # A second API call per episode to check playability (PID).
                episode_data = connection.getURL(
                    APIBASE + 'episode-details?episode_id=' +
                    str(episode_item['Id']))
                episode_json = json.loads(episode_data)
                if 'PID' in episode_json['data']['Episode']['FullEpisode']:
                    episode_name = episode_item['Title']
                    episode_plot = episode_item['Synopsis']
                    episode_thumb = episode_item['ShowLogoUri']
                    episode_id = episode_item['Id']
                    episode_number = episode_item['EpisodeNumber']
                    # Build the plugin:// callback url for play_video.
                    u = sys.argv[0]
                    u += '?url="' + str(episode_id) + '"'
                    u += '&mode="' + SITE + '"'
                    u += '&sitemode="play_video"'
                    infoLabels = {
                        'title': episode_name,
                        'plot': episode_plot,
                        'TVShowTitle': season_json['data']['Show']['Name'],
                        'season': season_num,
                        'episode': episode_number
                    }
                    episodes.append((u, episode_name, episode_thumb,
                                     infoLabels, None, False, 'Full Episode'))
    return episodes
Example #11
0
def smil_bitrates(video_url):
	"""Collect (kbps_label, bitrate) pairs from a theplatform SMIL feed.

	video_url: SMIL feed url.
	Returns (bitrates, exception): exception is True when the feed reports
	an isException error param instead of stream data.
	"""
	bitrates = []
	# Fix: initialise exception so the RTMP branch cannot hit a NameError
	# at the return statement.
	exception = False
	video_data = connection.getURL(video_url)
	video_tree = BeautifulSoup(video_data, 'html.parser')
	video_rtmp = video_tree.meta
	try:
		base_url = video_rtmp['base']
	except (TypeError, KeyError):
		# No <meta base=...>: not an RTMP-style SMIL.
		base_url = None
	if base_url is not None:
		# RTMP-style SMIL: every <video> node carries its own bitrate.
		for video_index in video_tree.switch.find_all('video'):
			bitrate = int(video_index['system-bitrate'])
			bitrates.append((int(bitrate) / 1024, bitrate))
	else:
		# Re-request as an m3u manifest and follow the stream src.
		video_data = connection.getURL(video_url + '&manifest=m3u&Tracking=true&Embedded=true&formats=F4M,MPEG4')
		video_tree = BeautifulSoup(video_data, 'html.parser')
		if  video_tree.find('param', attrs = {'name' : 'isException', 'value' : 'true'}) is None:
			video_url2 = video_tree.body.seq.video
			video_url3 = video_url2['src']
			# Fix: was m3u_bitrates(video3_url) -- an undefined name.
			bitrates = m3u_bitrates(video_url3)
		else:
			exception = True
	return bitrates, exception
Example #12
0
def seasons(SITE, FULLEPISODES, CLIPSSEASON, CLIPS, WEBSHOWS = None, show_id = common.args.url):
	"""Build season entries for a show encoded as 'master_name#show_id#has_full_eps'.

	Adds a 'Full Episodes' entry when the API flag says so (or, failing
	that, when the WEBSHOWS page reports available episodes), then one
	entry per clip season from CLIPSSEASON.
	Returns (name, SITE, mode, url, -1, -1) tuples.
	"""
	seasons = []
	master_name = show_id.split('#')[0]
	has_full_eps = show_id.split('#')[2]
	show_id = show_id.split('#')[1]
	if has_full_eps == 'true':
		seasons.append(('Full Episodes',  SITE, 'episodes', master_name + '#' + FULLEPISODES % show_id, -1, -1))
	elif WEBSHOWS is not None:
		# Fallback: scrape the shows page for this show's episode count.
		try:

			webdata = connection.getURL(WEBSHOWS)
			web_tree =  BeautifulSoup(webdata, 'html.parser', parse_only = SoupStrainer('div', id = 'page-shows'))
			show = web_tree.find('h2', text = master_name)
			episodes = show.findNext('p', attrs = {'data-id' : 'num-full-eps-avail'})['data-value']
			if int(episodes) > 0:
				seasons.append(('Full Episodes',  SITE, 'episodes_web', master_name, -1, -1))
		except:
			# Best-effort: any scrape failure means no Full Episodes entry.
			pass

	clips_data = connection.getURL(CLIPSSEASON % show_id)
	clips_menu = simplejson.loads(clips_data)
	for season in clips_menu:
		clip_name = common.smart_utf8(season['title'])
		seasons.append((clip_name,  SITE, 'episodes', master_name + '#' + CLIPS % (show_id, season['id']), -1, -1))

	return seasons
def smil_bitrates(video_url):
    """Collect (kbps_label, bitrate) pairs from a theplatform SMIL feed.

    video_url: SMIL feed url.
    Returns (bitrates, exception): exception is True when the feed reports
    an isException error param instead of stream data.
    """
    bitrates = []
    # Fix: initialise exception so the RTMP branch cannot hit a NameError
    # at the return statement.
    exception = False
    video_data = connection.getURL(video_url)
    video_tree = BeautifulSoup(video_data, 'html.parser')
    video_rtmp = video_tree.meta
    try:
        base_url = video_rtmp['base']
    except (TypeError, KeyError):
        # No <meta base=...>: not an RTMP-style SMIL.
        base_url = None
    if base_url is not None:
        # RTMP-style SMIL: every <video> node carries its own bitrate.
        for video_index in video_tree.switch.find_all('video'):
            bitrate = int(video_index['system-bitrate'])
            bitrates.append((int(bitrate) / 1024, bitrate))
    else:
        # Re-request as an m3u manifest and follow the stream src.
        video_data = connection.getURL(
            video_url +
            '&manifest=m3u&Tracking=true&Embedded=true&formats=F4M,MPEG4')
        video_tree = BeautifulSoup(video_data, 'html.parser')
        if video_tree.find('param',
                           attrs={
                               'name': 'isException',
                               'value': 'true'
                           }) is None:
            video_url2 = video_tree.body.seq.video
            video_url3 = video_url2['src']
            # Fix: was m3u_bitrates(video3_url) -- an undefined name.
            bitrates = m3u_bitrates(video_url3)
        else:
            exception = True
    return bitrates, exception
Example #14
0
def episodes(episode_url = common.args.url):
	"""Scrape the episode/clip listing at episode_url.

	Clip pages are paginated via an 'a.next' link and recursed into;
	full-episode pages may expose a season menu, in which case each season
	is fetched via SEASONURL. Returns the list built by add_clips /
	add_fullepisodes.
	"""
	episodes = []
	episode_data = connection.getURL(episode_url)
	# The site splits urls with "'+'" string concatenation; undo it before parsing.
	episode_tree = BeautifulSoup(episode_data.replace('\'+\'', ''), 'html.parser')
	if 'clip' in episode_url:
		if episode_tree.find('a', class_ = 'next') is not None:
			# NOTE(review): on this paginated path the add_clips/recursion
			# return values are discarded and [] is returned -- confirm
			# add_clips adds entries by side effect.
			add_clips(episode_tree)
			try:
				episodes(episode_url.split('?')[0] + episode_tree.find('a', class_ = 'next')['href'])
			except:
				pass
		else:
			episodes = add_clips(episode_tree)
	else:
		if episode_tree.find('a', class_ = 'season_menu') is not None:
			# Multi-season show: pull ids from inline JS, then fetch each season.
			show_id = re.compile('var showId = "(.+?)";').findall(episode_data)[0]
			episode_id = re.compile('var episodeId = "(.+?)";').findall(episode_data)[0]
			episode_menu = episode_tree.find_all('a', class_ = 'season')
			for episode_item in episode_menu:
				episode_data2 = connection.getURL(SEASONURL %(show_id, episode_item['id'], episode_id))
				episode_tree2 = BeautifulSoup(episode_data2, 'html.parser')
				episodes.extend(add_fullepisodes(episode_tree2, episode_item.text.split(' ')[1]))
		else:
			episodes = add_fullepisodes(episode_tree)
	return episodes
Example #15
0
def seasons(SITE, FULLEPISODES, CLIPS, FULLEPISODESWEB = None, season_urls = common.args.url):
	"""Return 'Full Episodes' and 'Clips' season tuples for each show url.

	FULLEPISODES / CLIPS are printf-style API url templates; FULLEPISODESWEB
	is an optional website fallback used when the API reports no episodes.
	Returns (name, SITE, mode, url, -1, -1) tuples.
	"""

	seasons = []
	for season_url in season_urls.split(','):
		# Probe with range=0-1 just to read totalResults cheaply.
		season_data = connection.getURL(FULLEPISODES % urllib.quote_plus(season_url) + '&range=0-1')
		try:
			season_menu = int(simplejson.loads(season_data)['totalResults'])
		except:
			season_menu = 0
		if season_menu > 0:
			season_url2 = FULLEPISODES % urllib.quote_plus(season_url) + '&range=0-' + str(season_menu)
			seasons.append(('Full Episodes',  SITE, 'episodes', season_url2, -1, -1))
		elif FULLEPISODESWEB:
			# Website fallback: only list when there are more episode
			# teasers than auth-locked ones.
			try:
				show = season_url.split('/')[-1].replace(' ', '')
				web_data = connection.getURL(FULLEPISODESWEB % show)
				web_tree = BeautifulSoup(web_data, 'html.parser')
				all = len(web_tree.find_all('div', class_ = 'view-mode-vid_teaser_show_episode'))
				auth = len(web_tree.find_all('div', class_ = 'tve-video-auth'))
				if all > auth:
					seasons.append(('Full Episodes',  SITE, 'episodes_web', FULLEPISODESWEB % show, -1, -1))
			except Exception as e:
				print "Error with web processing", e
		season_data2 = connection.getURL(CLIPS % urllib.quote_plus(season_url) + '&range=0-1')
		try:
			season_menu2 = int(simplejson.loads(season_data2)['totalResults'])
		except:
			season_menu2 = 0
		if season_menu2 > 0:
			season_url3 = CLIPS % urllib.quote_plus(season_url) + '&range=0-' + str(season_menu2)
			# Disambiguate the label when multiple shows are listed at once.
			if ',' in season_urls:
				seasons.append(('Clips %s'%season_url,  SITE, 'episodes', season_url3, -1, -1))
			else:
				seasons.append(('Clips',  SITE, 'episodes', season_url3, -1, -1))
	return seasons
Example #16
0
def play_video(video_uri=common.args.url):
    """Play the video at video_uri.

    PowerNation urls resolve directly to an HLS stream via a JSON API and
    are handed to Kodi here; anything else is scraped for an mgid and
    delegated to main_viacom.play_video.
    """
    # Handle the powernation specific video loading
    if 'powernation' in video_uri:
        video_data = connection.getURL(video_uri)
        video_json = json.loads(video_data)
        video_url = video_json['HLSURL']

        item = xbmcgui.ListItem(path=video_url)
        try:
            item.setThumbnailImage(common.args.thumb)
        except:
            # Thumbnail is optional; ignore when the arg is missing.
            pass
        try:
            item.setInfo(
                'Video', {
                    'title': common.args.name,
                    'season': common.args.season_number,
                    'episode': common.args.episode_number,
                    'TVShowTitle': common.args.show_title
                })
        except:
            # Metadata args may be absent depending on the entry point.
            pass
        xbmcplugin.setResolvedUrl(pluginHandle, True, item)
    else:
        # Viacom path: the player div carries the mgid needed downstream.
        video_data = connection.getURL(video_uri)
        video_url = BeautifulSoup(video_data, 'html5lib').find(
            'div', class_='video_player')['data-mgid']
        main_viacom.play_video(BASE, video_url)
Example #17
0
def masterlist():
	"""Build the master show list as (name, SITE, 'seasons', url) tuples.

	Merges the SHOWS, ORIGINALS and MOVIES API feeds (de-duplicated by
	show_id), then scrapes the site's show drop-down for any shows the
	feeds missed.
	"""
	master_db = []
	master_dict = {}
	dupes = []
	for master_url in (SHOWS, ORIGINALS, MOVIES):
		master_data = connection.getURL(master_url)
		master_menu = simplejson.loads(master_data)['result']['data']
		for master_item in master_menu:
			show_id = master_item['show_id']
			if show_id not in dupes:
				dupes.append(show_id)
				master_name = master_item['title']
				# Prefer the explicit 'Watch' navigation link when it is not
				# already a video link; otherwise derive '<link>/video'.
				if master_item['navigationItemLink'] and 'video' not in master_item['navigationItemLink'][0]['link'] and master_item['navigationItemLink'][0]['title'] == 'Watch':
					season_url = master_item['navigationItemLink'][0]['link']
				else:
					if master_item['link'][-1:] == '/':
						season_url = master_item['link'] + 'video'
					else:
						season_url =  master_item['link'] + '/video'
				# Feed links may be relative; anchor them to the site base.
				if BASE not in season_url:
					season_url = BASE + season_url
				master_dict[master_name] = season_url
	#check for missing shows
	web_data = connection.getURL(BASE)
	web_tree = BeautifulSoup(web_data, 'html.parser')
	for item in web_tree.find('div', id='show-drop-down').find_all('a'):
		if item.text not in master_dict:
			master_db.append((item.text , SITE, 'seasons', BASE + item['href'] + 'video'))
	for master_name, season_url in master_dict.iteritems():
		master_db.append((master_name, SITE, 'seasons', season_url))
	return master_db
Example #18
0
def seasons(season_url=common.args.url):
    """List seasons for a show page, trying two site layouts.

    First looks for a "playlists" JSON blob embedded in the page source;
    failing that, falls back to the TPVideoPlaylistTaxonomyContainer
    playlist feed (a JSONP response that is unwrapped before parsing).
    Returns (name, SITE, 'episodes', feed_url, -1, -1) tuples.
    """
    seasons = []
    season_data = connection.getURL(season_url)
    try:
        # Layout 1: playlists embedded as JSON in the page source.
        playlist = re.compile('"playlists":\s*(\[.*?\])',
                              re.DOTALL).findall(season_data)[0]
        season_menu = simplejson.loads(playlist)
        for season_item in season_menu:
            seasons.append((season_item['name'], SITE, 'episodes',
                            FEED % season_item['guid'], -1, -1))
    except:
        try:
            # Layout 2: taxonomy container points at a playlist feed.
            season_tree = BeautifulSoup(season_data,
                                        'html.parser',
                                        parse_only=SoupStrainer('div'))
            season_source = season_tree.find(
                'div', id='TPVideoPlaylistTaxonomyContainer')['source']
            playlist_url = PLAYLIST % season_source
            playlist_data = connection.getURL(playlist_url)
            # Strip the JSONP wrapper so the payload parses as plain JSON.
            playlist_data = playlist_data.replace(
                '$pdk.NBCplayer.ShowPlayerTaxonomy.GetList(',
                '').replace(');', '')
            season_menu = simplejson.loads(playlist_data)
            for season_item in season_menu['playlistTaxonomy']:
                season_name = season_item['reference']['name']
                season_url = FEED % season_item['reference']['feed']
                seasons.append(
                    (season_name, SITE, 'episodes', season_url, -1, -1))
        except Exception:
            # Neither layout matched; return whatever was collected (may be empty).
            pass
    return seasons
Example #19
0
def seasons(season_url=common.args.url):
    """Return 'Full Episodes' and 'Clips' season tuples for one show name.

    Applies the different_show_name alias map first, then probes the
    FULLEPISODES and CLIPS feeds with range=0-1 to read total_count before
    requesting the full range.
    Returns (name, SITE, 'episodes', url, -1, -1) tuples.
    """
    seasons = []
    # Some shows are listed under a different feed name; remap them.
    if season_url in different_show_name:
        season_url = different_show_name[season_url]
    season_data = connection.getURL(FULLEPISODES %
                                    urllib.quote_plus(season_url) +
                                    '&range=0-1')
    try:
        season_menu = int(simplejson.loads(season_data)['total_count'])
    except:
        season_menu = 0
    if season_menu > 0:
        season_url2 = FULLEPISODES % urllib.quote_plus(
            season_url) + '&range=0-' + str(season_menu)
        seasons.append(
            ('Full Episodes', SITE, 'episodes', season_url2, -1, -1))
    season_data2 = connection.getURL(CLIPS % urllib.quote_plus(season_url) +
                                     '&range=0-1')
    try:
        season_menu2 = int(simplejson.loads(season_data2)['total_count'])
    except:
        season_menu2 = 0
    if season_menu2 > 0:
        season_url3 = CLIPS % urllib.quote_plus(
            season_url) + '&range=0-' + str(season_menu2)
        seasons.append(('Clips', SITE, 'episodes', season_url3, -1, -1))
    return seasons
def seasons(SITE, FULLEPISODES, CLIPSSEASON, CLIPS, WEBSHOWS = None, show_id = common.args.url):
	"""Build season entries for a show encoded as 'master_name#show_id#has_full_eps'.

	Adds a 'Full Episodes' entry when the API flag says so (or, failing
	that, when the WEBSHOWS page reports available episodes), then one
	entry per clip season from CLIPSSEASON.
	Returns (name, SITE, mode, url, -1, -1) tuples.
	"""
	seasons = []
	master_name = show_id.split('#')[0]
	has_full_eps = show_id.split('#')[2]
	show_id = show_id.split('#')[1]
	if has_full_eps == 'true':
		seasons.append(('Full Episodes',  SITE, 'episodes', master_name + '#' + FULLEPISODES % show_id, -1, -1))
	elif WEBSHOWS is not None:
		# Fallback: scrape the shows page for this show's episode count.
		try:
			webdata = connection.getURL(WEBSHOWS)
			web_tree =  BeautifulSoup(webdata, 'html.parser', parse_only = SoupStrainer('div', id = 'page-shows'))
			show = web_tree.find(text = master_name)
			# (removed leftover debug `print web_tree`, which dumped the
			# entire parsed page into the log on every call)
			episodes = show.findNext('p', attrs = {'data-id' : 'num-full-eps-avail'})['data-value']
			if int(episodes) > 0:
				seasons.append(('Full Episodes',  SITE, 'episodes_web', master_name, -1, -1))
		except:
			# Best-effort: any scrape failure means no Full Episodes entry.
			pass

	clips_data = connection.getURL(CLIPSSEASON % show_id)
	clips_menu = simplejson.loads(clips_data)
	for season in clips_menu:
		clip_name = common.smart_utf8(season['title'])
		seasons.append((clip_name,  SITE, 'episodes', master_name + '#' + CLIPS % (show_id, season['id']), -1, -1))

	return seasons
Example #21
0
def episodes(filter = common.args.url):
	"""List full episodes for one season of a show.

	filter: 'show_id|season_number' string. NOTE: the parameter name
	shadows the builtin filter(); kept for caller compatibility.
	Only episodes whose FullEpisode carries a PID (i.e. playable) are
	listed. Returns (plugin_url, name, thumb, infoLabels, None, False,
	'Full Episode') tuples.
	"""
	episodes = []
	show_id, season_num = filter.split('|')

	season_data = connection.getURL(APIBASE + "show-details?show_id=" + show_id)
	season_json = json.loads(season_data)
	for season_item in season_json['data']['Show']['Seasons']:
		if season_item['Number'] == season_num:
			for episode_item in season_item['Episodes']:				
				# A second API call per episode to check playability (PID).
				episode_data = connection.getURL(APIBASE + 'episode-details?episode_id=' + str(episode_item['Id']))
				episode_json = json.loads(episode_data)
				if 'PID' in episode_json['data']['Episode']['FullEpisode']:
					episode_name = episode_item['Title']
					episode_plot = episode_item['Synopsis']
					episode_thumb = episode_item['ShowLogoUri']
					episode_id = episode_item['Id']
					episode_number = episode_item['EpisodeNumber']
					# Build the plugin:// callback url for play_video.
					u = sys.argv[0]
					u += '?url="' + str(episode_id) + '"'
					u += '&mode="' + SITE + '"'
					u += '&sitemode="play_video"'
					infoLabels={	'title' : episode_name,
									'plot' : episode_plot, 
									'TVShowTitle' : season_json['data']['Show']['Name'],
									'season' : season_num,
									'episode' : episode_number}
					episodes.append((u, episode_name, episode_thumb, infoLabels, None, False, 'Full Episode'))
	return episodes
Example #22
0
def episodes(episode_url=common.args.url):
    """Scrape the episode/clip listing at episode_url.

    Clip pages are paginated via an 'a.next' link and recursed into;
    full-episode pages may expose a season menu, in which case each season
    is fetched via SEASONURL. Returns the list built by add_clips /
    add_fullepisodes.
    """
    episodes = []
    episode_data = connection.getURL(episode_url)
    # The site splits urls with "'+'" string concatenation; undo it first.
    episode_tree = BeautifulSoup(episode_data.replace('\'+\'', ''),
                                 'html.parser')
    if 'clip' in episode_url:
        if episode_tree.find('a', class_='next') is not None:
            # NOTE(review): on this paginated path the add_clips/recursion
            # return values are discarded and [] is returned -- confirm
            # add_clips adds entries by side effect.
            add_clips(episode_tree)
            try:
                episodes(
                    episode_url.split('?')[0] +
                    episode_tree.find('a', class_='next')['href'])
            except:
                pass
        else:
            episodes = add_clips(episode_tree)
    else:
        if episode_tree.find('a', class_='season_menu') is not None:
            # Multi-season show: pull ids from inline JS, fetch each season.
            show_id = re.compile('var showId = "(.+?)";').findall(
                episode_data)[0]
            episode_id = re.compile('var episodeId = "(.+?)";').findall(
                episode_data)[0]
            episode_menu = episode_tree.find_all('a', class_='season')
            for episode_item in episode_menu:
                episode_data2 = connection.getURL(
                    SEASONURL % (show_id, episode_item['id'], episode_id))
                episode_tree2 = BeautifulSoup(episode_data2, 'html.parser')
                episodes.extend(
                    add_fullepisodes(episode_tree2,
                                     episode_item.text.split(' ')[1]))
        else:
            episodes = add_fullepisodes(episode_tree)
    return episodes
def process_smil(video_url, qbitrate=None):
    """Resolve a theplatform SMIL feed to a playable URL.

    video_url: SMIL feed url; qbitrate: optional exact bitrate to select
    (otherwise the best stream under the user's quality setting is chosen,
    falling back to the lowest available).
    Returns (finalurl, exception); exception is True when the feed reports
    an isException error, in which case finalurl is None.
    """
    closedcaption = None
    # Fix: these were never initialised, so the RTMP bitrate-selection loop
    # and the final return raised NameError. Bounds mirror the selection
    # pattern used by play_video in this file.
    exception = False
    finalurl = None
    hbitrate = -1
    lbitrate = -1
    sbitrate = int(addon.getSetting('quality')) * 1024
    video_data = connection.getURL(video_url)
    video_tree = BeautifulSoup(video_data, 'html.parser')
    video_rtmp = video_tree.meta
    playpath_url = None
    lplaypath_url = None
    try:
        base_url = video_rtmp['base']
    except (TypeError, KeyError):
        # No <meta base=...>: not an RTMP-style SMIL.
        base_url = None
    if base_url is not None:
        if qbitrate is None:
            # Track the lowest stream and the highest stream at or below
            # the user's quality cap.
            video_url2 = video_tree.switch.find_all('video')
            for video_index in video_url2:
                bitrate = int(video_index['system-bitrate'])
                if bitrate < lbitrate or lbitrate == -1:
                    lbitrate = bitrate
                    lplaypath_url = video_index['src']
                if bitrate > hbitrate and bitrate <= sbitrate:
                    hbitrate = bitrate
                    playpath_url = video_index['src']
            if playpath_url is None:
                # Nothing under the cap; fall back to the lowest stream.
                playpath_url = lplaypath_url
        else:
            playpath_url = video_tree.switch.find(
                'video', attrs={'system-bitrate': qbitrate})['src']
        if '.mp4' in playpath_url:
            playpath_url = 'mp4:' + playpath_url
        else:
            playpath_url = playpath_url.replace('.flv', '')
        finalurl = base_url + ' playpath=' + playpath_url + ' swfurl=' + SWFURL + ' swfvfy=true'
        player._localHTTPServer = False
    else:
        # HLS path: re-request the feed as an m3u manifest.
        video_data = connection.getURL(
            video_url +
            '&manifest=m3u&Tracking=true&Embedded=true&formats=F4M,MPEG4')
        video_tree = BeautifulSoup(video_data, 'html.parser')
        try:
            closedcaption = video_tree.textstream['src']
            player._subtitles_Enabled = True
        except (TypeError, KeyError):
            # No caption stream advertised.
            pass
        if (addon.getSetting('enablesubtitles') == 'true') and (closedcaption
                                                                is not None):
            convert_subtitles(closedcaption)
        if video_tree.find('param',
                           attrs={
                               'name': 'isException',
                               'value': 'true'
                           }) is None:
            video_url2 = video_tree.body.seq.video
            video_url3 = video_url2['src']
            finalurl = process_m3u(video_url3, qbitrate)
            return finalurl, False

        else:
            exception = True
    return finalurl, exception
Example #24
0
	def onPlayBackEnded( self ):
		# Will be called when xbmc stops playing a segment
		print "**************************** End Event *****************************"
		# Only shut down once the final segment has finished playing.
		if self._counter == self._segments:
			print "**************************** End Event -- Stopping Server *****************************"
			self.is_active = False
			if self._localHTTPServer:
				# Ask the local proxy server to terminate itself.
				connection.getURL('http://localhost:12345/stop', connectiontype = 0)
Example #25
0
	def onPlayBackEnded( self ):
		# Will be called when xbmc stops playing a segment
		print "**************************** End Event *****************************"
		# Only shut down once the final segment has finished playing.
		if self._counter == self._segments:
			print "**************************** End Event -- Stopping Server *****************************"
			self.is_active = False
			if self._localHTTPServer:
				# Ask the local proxy server to terminate itself.
				connection.getURL('http://localhost:12345/stop', connectiontype = 0)
def play_video(SWFURL, M3UURL = None, BASE = None):
	"""Resolve common.args.url to a playable stream and hand it to XBMC.

	M3UURL is the master-m3u8 URL template used for entitlement-protected
	streams; SWFURL and BASE are part of the shared per-site signature and
	are not used directly here.
	"""
	try:
		qbitrate = common.args.quality
	except:
		qbitrate = None
	exception = False
	video_url = common.args.url
	video_data = connection.getURL(video_url)
	# Parse up front so the error branch at the bottom can always read
	# video_tree (previously it raised NameError on the theplatform path).
	video_tree = BeautifulSoup(video_data, 'html.parser')
	if 'link.theplatform.com' not in video_url:
		# Page URL: dig the SMIL link out of the embedded player iframe,
		# falling back to the pdk-player div.
		try:
			player_url = 'http:' + video_tree.find('div', class_ = 'video-player-wrapper').iframe['src']
		except:
			player_url = 'http:' + video_tree.find('div', id = 'pdk-player')['data-src']
		player_data = connection.getURL(player_url)
		player_tree = BeautifulSoup(player_data, 'html.parser')
		video_url = player_tree.find('link', type = "application/smil+xml")['href']
		video_url = video_url + '&format=SCRIPT'
		script_data = connection.getURL(video_url)
		script_menu = simplejson.loads(script_data)
		if script_menu['pl1$entitlement'] != 'auth':
			finalurl, exception = process_smil(video_url)
		else:
			# Auth-gated stream: derive the clip id from the caption file
			# name and build a time-windowed master m3u8 URL.
			captions = script_menu['captions'][0]['src']
			id = re.compile('([0-9]+.[0-9]+.*).tt').findall(captions)[0]
			td = (datetime.datetime.utcnow() - datetime.datetime(1970,1,1))
			unow = int((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6)
			master_url = M3UURL % (id, str(unow), str(unow + 60))
			finalurl = process_m3u(master_url, qbitrate)
	else:
		finalurl, exception = process_smil(video_url)
	if not exception:
		item = xbmcgui.ListItem(path = finalurl)
		try:
			item.setThumbnailImage(common.args.thumb)
		except:
			pass
		try:
			item.setInfo('Video', {	'title' : common.args.name,
							'season' : common.args.season_number,
							'episode' : common.args.episode_number,
							'TVShowTitle' : common.args.show_title})
		except:
			pass
		xbmcplugin.setResolvedUrl(pluginHandle, True, item)
		# Block until the player object reports playback finished.
		while player.is_active:
			player.sleep(250)
	else:
		common.show_exception(video_tree.ref['title'], video_tree.ref['abstract'])
def process_m3u(m3u_url, qbitrate=None):
    """Pick a variant from a master m3u8 and return a playable path.

    qbitrate -- exact bandwidth to select; when None, choose the highest
    variant at or below the configured quality cap (falling back to the
    lowest available). Returns either a direct URI or the locally
    rewritten playlist file when the proxy / key download path is needed.
    """
    key_url = None
    sbitrate = int(addon.getSetting('quality')) * 1024
    lbitrate = -1
    hbitrate = -1
    # Initialise both selections so the fallback test below cannot raise
    # NameError when no variant matches (empty playlist, unmatched qbitrate).
    playpath_url = None
    lplaypath_url = None
    video_data2 = connection.getURL(m3u_url, savecookie=True)
    video_url5 = m3u8.parse(video_data2)
    for video_index in video_url5.get('playlists'):
        bitrate = int(video_index.get('stream_info')['bandwidth'])
        if qbitrate is None:
            # Track the lowest variant (fallback) and the best one that
            # still fits under the configured quality cap.
            if (bitrate < lbitrate or lbitrate == -1):
                lbitrate = bitrate
                lplaypath_url = video_index.get('uri')
            if (bitrate > hbitrate and bitrate <= sbitrate):
                hbitrate = bitrate
                playpath_url = video_index.get('uri')
        elif bitrate == qbitrate:
            playpath_url = video_index.get('uri')
    if playpath_url is None:
        playpath_url = lplaypath_url
    if not common.use_proxy() and int(addon.getSetting('connectiontype')) == 0:
        player._localHTTPServer = False
        return playpath_url
    else:
        m3u_data = connection.getURL(playpath_url, loadcookie=True)
        try:
            # Download the AES key referenced by the variant playlist so
            # it can be served from the local proxy instead.
            key_url = re.compile('URI="(.*?)"').findall(m3u_data)[0]
            key_data = connection.getURL(key_url, loadcookie=True)
            key_file = open(ustvpaths.KEYFILE % '0', 'wb')
            key_file.write(key_data)
            key_file.close()
        except:
            pass
        video_url5 = re.compile('(http:.*?)\n').findall(m3u_data)
        if int(addon.getSetting('connectiontype')) > 0:
            # Rewrite every segment URL to go through the local proxy.
            proxy_config = common.proxyConfig()
            for video_item in video_url5:
                newurl = urllib.quote_plus(base64.b64encode(video_item))
                m3u_data = m3u_data.replace(
                    video_item,
                    'http://127.0.0.1:12345/proxy/' + newurl + '/' + proxy_config)
        filestring = 'XBMC.RunScript(' + os.path.join(ustvpaths.LIBPATH,
                                                      'proxy.py') + ', 12345)'
        xbmc.executebuiltin(filestring)
        # Give the proxy script time to come up before playback starts.
        time.sleep(20)
        if key_url is not None:
            m3u_data = m3u_data.replace(key_url,
                                        'http://127.0.0.1:12345/play0.key')
        playfile = open(ustvpaths.PLAYFILE, 'w')
        playfile.write(m3u_data)
        playfile.close()
        return ustvpaths.PLAYFILE
def episodes_json(SITE):
    """Build the episode listing for a show from its JSON feed.

    common.args.url is 'master_name#feed_url'. Episodes whose title equals
    the master name get a more specific headline read from their own page.
    """
    episode_url = common.args.url
    master_name = episode_url.split('#')[0]
    episode_url = episode_url.split('#')[1]
    episode_data = connection.getURL(episode_url)
    episode_menu = simplejson.loads(episode_data)
    for episode_item in episode_menu:
        url = episode_item['episodeID']
        try:
            episode_duration = episode_item['length']
        except:
            episode_duration = -1
        try:
            # Air date comes as '... on <Month> <day>, <Year>'.
            episode_airdate = common.format_date(episode_item['airDate'].split('on ')[1],'%B %d, %Y')
        except:
            episode_airdate = -1
        try:
            episode_plot = episode_item['summary']
        except:
            episode_plot = episode_item['shortdescription']
        episode_name = episode_item['title']
        if episode_name == master_name:
            # Title is just the show name: fetch the episode page headline.
            video_url = EPISODE % url
            video_data = connection.getURL(video_url)
            video_tree = BeautifulSoup(video_data, 'html.parser')
            episode_name = video_tree.headline.string
        elif episode_name == "":
            episode_name = episode_plot
        try:
            # 'identifier' looks like 'Season N, Episode M'.
            season_number = int(episode_item['identifier'].split(',')[0].split(' ')[1])
        except:
            season_number = -1
        try:
            episode_number =  int(episode_item['identifier'].split(', ')[1].split(' ')[1].replace(' Episode ', ''))
        except:
            try:
                episode_number =  int(episode_item['identifier'].split(', ')[1].split(' ')[1])
            except:
                episode_number = -1
        if episode_number > 100:
            # Implausibly large value: recover the real number from the
            # 'episode-NN' slug in the share URL redirect.
            episode_number = int(re.compile('episode-(\d*)').findall(connection.getRedirect(episode_item['shareURL']))[0])
        try:
            episode_thumb = episode_item['640x360_jpg']
        except:
            episode_thumb = None
        # Build the plugin:// callback URL for this episode.
        u = sys.argv[0]
        u += '?url="' + urllib.quote_plus(url) + '"'
        u += '&mode="' + SITE + '"'
        u += '&sitemode="play_video"'
        infoLabels={    'title' : episode_name,
                        'durationinseconds' : episode_duration,
                        'season' : season_number,
                        'episode' : episode_number,
                        'plot' : episode_plot,
                        'premiered' : episode_airdate }
        common.add_video(u, episode_name, episode_thumb, infoLabels = infoLabels, quality_mode  = 'list_qualities')
    common.set_view('episodes')
Example #29
0
def seasons(season_urls=common.args.url):
    """Discover season menu entries for a show page, trying several layouts.

    Returns a list of (name, SITE, 'episodes', url, -1, -1) tuples.
    """
    seasons = []
    root_url = season_urls  # NOTE(review): assigned but unused below
    season_urls = BASE + season_urls
    season_data = connection.getURL(season_urls)
    try:
        # Layout 1: a 'Videos (N)' link leading to a page whose <option>
        # dropdown lists the seasons.
        season_tree = BeautifulSoup(season_data)
        video_link = BASE + season_tree.find(
            'a', text=re.compile('Videos? \(\d+\)'))['href']
        season_data = connection.getURL(video_link)
        video_tree = BeautifulSoup(season_data)
        season_menu = video_tree.find_all('option')
        if season_menu:
            for season_item in season_menu:
                season_name = season_item.string
                season_url = BASE + season_item['value']
                seasons.append(
                    (season_name, SITE, 'episodes', season_url, -1, -1))
        else:
            seasons.append(('Clips', SITE, 'episodes', video_link, -1, -1))
    except:
        try:
            # Layout 2: an embedded JSON 'channels' blob in the raw HTML.
            season_title = re.compile(
                '"channels": \[\{\s+"title": "(.*?)",\s+"start": \d+,\s+"end": \d+,\s+"total": \d+,\s+"videos":',
                re.DOTALL).findall(season_data)[0]
            seasons.append(
                (season_title, SITE, 'episodes', season_urls, -1, -1))
        except:
            # Layout 3: a hub nav dropdown and/or 'ss-play' teaser tiles.
            season_tree = BeautifulSoup(season_data)
            try:
                dropdown = season_tree.find('nav', class_='hub').find(
                    'span', text='Videos').find_next(class_='dropdown-menu')
                season_menu = dropdown.find_all('a')
                for season_item in season_menu:
                    seasons.append((season_item['title'], SITE, 'episodes',
                                    BASE + season_item['href'], -1, -1))
            except:
                pass
            season_menu = season_tree.find_all(class_='ss-play')
            for season_item in season_menu:
                # Walk up from the play icon to the tile that carries the
                # title/link attributes.
                season_grandparent = season_item.parent.parent.parent
                try:
                    try:
                        season_name = season_grandparent.img['title']
                    except:
                        season_name = season_grandparent.h6.string
                    try:
                        season_url = BASE + season_grandparent['href']
                    except:
                        season_url = BASE + season_grandparent.a['href']
                    if 'shows' in season_url or 'packages' in season_url or 'chef' in season_url:
                        seasons.append((season_name, SITE, 'episodes',
                                        season_url, -1, -1))
                except:
                    pass
    return seasons
Example #30
0
def episodes_from_html(episode_url = common.args.url, page = 1):
	""" Add episodes by analysing the HTML of the page """
	# Fetch the page on every call; previously this happened only when
	# page == 1, so the recursive pagination calls below raised NameError
	# on episode_data / episode_tree.
	episode_data = connection.getURL(episode_url)
	episode_tree = None
	if page == 1:
		# The first page may hide the real feed behind a Showcase JS
		# variable or a data-feed attribute; follow it when present.
		try:
			episode_url = re.compile("var .*Showcase.* = '(.*)'").findall(episode_data)[0]
			if 'http' not in episode_url:
				episode_url = BASE + episode_url
			episode_data = connection.getURL(episode_url)
		except:
			try:
				episode_tree = BeautifulSoup(episode_data, 'html5lib')
				episode_url = episode_tree.find('div', class_ = 'content')['data-feed']
				episode_data = connection.getURL(episode_url)
				episode_tree = BeautifulSoup(episode_data, 'html5lib')
			except:
				pass
	if episode_tree is None:
		episode_tree = BeautifulSoup(episode_data, 'html5lib')
	if 'Clips' in common.args.name:
		if 'southpark' in episode_url:
			add_clips_southpark(episode_tree)
		else:
			next = episode_tree.find('a', class_ = re.compile('next'))
			add_video(episode_tree)
			if next is not None:
				try:
					if 'href' in next.attrs:
						nexturl = next['href'].replace(' ', '+')
					else:
						# Pagination driven by onclick="loadContent('...')".
						nexturl = next['onclick'].split(';')[0].replace("loadContent('", "").replace("')", "")
					if 'http' not in nexturl:
						nexturl = BASE + nexturl
					if page < int(addon.getSetting('maxpages')):
						episodes_from_html(nexturl, page + 1)
				except:
					pass
	else:
		if 'southpark' in episode_url:
			add_fullepisodes_southpark(episode_tree)
		else:
			next = episode_tree.find('a', class_ = re.compile('next'))
			add_video(episode_tree, False)
			if next is not None:
				try:
					nexturl = next['href']
					if nexturl[0] == '?':
						# Bare query string: reuse the current page's path.
						nexturl = episode_url.split('?')[0] + nexturl
					elif 'http' not in nexturl:
						nexturl = BASE + nexturl
					if page < int(addon.getSetting('maxpages')):
						episodes_from_html(nexturl, page + 1)
				except:
					pass
Example #31
0
def play_video(video_url=common.args.url):
    """Resolve a COVE media-object id to a stream URL and start playback."""
    hbitrate = -1
    sbitrate = int(addon.getSetting('quality')) * 1024  # quality cap in bps
    closedcaption = None
    video_url2 = None
    finalurl = ''
    try:
        # The caption endpoint answers with JSONP: video_info({...}); strip
        # the wrapper before parsing.
        closedcaption = simplejson.loads(
            connection.getURL(CLOSEDCAPTION % video_url).replace(
                'video_info(', '').replace(')', ''))['closed_captions_url']
    except:
        pass
    if (addon.getSetting('enablesubtitles')
            == 'true') and (closedcaption
                            is not None) and (closedcaption != ''):
        convert_subtitles(closedcaption.replace(' ', '+'))
    video_data = cove.videos.filter(fields='mediafiles',
                                    filter_tp_media_object_id=video_url)
    video_menu = video_data['results'][0]['mediafiles']
    # Prefer an iPad or HLS-2500k encoding; the last match wins.
    for video_item in video_menu:
        if video_item['video_encoding']['eeid'] == 'ipad-16x9':
            video_url2 = video_item['video_data_url']
        elif video_item['video_encoding']['eeid'] == 'hls-2500k-16x9':
            video_url2 = video_item['video_data_url']
        else:
            pass
    if video_url2 is None:
        # No preferred encoding found: fall back to the last entry seen.
        video_url2 = video_item['video_data_url']
    # This endpoint is also JSONP-wrapped; the callback name is fixed.
    video_data2 = connection.getURL(
        video_url2 +
        '?format=jsonp&callback=jQuery18303874830141490152_1377946043740')
    video_url3 = simplejson.loads(
        video_data2.replace('jQuery18303874830141490152_1377946043740(',
                            '').replace(')', ''))['url']
    if '.mp4' in video_url3:
        # RTMP MP4: split into base URL and playpath for the player string.
        base_url, playpath_url = video_url3.split('mp4:')
        finalurl = base_url + ' playpath=mp4:' + playpath_url + '?player= swfurl=' + SWFURL % video_data[
            'results'][0]['guid'] + ' swfvfy=true'
    else:
        # HLS master playlist: pick the best variant under the quality cap.
        video_data3 = connection.getURL(video_url3)
        video_url4 = m3u8.parse(video_data3)
        for video_index in video_url4.get('playlists'):
            bitrate = int(video_index.get('stream_info')['bandwidth'])
            if bitrate > hbitrate and bitrate <= sbitrate:
                hbitrate = bitrate
                finalurl = video_url3.rsplit(
                    '/', 1)[0] + '/' + video_index.get('uri')
    xbmcplugin.setResolvedUrl(pluginHandle, True,
                              xbmcgui.ListItem(path=finalurl))
    if (addon.getSetting('enablesubtitles')
            == 'true') and (closedcaption
                            is not None) and (closedcaption != ''):
        # Wait for playback to actually begin before attaching subtitles.
        while not xbmc.Player().isPlaying():
            xbmc.sleep(100)
        xbmc.Player().setSubtitles(ustvpaths.SUBTITLE)
Example #32
0
def process_m3u(m3u_url, qbitrate = None):
	"""Pick a variant from a master m3u8 and return a playable path.

	qbitrate -- exact bandwidth to select; when None, choose the highest
	variant at or below the configured quality cap (falling back to the
	lowest available). Returns either a direct URI or the locally
	rewritten playlist file when the proxy / key download path is needed.
	"""
	key_url = None
	sbitrate = int(addon.getSetting('quality')) * 1024
	lbitrate = -1
	hbitrate = -1
	# Initialise both selections so the fallback test below cannot raise
	# NameError when no variant matches (empty playlist, unmatched qbitrate).
	playpath_url = None
	lplaypath_url = None
	video_data2 = connection.getURL(m3u_url, savecookie = True)
	video_url5 = m3u8.parse(video_data2)
	for video_index in video_url5.get('playlists'):
		bitrate = int(video_index.get('stream_info')['bandwidth'])
		if qbitrate is None:
			# Track the lowest variant (fallback) and the best one that
			# still fits under the configured quality cap.
			if (bitrate < lbitrate or lbitrate == -1):
				lbitrate = bitrate
				lplaypath_url = video_index.get('uri')
			if (bitrate > hbitrate and bitrate <= sbitrate):
				hbitrate = bitrate
				playpath_url = video_index.get('uri')
		elif bitrate == qbitrate:
			playpath_url = video_index.get('uri')
	if playpath_url is None:
		playpath_url = lplaypath_url
	if not common.use_proxy() and int(addon.getSetting('connectiontype')) == 0:
		player._localHTTPServer = False
		return playpath_url
	else:
		m3u_data = connection.getURL(playpath_url, loadcookie = True)
		try:
			# Download the AES key referenced by the variant playlist so it
			# can be served from the local proxy instead.
			key_url = re.compile('URI="(.*?)"').findall(m3u_data)[0]
			key_data = connection.getURL(key_url, loadcookie = True)
			key_file = open(ustvpaths.KEYFILE % '0', 'wb')
			key_file.write(key_data)
			key_file.close()
		except:
			pass
		video_url5 = re.compile('(http:.*?)\n').findall(m3u_data)
		if int(addon.getSetting('connectiontype')) > 0:
			# Rewrite every segment URL to go through the local proxy.
			proxy_config = common.proxyConfig()
			for video_item in video_url5:
				newurl = urllib.quote_plus(base64.b64encode(video_item))
				m3u_data = m3u_data.replace(video_item, 'http://127.0.0.1:12345/proxy/' + newurl + '/' + proxy_config)
		filestring = 'XBMC.RunScript(' + os.path.join(ustvpaths.LIBPATH,'proxy.py') + ', 12345)'
		xbmc.executebuiltin(filestring)
		# Give the proxy script time to come up before playback starts.
		time.sleep(20)
		if key_url is not None:
			m3u_data = m3u_data.replace(key_url, 'http://127.0.0.1:12345/play0.key')
		playfile = open(ustvpaths.PLAYFILE, 'w')
		playfile.write(m3u_data)
		playfile.close()
		return ustvpaths.PLAYFILE
Example #33
0
def episodes_from_html(episode_url=common.args.url, page=1):
    """ Add episodes by analysing the HTML of the page """
    episodes = []
    # Fetch the page on every call; previously this happened only when
    # page == 1, so recursive pagination calls raised NameError on
    # episode_data / episode_tree.
    episode_data = connection.getURL(episode_url)
    episode_tree = None
    if page == 1:
        # The first page may hide the real feed behind a Showcase JS
        # variable or a data-feed attribute; follow it when present.
        try:
            episode_url = re.compile("var .*Showcase.* = '(.*)'").findall(
                episode_data)[0]
            if 'http' not in episode_url:
                episode_url = BASE + episode_url
            episode_data = connection.getURL(episode_url)
        except:
            try:
                episode_tree = BeautifulSoup(episode_data, 'html5lib')
                episode_url = episode_tree.find('div',
                                                class_='content')['data-feed']
                episode_data = connection.getURL(episode_url)
                episode_tree = BeautifulSoup(episode_data, 'html5lib')
            except:
                pass
    if episode_tree is None:
        episode_tree = BeautifulSoup(episode_data, 'html5lib')
    if 'Clips' in common.args.name:
        next = episode_tree.find('a', class_=re.compile('next'))
        episodes = add_video(episode_tree)
        if next is not None:
            try:
                if 'href' in next.attrs:
                    nexturl = next['href'].replace(' ', '+')
                else:
                    # Pagination driven by onclick="loadContent('...')".
                    nexturl = next['onclick'].split(';')[0].replace(
                        "loadContent('", "").replace("')", "")
                if 'http' not in nexturl:
                    nexturl = BASE + nexturl
                if page < int(addon.getSetting('maxpages')):
                    # Fixed: the recursive result was previously discarded,
                    # silently dropping clips beyond the first page.
                    episodes.extend(episodes_from_html(nexturl, page + 1))
            except:
                pass
    else:
        next = episode_tree.find('a', class_=re.compile('next'))
        episodes = add_video(episode_tree, False)
        if next is not None:
            try:
                nexturl = next['href']
                if nexturl[0] == '?':
                    # Bare query string: reuse the current page's path.
                    nexturl = episode_url.split('?')[0] + nexturl
                elif 'http' not in nexturl:
                    nexturl = BASE + nexturl
                if page < int(addon.getSetting('maxpages')):
                    episodes.extend(episodes_from_html(nexturl, page + 1))
            except:
                pass
    return episodes
def list_qualities2(API, video_url = common.args.url):
	"""Return (display, bitrate) pairs for every rendition of the video."""
	playlist_json = connection.getURL(API + 'playlists/%s/videos.json' % video_url)
	playlist = simplejson.loads(playlist_json)
	mgid = playlist['playlist']['videos'][0]['video']['mgid']
	rendition_xml = connection.getURL(VIDEOURLAPI % mgid)
	rendition_tree = BeautifulSoup(rendition_xml, 'html.parser')
	return [(int(node['bitrate']), int(node['bitrate']))
		for node in rendition_tree.findAll('rendition')]
Example #35
0
def list_qualities(video_url=common.args.url):
    """Return (kbps, bitrate) pairs for stream variants above 100 kbps."""
    manifest_page = connection.getURL(video_url + "&manifest=m3u")
    manifest_tree = BeautifulSoup(manifest_page, "html.parser")
    master_url = manifest_tree.find("video", src=True)["src"]
    master_data = connection.getURL(master_url, savecookie=True)
    master = m3u8.parse(master_data)
    bandwidths = [int(entry.get("stream_info")["bandwidth"])
                  for entry in master.get("playlists")]
    return [(bw / 1000, bw) for bw in bandwidths if bw > 100000]
Example #36
0
def list_qualities(video_url = common.args.url):
	"""Return (kbps, bitrate) pairs for stream variants above 100 kbps."""
	manifest_page = connection.getURL(video_url + '&manifest=m3u')
	manifest_tree = BeautifulSoup(manifest_page, 'html.parser')
	master_url = manifest_tree.find('video', src = True)['src']
	master_data = connection.getURL(master_url, savecookie = True)
	master = m3u8.parse(master_data)
	bandwidths = [int(entry.get('stream_info')['bandwidth'])
		for entry in master.get('playlists')]
	return [(bw / 1000, bw) for bw in bandwidths if bw > 100000]
Example #37
0
def list_qualities():
    """List (display, bitrate) choices for common.args.url, or show the
    platform's error message when the SMIL reports an exception."""
    exception = False
    video_url = common.args.url
    bitrates = []
    video_data = connection.getURL(video_url)
    video_tree = BeautifulSoup(video_data, 'html.parser')
    try:
        video_rtmp = video_tree.meta['base']
    except:
        video_rtmp = None
    if 'link.theplatform.com' not in video_url:
        # Page URL: resolve the embedded player's SMIL link first.
        video_tree = BeautifulSoup(video_data, 'html.parser')
        player_url = 'http:' + video_tree.find(
            'div', class_='video-player-wrapper').iframe['src']
        player_data = connection.getURL(player_url)
        player_tree = BeautifulSoup(player_data, 'html.parser')
        video_url = player_tree.find('link',
                                     type="application/smil+xml")['href']
        video_url = video_url + '&format=SMIL'
        video_data = connection.getURL(video_url)
    if video_rtmp is not None:
        # NOTE(review): video_rtmp holds the string value of meta['base'],
        # yet this loop iterates it and indexes ['system-bitrate'] -- that
        # looks broken (iterating characters); confirm against a live RTMP
        # response before relying on this branch.
        for video_index in video_rtmp:
            bitrate = int(video_index['system-bitrate'])
            display = int(bitrate)
            bitrates.append((display, bitrate))
    else:
        # HLS path: read the variant playlists from the m3u manifest form.
        video_data = connection.getURL(video_url + '&manifest=m3u')
        video_tree = BeautifulSoup(video_data, 'html.parser')
        if video_tree.find('param',
                           attrs={
                               'name': 'isException',
                               'value': 'true'
                           }) is None:
            video_url2 = video_tree.seq.find_all('video')[0]
            video_url3 = video_url2['src']
            video_data2 = connection.getURL(video_url3)
            video_url4 = m3u8.parse(video_data2)
            for video_index in video_url4.get('playlists'):
                bitrate = int(video_index.get('stream_info')['bandwidth'])
                try:
                    codecs = video_index.get('stream_info')['codecs']
                except:
                    codecs = ''
                # Display value is the bandwidth in KiB-style units.
                display = int(bitrate) / 1024
                bitrates.append((display, bitrate))
        else:
            exception = True
    if not exception:
        return bitrates
    else:
        common.show_exception(video_tree.ref['title'],
                              video_tree.ref['abstract'])
Example #38
0
def seasons(SITE, FULLEPISODES, CLIPS, FULLEPISODESWEB = None, season_urls = common.args.url):
	"""Build (name, site, mode, url, -1, -1) season entries for each show.

	Queries the full-episode and clips feeds for their totalResults count,
	and when FULLEPISODESWEB is given additionally scrapes the web page
	for full-episode listings (auth-gated items are excluded).
	"""
	seasons = []
	for season_url in season_urls.split(','):
		# Probe the feed with a 0-1 range just to read totalResults.
		season_data = connection.getURL(FULLEPISODES % urllib.quote_plus(season_url) + '&range=0-1')
		try:
			season_menu = int(simplejson.loads(season_data)['totalResults'])
		except:
			season_menu = 0
		if season_menu > 0:
			# Re-request with the full range now that the count is known.
			season_url2 = FULLEPISODES % urllib.quote_plus(season_url) + '&range=0-' + str(season_menu)
			seasons.append(('Full Episodes',  SITE, 'episodes', season_url2, -1, -1))
		if FULLEPISODESWEB:
			try:
				show = season_url.split('/')[-1].replace(' ', '')
				web_data = connection.getURL(FULLEPISODESWEB % show)
				try:
					web_tree = BeautifulSoup(web_data, 'html.parser')
				except:
					web_tree = BeautifulSoup(web_data, 'html5lib')
				# Only list the web page if it has more episode teasers than
				# auth-gated (TV-Everywhere) entries.
				all = len(web_tree.find_all('div', class_ = 'view-mode-vid_teaser_show_episode'))
				auth = len(web_tree.find_all('div', class_ = 'tve-video-auth'))
				if all > auth:
					seasons.append(('Full Episodes',  SITE, 'episodes_web', FULLEPISODESWEB % show, -1, -1))
				else:
					try:
						print "eps block"
						# Collect the distinct season headings that contain
						# 'Full Episode' entries.
						eps = web_tree.find( class_ = 'view-syfy-show-episodes').find_all(text= re.compile('Full Episode'))
						headers = []
						for ep in eps:
							heading = ep.parent.parent.parent.parent.parent.findPrevious('h3').div.string.strip()
							if heading not in headers:
								headers.append(heading)
						for web_item in headers:
							seasons.append(('Season ' +web_item,  SITE, 'episodes_web', FULLEPISODESWEB % show + '/' + web_item, -1, -1))
					except:
						title = web_tree.find( class_ = 'pane-full-episodes-pane-episodes-by-show').h2.string
						seasons.append((title,  SITE, 'episodes_web', FULLEPISODESWEB % show, -1, -1))
			except Exception as e:
				print "Exception with web processing", e
		# Same probe-then-expand pattern for the clips feed.
		season_data2 = connection.getURL(CLIPS % urllib.quote_plus(season_url) + '&range=0-1')
		try:
			season_menu2 = int(simplejson.loads(season_data2)['totalResults'])
		except:
			season_menu2 = 0
		if season_menu2 > 0:
			season_url3 = CLIPS % urllib.quote_plus(season_url) + '&range=0-' + str(season_menu2)
			if ',' in season_urls:
				seasons.append(('Clips %s'%season_url,  SITE, 'episodes', season_url3, -1, -1))
			else:
				seasons.append(('Clips',  SITE, 'episodes', season_url3, -1, -1))
	return seasons
def list_qualities2(API, video_url=common.args.url):
    """Return (display, bitrate) pairs for every rendition of the video."""
    playlist_json = connection.getURL(API +
                                      'playlists/%s/videos.json' % video_url)
    playlist = simplejson.loads(playlist_json)
    mgid = playlist['playlist']['videos'][0]['video']['mgid']
    rendition_xml = connection.getURL(VIDEOURLAPI % mgid)
    rendition_tree = BeautifulSoup(rendition_xml, 'html.parser')
    return [(int(node['bitrate']), int(node['bitrate']))
            for node in rendition_tree.findAll('rendition')]
Example #40
0
def seasons(SITE,
            FULLEPISODES,
            CLIPS,
            FULLEPISODESWEB=None,
            season_urls=common.args.url):
    """Build (name, site, mode, url, -1, -1) season entries for each show.

    Queries the full-episode and clips feeds for their totalResults count;
    when the feed is empty and FULLEPISODESWEB is given, falls back to
    scraping the web page (auth-gated items are excluded).
    """
    seasons = []
    for season_url in season_urls.split(','):
        # Probe the feed with a 0-1 range just to read totalResults.
        season_data = connection.getURL(FULLEPISODES %
                                        urllib.quote_plus(season_url) +
                                        '&range=0-1')
        try:
            season_menu = int(simplejson.loads(season_data)['totalResults'])
        except:
            season_menu = 0
        if season_menu > 0:
            # Re-request with the full range now that the count is known.
            season_url2 = FULLEPISODES % urllib.quote_plus(
                season_url) + '&range=0-' + str(season_menu)
            seasons.append(
                ('Full Episodes', SITE, 'episodes', season_url2, -1, -1))
        elif FULLEPISODESWEB:
            try:
                show = season_url.split('/')[-1].replace(' ', '')
                web_data = connection.getURL(FULLEPISODESWEB % show)
                web_tree = BeautifulSoup(web_data, 'html.parser')
                # Only list the web page if it has more episode teasers
                # than auth-gated (TV-Everywhere) entries.
                all = len(
                    web_tree.find_all(
                        'div', class_='view-mode-vid_teaser_show_episode'))
                auth = len(web_tree.find_all('div', class_='tve-video-auth'))
                if all > auth:
                    seasons.append(('Full Episodes', SITE, 'episodes_web',
                                    FULLEPISODESWEB % show, -1, -1))
            except Exception as e:
                print "Error with web processing", e
        # Same probe-then-expand pattern for the clips feed.
        season_data2 = connection.getURL(CLIPS %
                                         urllib.quote_plus(season_url) +
                                         '&range=0-1')
        try:
            season_menu2 = int(simplejson.loads(season_data2)['totalResults'])
        except:
            season_menu2 = 0
        if season_menu2 > 0:
            season_url3 = CLIPS % urllib.quote_plus(
                season_url) + '&range=0-' + str(season_menu2)
            if ',' in season_urls:
                # Several shows requested: disambiguate the clips entries.
                seasons.append(('Clips %s' % season_url, SITE, 'episodes',
                                season_url3, -1, -1))
            else:
                seasons.append(
                    ('Clips', SITE, 'episodes', season_url3, -1, -1))
    return seasons
Example #41
0
def process_smil(video_url, qbitrate = None):
	"""Resolve a theplatform SMIL into a final stream URL.

	Returns (finalurl, exception); exception is True when the SMIL reports
	an isException error instead of a stream. All selection state is
	initialised up front -- previously lbitrate/hbitrate/sbitrate,
	exception and finalurl could be read before assignment (NameError).
	"""
	closedcaption = None
	exception = False
	finalurl = ''
	lbitrate = -1
	hbitrate = -1
	sbitrate = int(addon.getSetting('quality')) * 1024
	video_data = connection.getURL(video_url)
	video_tree = BeautifulSoup(video_data, 'html.parser')
	video_rtmp = video_tree.meta
	playpath_url = None
	lplaypath_url = None
	try:
		base_url = video_rtmp['base']
	except:
		base_url = None
	if base_url is not None:
		# RTMP delivery: pick a <video> rendition by system-bitrate.
		if qbitrate is None:
			video_url2 = video_tree.switch.find_all('video')
			for video_index in video_url2:
				bitrate = int(video_index['system-bitrate'])
				if bitrate < lbitrate or lbitrate == -1:
					lbitrate = bitrate
					lplaypath_url = video_index['src']
				if bitrate > hbitrate and bitrate <= sbitrate:
					hbitrate = bitrate
					playpath_url = video_index['src']
			if playpath_url is None:
				playpath_url = lplaypath_url
		else:
			playpath_url = video_tree.switch.find('video', attrs = {'system-bitrate' : qbitrate})['src']
		if '.mp4' in playpath_url:
			playpath_url = 'mp4:'+ playpath_url
		else:
			playpath_url = playpath_url.replace('.flv','')
		finalurl = base_url +' playpath=' + playpath_url + ' swfurl=' + SWFURL + ' swfvfy=true'
		player._localHTTPServer = False
	else:
		# HLS delivery: request the m3u manifest form of the SMIL.
		video_data = connection.getURL(video_url + '&manifest=m3u&Tracking=true&Embedded=true&formats=F4M,MPEG4')
		video_tree = BeautifulSoup(video_data, 'html.parser')
		try:
			closedcaption = video_tree.textstream['src']
			player._subtitles_Enabled = True
		except:
			pass
		if (addon.getSetting('enablesubtitles') == 'true') and (closedcaption is not None):
			convert_subtitles(closedcaption)
		if video_tree.find('param', attrs = {'name' : 'isException', 'value' : 'true'}) is None:
			video_url2 = video_tree.body.seq.video
			video_url3 = video_url2['src']
			finalurl = process_m3u(video_url3, qbitrate)
			return finalurl, False
		else:
			exception = True
	return finalurl, exception
Example #42
0
def seasons(season_url = common.args.url):
	"""Create season directories from the show's taxonomy playlist feed."""
	page_data = connection.getURL(season_url)
	page_tree = BeautifulSoup(page_data, 'html.parser', parse_only = SoupStrainer('div'))
	taxonomy_source = page_tree.find('div', id = 'TPVideoPlaylistTaxonomyContainer')['source']
	feed_data = connection.getURL(PLAYLIST % taxonomy_source)
	# Strip the JSONP-style wrapper before parsing.
	feed_data = feed_data.replace('$pdk.NBCplayer.ShowPlayerTaxonomy.GetList(', '').replace(');', '')
	taxonomy = simplejson.loads(feed_data)
	try:
		for entry in taxonomy['playlistTaxonomy']:
			reference = entry['reference']
			common.add_directory(reference['name'], SITE, 'episodes', FEED % reference['feed'])
	except:
		pass
	common.set_view('seasons')
def list_qualities(SITE, BRANDID, PARTNERID):
	"""Return (display, bitrate) choices for the video in common.args.url.

	common.args.url is 'video_id#video_type'. Unauthenticated videos are
	probed through the MP4/MOV playlists; authenticated ones use the
	static BITRATETABLE.
	"""
	video_id, video_type = common.args.url.split('#')
	bitrates = []
	video_auth = get_authorization(BRANDID, video_id, video_type)
	if video_auth is False:
		video_url = VIDEOLIST % BRANDID + '001/-1/-1/-1/' + video_id + '/-1/-1'
		video_data = connection.getURL(video_url)
		try:
			video_data2 = simplejson.loads(video_data)['videos']['video']
			video_format = video_data2['assets']['asset'][0]['@format']
		except:
			try:
				# Single-asset responses use a dict instead of a list.
				video_data2 = simplejson.loads(video_data)['videos']['video']
				video_format = video_data2['assets']['asset']['@format']
			except:
				video_format = 'MOV'
		video_id = video_id.replace('VDKA','')
		if video_format == 'MP4':
			video_url = PLAYLISTMP4 % (PARTNERID, PARTNERID) + video_id
			video_data = connection.getURL(video_url)
			video_url2 = m3u8.parse(video_data)
			for video_index in video_url2.get('playlists'):
				bitrate = int(video_index.get('stream_info')['bandwidth'])
				# Fixed: previously appended to the int 'bitrate' instead of
				# the 'bitrates' list, raising AttributeError.
				bitrates.append((bitrate / 1000, bitrate))
		elif  video_format == 'MOV':
			video_url = PLAYLISTMOV % (PARTNERID, PARTNERID) + video_id
			video_data = connection.getURL(video_url)
			video_tree = BeautifulSoup(video_data, 'html.parser')
			video_url2 = video_tree.findAll('media')
			for video_index in video_url2:
				bitrate = int(video_index['bitrate'])
				bitrates.append((bitrate, bitrate))
	else:
		# Authenticated path: the playable bitrates come from the static
		# BITRATETABLE rather than from the stream itself.
		video_url = VIDEOLIST % BRANDID + '002/-1/-1/-1/' + video_id + '/-1/-1'
		video_data = connection.getURL(video_url)
		for video_keys in BITRATETABLE.iterkeys():
			bitrate = int(video_keys)
			bitrates.append((bitrate, bitrate))
	return bitrates
def seasons(SITE, SEASONSEPISODE, SEASONSCLIPS, EPISODES, CLIPS, season_url=common.args.url):
    """Build directory entries for a show's episode seasons and clip seasons.

    Fetches the season list twice -- once from the episodes feed and once
    from the clips feed -- and returns a list of
    (name, site, mode, url, -1, -1) tuples for the directory builder.
    """
    entries = []
    # Full-episode seasons for the requested show URL.
    episode_json = simplejson.loads(connection.getURL(SEASONSEPISODE % season_url))
    for number in episode_json["season"]:
        entries.append(("Season " + str(number), SITE, "episodes",
                        EPISODES % (season_url, number), -1, -1))
    # Clip seasons always use the URL from the plugin arguments.
    season_url = common.args.url
    clip_json = simplejson.loads(connection.getURL(SEASONSCLIPS % season_url))
    for number in clip_json["season"]:
        entries.append(("Season Clips " + str(number), SITE, "episodes",
                        CLIPS % (season_url, number), -1, -1))
    return entries
def list_qualities(BASE, video_url=common.args.url, media_base=VIDEOURL):
    """List available (display_kbps, bandwidth) bitrate pairs for a video.

    Resolves the site URL through the player redirect and config feed to
    find the HLS master playlist, then reads each variant's bandwidth.
    Shows an error dialog (and implicitly returns None) when the config
    feed reports an error.
    """
    bitrates = []
    if media_base not in video_url:
        video_url = media_base + video_url
    exception = False
    if 'feed' not in video_url:
        # Follow the player redirect and recover the uri/config parameters
        # from its query string.
        swf_url = connection.getRedirect(video_url, header={'Referer': BASE})
        params = dict(
            item.split("=") for item in swf_url.split('?')[1].split("&"))
        uri = urllib.unquote_plus(params['uri'])
        config_url = urllib.unquote_plus(params['CONFIG_URL'].replace(
            'Other', DEVICE))
        config_data = connection.getURL(config_url,
                                        header={
                                            'Referer': video_url,
                                            'X-Forwarded-For': '12.13.14.15'
                                        })
        config_tree = BeautifulSoup(config_data, 'html.parser')
        if not config_tree.error:
            feed_url = config_tree.feed.string
            # Fill in the feed-URL template placeholders.
            feed_url = feed_url.replace('{uri}', uri).replace(
                '&amp;', '&').replace('{device}', DEVICE).replace(
                    '{ref}', 'None').replace('{type}', 'normal').strip()
        else:
            # Error string is split as '.../<code>_<title>_<message>'
            # judging by the indexing below -- confirm against the feed.
            exception = True
            error_text = config_tree.error.string.split('/')[-1].split('_')
            common.show_exception(error_text[1], error_text[2])
    else:
        feed_url = video_url
    if not exception:
        feed_data = connection.getURL(feed_url)
        video_tree = BeautifulSoup(feed_data,
                                   'html.parser',
                                   parse_only=SoupStrainer('media:group'))
        video_segments = video_tree.find_all('media:content')
        # Only the first segment is needed to enumerate the renditions.
        video_segment = video_segments[0]
        video_url3 = video_segment['url'].replace('{device}', DEVICE)
        video_data3 = connection.getURL(
            video_url3, header={'X-Forwarded-For': '12.13.14.15'})
        video_tree3 = BeautifulSoup(video_data3, 'html.parser')
        video_menu = video_tree3.find('src').string
        m3u8_url = None
        m3u_master_data = connection.getURL(video_menu, savecookie=True)
        m3u_master = m3u8.parse(m3u_master_data)
        for video_index in m3u_master.get('playlists'):
            bitrate = int(video_index.get('stream_info')['bandwidth'])
            display = int(bitrate) / 1024
            bitrates.append((display, bitrate))
        return bitrates
# Example #46
# 0
def play_video(SITE):
    """Resolve the stream for the current plugin arguments and play it.

    Picks the HLS variant whose bandwidth best matches the addon quality
    setting (or the exact bitrate the user selected), skipping audio-only
    'mp4a.40.2' renditions, then hands the URL to XBMC/Kodi.
    """
    video_url = common.args.url
    try:
        # Set when the user came through the quality-selection listing.
        qbitrate = common.args.quality
    except:
        qbitrate = None
    hbitrate = -1  # best bitrate found so far within the quality cap
    lbitrate = -1  # lowest bitrate found (fallback if nothing fits the cap)
    sbitrate = int(addon.getSetting('quality'))
    video_data = connection.getURL(video_url)
    # The page embeds the authenticated playlist URL in inline JavaScript.
    smil_url = re.compile('video_auth_playlist_url = "(.*)"').findall(
        video_data)[0]
    smil_data = connection.getURL(smil_url + '&manifest=m3u')
    video_tree2 = BeautifulSoup(smil_data, 'html.parser')
    video_url3 = video_tree2.video['src']
    video_data3 = connection.getURL(video_url3)
    video_url4 = m3u8.parse(video_data3)
    video_url5 = None
    for video_index in video_url4.get('playlists'):
        bitrate = int(video_index.get('stream_info')['bandwidth'])
        if qbitrate is None:
            try:
                codecs = video_index.get('stream_info')['codecs']
            except:
                codecs = ''
            # Track the lowest non-audio-only variant as a fallback.
            if (bitrate < lbitrate
                    or lbitrate == -1) and 'mp4a.40.2' != codecs:
                lbitrate = bitrate
                lvideo_url5 = video_index.get('uri')
            # Track the highest variant under the cap; the *1000 suggests the
            # setting is in kbps while bandwidth is bps -- confirm.
            if bitrate > hbitrate and bitrate <= (
                    sbitrate * 1000) and codecs != 'mp4a.40.2':
                hbitrate = bitrate
                video_url5 = video_index.get('uri')
        elif bitrate == qbitrate:
            video_url5 = video_index.get('uri')
    if video_url5 is None:
        # Nothing fit under the cap; fall back to the lowest variant.
        video_url5 = lvideo_url5
    finalurl = video_url5
    item = xbmcgui.ListItem(path=finalurl)
    if qbitrate is not None:
        # Direct play from a quality selection: supply full metadata.
        item.setThumbnailImage(common.args.thumb)
        item.setInfo(
            'Video', {
                'title': common.args.name,
                'season': common.args.season_number,
                'episode': common.args.episode_number,
                'TVShowTitle': common.args.show_title
            })
    xbmcplugin.setResolvedUrl(pluginHandle, True, item)
# Example #47
# 0
def select_quailty(guid = common.args.url):
	"""List available (display, bitrate) pairs for a video.

	Reads the item feed for *guid*; for the RTMP stream type the bitrates
	come straight from the flash renditions, otherwise the iphone/HLS
	master playlist is parsed, skipping audio-only 'mp4a.40.5' variants.
	(Name misspelling is kept: external callers dispatch on it.)
	"""
	video_url =  VIDEO % guid
	#hbitrate = -1
	#lbitrate = -1
	sbitrate = int(addon.getSetting('quality')) * 1024
	closedcaption = None
	video_url2 = None
	#finalurl = ''
	video_data = connection.getURL(video_url)
	video_menu = simplejson.loads(video_data)['items']
	video_item = video_menu[0] 
	#try:
	#	closedcaption = video_item['captions']['sami']['url']
	#except:
	#	pass
#	if (addon.getSetting('enablesubtitles') == 'true') and (closedcaption is not None) and (closedcaption != ''):
#		convert_subtitles(closedcaption.replace(' ', '+'))
	bitrates = []
	if addon.getSetting('preffered_stream_type') == 'RTMP':
		# Flash renditions carry an explicit bitrate; tolerate entries without one.
		for video in video_item['videos']['flash'].itervalues():
			try:
				bitrate = video['bitrate']
				# if bitrate < lbitrate or lbitrate == -1:
					# lbitrate = bitrate
					# luri = video['url']
				# if bitrate > hbitrate and bitrate <= sbitrate:
					# hbitrate = bitrate
					# uri = video['url']
				# print video
				bitrates.append((bitrate,bitrate))
			except:
				pass
			#print uri,luri
	else:
		# HLS path: resolve the iphone URL to the variant playlist.
		ipad_url = video_item['videos']['iphone']['url']
		video_data2 = connection.getURL(ipad_url + '?format=json')
		video_url3 = simplejson.loads(video_data2)['url']
		video_data3 = connection.getURL(video_url3)
		video_url4 = m3u8.parse(video_data3)
		uri = None
		for video_index in video_url4.get('playlists'):
			try:
				codecs =  video_index.get('stream_info')['codecs']
			except:
				codecs = ''
			if  codecs != 'mp4a.40.5':
				bitrate = int(video_index.get('stream_info')['bandwidth'])
				bitrates.append((int(bitrate) / 1024 , bitrate))
	return bitrates
# Example #48
# 0
def select_quailty(guid=common.args.url):
    """List available (display, bitrate) pairs for a video.

    For the RTMP stream type the bitrates come straight from the flash
    renditions; otherwise the iphone/HLS master playlist is parsed,
    skipping audio-only 'mp4a.40.5' variants.
    """
    item_url = VIDEO % guid
    # Quality cap read for parity with the other sites (not applied here).
    sbitrate = int(addon.getSetting('quality')) * 1024
    feed_items = simplejson.loads(connection.getURL(item_url))['items']
    first_item = feed_items[0]
    bitrates = []
    if addon.getSetting('preffered_stream_type') == 'RTMP':
        # Flash renditions carry an explicit bitrate; skip entries without one.
        for rendition in first_item['videos']['flash'].itervalues():
            try:
                rate = rendition['bitrate']
            except:
                continue
            bitrates.append((rate, rate))
    else:
        # HLS path: resolve the iphone URL to the variant playlist.
        iphone_url = first_item['videos']['iphone']['url']
        playlist_json = connection.getURL(iphone_url + '?format=json')
        variant_url = simplejson.loads(playlist_json)['url']
        master = m3u8.parse(connection.getURL(variant_url))
        for variant in master.get('playlists'):
            try:
                codecs = variant.get('stream_info')['codecs']
            except:
                codecs = ''
            if codecs != 'mp4a.40.5':
                rate = int(variant.get('stream_info')['bandwidth'])
                bitrates.append((int(rate) / 1024, rate))
    return bitrates
def list_qualities():
	"""Return the available (display_kbps, bandwidth) pairs for the video.

	Signs the current video URL, fetches the SMIL metafile to locate the
	HLS master playlist, and reads the bandwidth of every variant stream.
	"""
	video_url = common.args.url
	bitrates = []
	sig = sign_url(video_url)
	# Raw string so '\?' is a regex escape, not an invalid string escape
	# (a DeprecationWarning/SyntaxWarning on Python 3).
	smil_url = re.compile(r'(.+)\?').findall(video_url)[0] + '?switch=hls&assetTypes=medium_video_s3&mbr=true&metafile=true&sig=' + sig
	video_data = connection.getURL(smil_url)
	smil_tree = BeautifulSoup(video_data, 'html.parser')
	video_url2 = smil_tree.video['src']
	m3u_master_data = connection.getURL(video_url2)
	m3u_master = m3u8.parse(m3u_master_data)
	for video_index in m3u_master.get('playlists'):
		bitrate = int(video_index.get('stream_info')['bandwidth'])
		display = int(bitrate) / 1024
		bitrates.append((display, bitrate))
	return bitrates
# Example #50
# 0
def seasons(season_urls = common.args.url):
	"""Build the seasons/collections listing for a show page.

	Tries several page layouts in turn: a 'Videos (N)' tab with a season
	dropdown, an embedded JSON channel list, a hub navigation dropdown,
	and finally 'ss-play' tiles.  Returns (name, SITE, 'episodes', url,
	-1, -1) tuples.
	"""
	seasons = []
	root_url = season_urls
	season_urls = BASE + season_urls
	season_data = connection.getURL(season_urls)
	try:
		# Layout 1: a 'Videos (N)' link leading to a season <option> dropdown.
		season_tree = BeautifulSoup(season_data)
		video_link = BASE + season_tree.find('a', text = re.compile('Videos? \(\d+\)'))['href']
		season_data = connection.getURL(video_link)
		video_tree = BeautifulSoup(season_data)
		season_menu = video_tree.find_all('option')
		if season_menu:
			for season_item in season_menu:
				season_name = season_item.string
				season_url = BASE + season_item['value']
				seasons.append((season_name,  SITE, 'episodes', season_url, -1, -1))
		else:
			# No dropdown: the videos page itself is a single clip listing.
			seasons.append(('Clips',  SITE, 'episodes', video_link, -1, -1))
	except:
		try:
			# Layout 2: channel metadata embedded as JSON in the page source.
			season_title = re.compile('"channels": \[\{\s+"title": "(.*?)",\s+"start": \d+,\s+"end": \d+,\s+"total": \d+,\s+"videos":', re.DOTALL).findall(season_data)[0]
			seasons.append((season_title,  SITE, 'episodes', season_urls, -1, -1))
		except:
			season_tree = BeautifulSoup(season_data)
			try:
				# Layout 3: hub navigation with a 'Videos' dropdown menu.
				dropdown = season_tree.find('nav', class_ ='hub').find('span', text = 'Videos').find_next(class_ = 'dropdown-menu')
				season_menu = dropdown.find_all('a')
				for season_item in season_menu:
					seasons.append((season_item['title'],  SITE, 'episodes', BASE + season_item['href'], -1, -1))
			except:
				pass
			# Layout 4: playable tiles marked with the 'ss-play' class; the
			# title/link live on an ancestor element.
			season_menu = season_tree.find_all(class_ = 'ss-play')
			for season_item in season_menu:
				season_grandparent = season_item.parent.parent.parent
				try:
					try:
						season_name = season_grandparent.img['title']
					except:
						season_name = season_grandparent.h6.string
					try:
						season_url = BASE + season_grandparent['href']
					except:
						season_url = BASE + season_grandparent.a['href']
					# Only keep links that look like show/package/chef pages.
					if 'shows' in season_url or 'packages' in season_url or 'chef' in season_url:
						seasons.append((season_name,  SITE, 'episodes', season_url, -1, -1))
				except:
					pass
	return seasons
# Example #51
# 0
def list_qualities(SITE, BRANDID, PARTNERID):
    """List the available (display, bandwidth) bitrate pairs for a video.

    The plugin URL is '<video_id>#<video_type>'.  Unauthenticated videos
    are resolved through the public playlist feeds (MP4 HLS master or MOV
    media list); authenticated ones fall back to the static BITRATETABLE.
    """
    video_id, video_type = common.args.url.split('#')
    bitrates = []
    video_auth = get_authorization(BRANDID, video_id, video_type)
    if video_auth is False:
        video_url = VIDEOLIST % BRANDID + '001/-1/-1/-1/' + video_id + '/-1/-1'
        video_data = connection.getURL(video_url)
        try:
            # Normal case: the feed returns a list of assets.
            video_data2 = simplejson.loads(video_data)['videos']['video']
            video_format = video_data2['assets']['asset'][0]['@format']
        except:
            try:
                # A single asset comes back as a dict, not a list.
                video_data2 = simplejson.loads(video_data)['videos']['video']
                video_format = video_data2['assets']['asset']['@format']
            except:
                video_format = 'MOV'
        video_id = video_id.replace('VDKA', '')
        if video_format == 'MP4':
            video_url = PLAYLISTMP4 % (PARTNERID, PARTNERID) + video_id
            video_data = connection.getURL(video_url)
            video_url2 = m3u8.parse(video_data)
            for video_index in video_url2.get('playlists'):
                bitrate = int(video_index.get('stream_info')['bandwidth'])
                # BUG FIX: was `bitrate.append(...)` -- calling append on the
                # int raised AttributeError; accumulate into the result list.
                bitrates.append((bitrate / 1000, bitrate))
        elif video_format == 'MOV':
            video_url = PLAYLISTMOV % (PARTNERID, PARTNERID) + video_id
            video_data = connection.getURL(video_url)
            video_tree = BeautifulSoup(video_data, 'html.parser')
            base_url = video_tree('baseurl')[0].string
            video_url2 = video_tree.findAll('media')
            for video_index in video_url2:
                bitrate = int(video_index['bitrate'])
                bitrates.append((bitrate, bitrate))
    else:
        video_url = VIDEOLIST % BRANDID + '002/-1/-1/-1/' + video_id + '/-1/-1'
        video_data = connection.getURL(video_url)
        video_data2 = simplejson.loads(video_data)['videos']['video']
        video_closedcaption = video_data2['closedcaption']['@enabled']
        try:
            video_url2 = video_data2['assets']['asset']['$'] + video_auth
        except:
            # Multiple assets: the stream URL sits at index 1.
            video_url2 = video_data2['assets']['asset'][1]['$'] + video_auth
        video_data3 = connection.getURL(video_url2.replace('m3u8', 'json'))
        video_url3 = simplejson.loads(video_data3)
        # Iterate the dict directly: iterkeys() is Python-2-only and direct
        # iteration yields the same keys on both Python 2 and 3.
        for video_keys in BITRATETABLE:
            bitrate = int(video_keys)
            bitrates.append((bitrate, bitrate))
    return bitrates
# Example #52
# 0
def episodes(episode_url = common.args.url):
	"""Build the episode listing for a show.

	*episode_url* is '<url>#<fullep_flag>'.  Thumbnails are cached under
	DATAPATH/thumbs and only downloaded when not already present in one
	of the XBMC texture databases.
	"""
	try:
		# Best-effort wipe of the previous thumbnail cache.
		shutil.rmtree(os.path.join(ustvpaths.DATAPATH,'thumbs'))
	except:
		pass
	episode_data = connection.getURL(VIDEOLIST % episode_url.split('#')[0])
	episode_menu = simplejson.loads(episode_data)['videos']
	os.mkdir(os.path.join(ustvpaths.DATAPATH,'thumbs'))
	for episode_item in episode_menu:
		# Only list items matching the requested full-episode/clip flag.
		if int(episode_item['fullep']) == int(episode_url.split('#')[1]):
			show_name = episode_item['series_name']
			url = episode_item['guid']
			episode_duration = int(episode_item['duration_secs'])
			episode_plot = episode_item['description_long']
			episode_name = episode_item['title']
			season_number = int(episode_item['season'])
			episode_thumb = episode_item['large_thumbnail']
			thumb_file = episode_thumb.split('/')[-1]
			thumb_path = os.path.join(ustvpaths.DATAPATH, 'thumbs', thumb_file)
			thumbcount = 0
			# Check every texture DB for a cached copy before downloading.
			for name in glob.glob(os.path.join(ustvpaths.DBPATH,'textures[0-9]*.db')):
				thumbcount = thumbcount + database.execute_command('select count(1) from texture where url = ?', [thumb_path,], fetchone = True, dbfile = name)[0]
			if thumbcount == 0:
				thumb_data = connection.getURL(episode_thumb)
				# Context manager instead of open/close: guarantees the
				# handle closes on error and stops shadowing builtin 'file'.
				with open(thumb_path, 'wb') as thumb_handle:
					thumb_handle.write(thumb_data)
			try:
				# 'episode' is the season number concatenated with the
				# episode number; strip the season prefix.
				episode_number = int(episode_item['episode'][len(str(season_number)):])
			except:
				episode_number = -1
			try:
				episode_airdate = common.format_date(episode_item['airdate'],'%Y-%b-%d', '%d.%m.%Y')
			except:
				episode_airdate = -1
			u = sys.argv[0]
			u += '?url="' + urllib.quote_plus(url) + '"'
			u += '&mode="' + SITE + '"'
			u += '&sitemode="play_video"'
			infoLabels={	'title' : episode_name,
							'durationinseconds' : episode_duration,
							'season' : season_number,
							'episode' : episode_number,
							'plot' : episode_plot,
							'premiered' : episode_airdate,
							'tvshowtitle': show_name }
			common.add_video(u, episode_name, thumb_path, infoLabels = infoLabels)
	common.set_view('episodes')
def sign_url(url):
    """Fetch a signed signature for *url* from the signing service.

    Extracts the path segment after '/s/' or '/z/' (up to '?') and passes
    it as the 'url' query parameter; returns the service response body.
    """
    # Raw string so '\?' is a regex escape, not an invalid string escape
    # (a DeprecationWarning/SyntaxWarning on Python 3).
    query = {'url': re.compile(r'/[sz]/(.+)\?').findall(url)[0]}
    encoded = urllib.urlencode(query)
    sig = connection.getURL(
        'http://servicesaetn-a.akamaihd.net/jservice/video/components/get-signed-signature?'
        + encoded)
    return sig
def convert_subtitles(closedcaption):
    """Convert a TTML/DFXP caption document at *closedcaption* to SRT.

    Writes the result to ustvpaths.SUBTITLE and returns True.  Caption
    lines repeating the previous timing are appended to the previous cue
    instead of starting a new one.
    """
    str_output = ''
    subtitle_data = connection.getURL(closedcaption, connectiontype=0)
    subtitle_data = BeautifulSoup(subtitle_data,
                                  'html.parser',
                                  parse_only=SoupStrainer('div'))
    # Removed unused 'srt_output' local from the original.
    lines = subtitle_data.find_all('p')
    i = 0
    last_start_time = ''
    last_end_time = ''
    for line in lines:
        try:
            if line is not None:
                sub = clean_subs(common.smart_utf8(line))
                # SRT uses ',' as millisecond separator where TTML uses '.'.
                start_time = common.smart_utf8(line['begin'].replace('.', ','))
                end_time = common.smart_utf8(line['end'].replace('.', ','))
                if start_time != last_start_time and end_time != last_end_time:
                    str_output += '\n' + str(
                        i + 1
                    ) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n'
                    i = i + 1
                    last_end_time = end_time
                    last_start_time = start_time
                else:
                    # Same timing as the previous cue: continuation line.
                    str_output += sub + '\n\n'
        except:
            # Best-effort: skip malformed caption entries.
            pass
    # Context manager instead of open/close: guarantees the handle closes
    # on a write error and stops shadowing builtin 'file'.
    with open(ustvpaths.SUBTITLE, 'w') as srt_file:
        srt_file.write(str_output)
    return True
# Example #55
# 0
def episodes_powernation(episode_url):
    """Build the episode listing for a PowerNation feed.

    Returns a list of (url, name, thumb, infoLabels, None, False, None)
    tuples ready for the directory builder.
    """
    listing = []
    feed = json.loads(connection.getURL(episode_url))
    for entry in feed['episodes']:
        meta = entry['description']
        name = meta['title']
        episode_id = meta['episode_id']

        # Episode ids look like '<season>-<episode>'.
        season_num, episode_num = episode_id.split('-')

        plugin_url = (sys.argv[0]
                      + '?url="' + PN_URL + episode_id + '"'
                      + '&mode="' + SITE + '"'
                      + '&sitemode="play_video"')
        infoLabels = {
            'title': name,
            'plot': meta['description_long'],
            'TVShowTitle': meta['show_name'],
            'season': meta['season'],
            'episode': episode_num
        }

        listing.append(
            (plugin_url, name, meta['image_url'], infoLabels, None, False, None))
    return listing
# Example #56
# 0
def episodes(episode_url = common.args.url):
	"""Build the episode/clip listing from a JSON video feed.

	Fetches *episode_url* with the site AUTH header and returns
	(url, name, thumb, infoLabels, 'list_qualities', False, 'Clip')
	tuples for the directory builder.
	"""
	episodes = []
	episode_data = connection.getURL(episode_url, header = AUTH)
	episode_menu = simplejson.loads(episode_data)['videos']
	for episode_item in episode_menu:
		episode_airdate = common.format_date(episode_item['airDate'],'%Y-%m-%d', '%d.%m.%Y')
		url = episode_item['ios_video_url']
		episode_duration = int(episode_item['duration'])
		episode_plot = episode_item['description']
		episode_name = episode_item['name']
		# Season/episode numbers and thumbnail are optional in the feed.
		try:
			season_number = episode_item['season']
		except:
			season_number = -1
		try:
			episode_number = episode_item['episode']
		except:
			episode_number = -1
		try:
			episode_thumb = episode_item['img_url'].replace(' ', '%20')
		except:
			episode_thumb = None
		u = sys.argv[0]
		u += '?url="' + urllib.quote_plus(url) + '"'
		u += '&mode="' + SITE + '"'
		u += '&sitemode="play_video"'
		infoLabels={	'title' 			: episode_name,
						'durationinseconds' : episode_duration,
						'season' 			: season_number,
						'episode' 			: episode_number,
						'plot' 				: episode_plot,
						'premiered' 		: episode_airdate }
		episodes.append((u, episode_name, episode_thumb, infoLabels,  'list_qualities', False, 'Clip'))
	return episodes
# Example #57
# 0
# File: nbc.py Project: MarkTV/MarkTV
def convert_subtitles(closedcaption):
	str_output = ''
	subtitle_data = connection.getURL(closedcaption, connectiontype = 0)
	subtitle_data = BeautifulSoup(subtitle_data, 'html.parser', parse_only = SoupStrainer('div'))
	srt_output = ''
	lines = subtitle_data.find_all('p')
	i = 0
	last_start_time = ''
	last_end_time = ''
	for line in lines:
		try:
			if line is not None:
				sub = clean_subs(common.smart_utf8(line))
				start_time = common.smart_utf8(line['begin'].replace('.', ','))
				end_time = common.smart_utf8(line['end'].replace('.', ','))
				if start_time != last_start_time and end_time != last_end_time:
					str_output += '\n' + str(i + 1) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n'
					i = i + 1
					last_end_time = end_time
					last_start_time = start_time
				else:
					str_output +=  sub + '\n\n'
		except:
			pass
	file = open(ustvpaths.SUBTITLE, 'w')
	file.write(str_output)
	file.close()