def get_video_url(params):
    """Get video URL and start video player.

    Routes on ``params.next``: live ('play_l') and news replays resolve
    through YouTube, sports replays go through the Ooyala authorization
    API (embed token -> pcode -> base64-encoded stream URL), and
    'download_video' resolves YouTube in download mode.
    """
    if params.next == 'play_l':
        return resolver.get_stream_youtube(params.video_id, False)
    elif params.next == 'play_r_news':
        return resolver.get_stream_youtube(params.video_id, False)
    elif params.next == 'play_r_sports':
        data_embed_token = utils.get_webcontent(
            URL_PCODE_EMBED_TOKEN)
        # The provider code is embedded in the token URL path.
        pcode = re.compile(
            'sas/embed_token/(.*?)/all').findall(data_embed_token)[0]
        # NOTE(review): the whole token document (quotes stripped) is
        # URL-encoded and sent as the embed token -- confirm the API
        # really expects the full string rather than just the token.
        data_embed_token = urllib.quote_plus(
            data_embed_token.replace('"', ''))
        video_vod = utils.get_webcontent(
            URL_OOYALA_VOD % (pcode, params.video_id, data_embed_token))
        json_parser = json.loads(video_vod)
        # The stream URL is delivered base64-encoded in the
        # authorization data; return the first one decoded.
        if 'streams' in json_parser["authorization_data"][params.video_id]:
            for stream in json_parser["authorization_data"][params.video_id]["streams"]:
                url_base64 = stream["url"]["data"]
                return base64.standard_b64decode(url_base64)
        else:
            return None
    elif params.next == 'download_video':
        return resolver.get_stream_youtube(params.video_id, True)
def get_video_url(params):
    """Get video URL and start video player.

    Replays are hosted either on Dailymotion (resolved through the
    shared resolver) or on a JSONP playlist endpoint exposing M3u8
    entries.
    """
    if params.next == 'play_r' or params.next == 'download_video':
        url = ''
        html_video = utils.get_webcontent(params.url_video)
        if 'dailymotion' in html_video:
            video_id = re.compile(
                r'www.dailymotion.com/embed/video/(.*?)[\?\"]').findall(
                    html_video)[0]
            if params.next == 'download_video':
                return resolver.get_stream_dailymotion(video_id, True)
            else:
                return resolver.get_stream_dailymotion(video_id, False)
        else:
            # get videoId and accountId from the embed URL
            videoId, accountId = re.compile(
                r'embed/(.*?)/(.*?)/').findall(html_video)[0]
            html_json = utils.get_webcontent(
                URL_VIDEO_REPLAY % (videoId, accountId))
            # The endpoint answers JSONP; strip the callback wrapper.
            html_json_2 = re.compile(r'\((.*?)\);').findall(html_json)[0]
            json_parser = json.loads(html_json_2)
            # NOTE(review): keeps the LAST M3u8 URL found across all
            # playlists -- confirm that is the intended quality choice.
            for playlist in json_parser['Playlist']:
                datas_video = playlist['MediaFiles']['M3u8']
                for data in datas_video:
                    url = data['Url']
            return url
    elif params.next == 'play_l':
        return resolver.get_stream_dailymotion(params.video_id, False)
def get_video_url(params):
    """Get video URL and start video player.

    Extracts the page's media_id, queries the stream API, then picks
    the mobile URL matching the running Kodi version (MP4 on Jarvis,
    HLS on Krypton and later). Returns '' when nothing is found.
    """
    if params.next == 'play_r' or params.next == 'download_video':
        if 'http' in params.video_url:
            video_html = utils.get_webcontent(
                params.video_url)
            stream_id = re.compile(
                '\'media_id\' : "(.*?)"').findall(video_html)[0]
            streams_jsonparser = json.loads(
                utils.get_webcontent(URL_STREAM % stream_id))
            url = ''
            if 'mediaList' in streams_jsonparser:
                # Case Jarvis (xbmc python API 2.24.0): plain MP4
                if common.sp.xbmc.__version__ == '2.24.0':
                    for stream in streams_jsonparser["mediaList"][0]["mobileUrls"]:
                        if 'MobileH264' in stream["targetMediaPlatform"]:
                            url = stream["mobileUrl"]
                # Case Krypton and later: HLS
                else:
                    for stream in streams_jsonparser["mediaList"][0]["mobileUrls"]:
                        if 'HttpLiveStreaming' in stream["targetMediaPlatform"]:
                            url = stream["mobileUrl"]
                return url
            else:
                return ''
        else:
            return ''
def get_video_url(params):
    """Get video URL and start video player.

    Collects the clip's non-DRM assets, then applies the user's
    'quality' setting: DIALOG asks the user, BEST picks the last
    non-'lq' asset, anything else takes the first asset found.
    """
    video_json = utils.get_webcontent(
        URL_JSON_VIDEO % (params.video_id), random_ua=True)
    json_parser = json.loads(video_json)
    video_assets = json_parser['clips'][0]['assets']
    if video_assets is None:
        # No assets at all: notify the user and bail out.
        utils.send_notification(common.ADDON.get_localized_string(30712))
        return ''
    # Known asset types:
    #   "primetime_phls_h264" => DRM protected (m3u8)
    #   "usp_hls_h264"        => not DRM protected (m3u8)
    #   "usp_dashcenc_h264"   => not supported by Kodi (MPD)
    #   "usp_hlsfp_h264"      => DRM protected (m3u8)
    #   "http_h264"           => not DRM protected (mp4); video_quality
    #                            is one of sd / hq / hd / lq (3G)
    #   "http_subtitle_vtt_sm"=> subtitle (English TV shows)
    desired_quality = common.PLUGIN.get_setting('quality')
    all_datas_videos_quality = []
    all_datas_videos_path = []
    for asset in video_assets:
        if 'http_h264' in asset["type"]:
            # Plain MP4: always usable.
            all_datas_videos_quality.append(asset["video_quality"])
            all_datas_videos_path.append(
                asset['full_physical_path'].encode('utf-8'))
        elif 'h264' in asset["type"]:
            # HLS variant: usable only when the manifest is DRM-free.
            manifest = utils.get_webcontent(
                asset['full_physical_path'].encode('utf-8'),
                random_ua=True)
            if 'drm' not in manifest:
                all_datas_videos_quality.append(asset["video_quality"])
                all_datas_videos_path.append(
                    asset['full_physical_path'].encode('utf-8'))
    if len(all_datas_videos_quality) == 0:
        # Every asset was DRM protected or unsupported.
        utils.send_notification(common.ADDON.get_localized_string(30702))
        return ''
    elif len(all_datas_videos_quality) == 1:
        return all_datas_videos_path[0]
    else:
        if desired_quality == "DIALOG":
            seleted_item = common.sp.xbmcgui.Dialog().select(
                common.GETTEXT('Choose video quality'),
                all_datas_videos_quality)
            if seleted_item == -1:
                return ''
            return all_datas_videos_path[seleted_item]
        elif desired_quality == "BEST":
            # NOTE(review): keeps the LAST asset whose quality label is
            # not 'lq' -- relies on asset ordering; confirm that is the
            # intended "best".
            url_best = ''
            i = 0
            for data_video in all_datas_videos_quality:
                if 'lq' not in data_video:
                    url_best = all_datas_videos_path[i]
                i = i + 1
            return url_best
        else:
            return all_datas_videos_path[0]
def list_videos_categories(params):
    """Build videos categories listing.

    Scrapes the program's '/videos' page for its filter list; for each
    category, fetches the first page to discover the last page number
    (taken from the double-chevron pagination link), then emits one
    listing entry per category.
    """
    videos_categories = []
    url = ''.join((
        params.program_url,
        '/videos'))
    program_html = utils.get_webcontent(url)
    program_soup = bs(program_html, 'html.parser')
    filters_1_soup = program_soup.find(
        'ul', class_='filters_1')
    if filters_1_soup is not None:
        for li in filters_1_soup.find_all('li'):
            category_title = li.get_text().encode('utf-8')
            category_id = li.find('a')['data-filter'].encode('utf-8')
            # Get the last page of each category.
            # First fetch the category's first page:
            url_first_page = ''.join((
                params.program_url,
                '/videos',
                '?filter=',
                category_id))
            program_first_page_html = utils.get_webcontent(url_first_page)
            program_first_page_soup = bs(
                program_first_page_html, 'html.parser')
            # Then read the last-page link (default '0' when the
            # category has a single page and no pagination link).
            last_page = '0'
            if program_first_page_soup.find(
                    'a',
                    class_='icon i-chevron-right-double trackXiti') is not None:
                last_page = program_first_page_soup.find(
                    'a',
                    class_='icon i-chevron-right-double trackXiti'
                ).get('href').rsplit('/')[-1].split('?')[0]
            videos_categories.append({
                'label': category_title,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    program_url=params.program_url,
                    page='1',
                    last_page=last_page,
                    next='list_videos',
                    window_title=category_title,
                    category_id=category_id
                )
            })
    return common.PLUGIN.create_listing(
        videos_categories,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
            common.sp.xbmcplugin.SORT_METHOD_LABEL
        ),
        category=common.get_window_title()
    )
def get_video_url(params):
    """Get video URL and start video player.

    Scrapes the page's itemId, then asks the stream API for the first
    rendition's source URL.
    """
    if params.next in ('play_r', 'download_video'):
        page = utils.get_webcontent(params.video_url)
        item_id = re.compile(r'itemId":"(.*?)"').findall(page)[0]
        stream_doc = utils.get_webcontent(URL_STREAM % item_id)
        stream = json.loads(stream_doc)
        return stream["package"]["video"]["item"][0]["rendition"][0]["src"]
def get_video_url(params):
    """Get video URL and start video player.

    Live playback passes the URL straight through; replays scrape the
    video's uvid, fetch the site API key, then read the HLS URL from
    the stream endpoint.
    """
    if params.next == 'play_l':
        return params.url
    if params.next in ('play_r', 'download_video'):
        page = utils.get_webcontent(params.video_url)
        uvid = re.compile('data-uvid="(.*?)"').findall(page)[0]
        key_page = utils.get_webcontent(URL_API_KEY)
        api_key = re.compile('"apiKey": "(.*?)"').findall(key_page)[0]
        stream_page = utils.get_webcontent(URL_STREAM % (api_key, uvid))
        return re.compile('"hls":"(.*?)"').findall(stream_page)[0]
def get_video_url(params):
    """Get video URL and start video player.

    Scrapes the page's item_longId, then reads the stream source out
    of the XML answer of the stream endpoint.
    """
    if params.next in ('play_r', 'download_video'):
        page = utils.get_webcontent(params.video_url)
        long_id = re.compile(r'item_longId" \: "(.*?)"').findall(page)[0]
        stream_xml = utils.get_webcontent(URL_STREAM % long_id)
        return re.compile(r'src\>(.*?)\<').findall(stream_xml)[0]
def get_video_url(params):
    """Get video URL and start video player.

    Scrapes the intermediate player URL loaded via jQuery.get; for
    downloads that URL is returned directly, for playback the final
    src is scraped from the player page.
    """
    if params.next in ('play_r', 'download_video'):
        page = utils.get_webcontent(params.video_url)
        media_url = re.compile(r'jQuery.get\("(.*?)"').findall(page)[0]
        if params.next == 'download_video':
            return media_url
        player_page = utils.get_webcontent(media_url)
        return re.compile(r'src="(.*?)"').findall(player_page)[0]
def get_live_item(params):
    """Build the list of live-TV items, one per town in LIVES_TOWN."""
    lives = []
    title = ''
    plot = ''
    duration = 0
    img = ''
    url_live = ''
    # Python 2 dict iteration; LIVES_TOWN maps town name -> live id.
    for town_name, live_id in LIVES_TOWN.iteritems():
        title = 'YES TV : ' + town_name + ' Live TV'
        live_html = utils.get_webcontent(
            URL_LIVE % live_id)
        # The live page embeds a player iframe; its src is completed
        # with the live id to reach the actual player page.
        url_live_2 = re.compile(
            'iframe src="(.*?) "').findall(live_html)[0]
        url_live_2 = url_live_2 + live_id
        live_html_2 = utils.get_webcontent(url_live_2)
        # The regex stops before the closing bracket; restore it so
        # the sources array parses as JSON.
        live_json = re.compile(
            'sources\:(.*?)\]\,').findall(live_html_2)[0]
        live_jsonpaser = json.loads(live_json + ']')
        url_live = 'http:' + live_jsonpaser[0]["file"]
        info = {
            'video': {
                'title': title,
                'plot': plot,
                'duration': duration
            }
        }
        lives.append({
            'label': title,
            'fanart': img,
            'thumb': img,
            'url': common.PLUGIN.get_url(
                action='start_live_tv_stream',
                next='play_l',
                module_name=params.module_name,
                module_path=params.module_path,
                url=url_live,
            ),
            'is_playable': True,
            'info': info
        })
    return lives
def get_live_item(params):
    """Build the single live-TV item for this channel.

    The stream URL and the poster image are both scraped from the live
    page.
    """
    page = utils.get_webcontent(URL_LIVE)
    stream_url = re.compile('source src=\"(.*?)\"').findall(page)[0]
    poster = re.compile('poster=\"(.*?)\"').findall(page)[0]
    item_info = {
        'video': {
            'title': params.channel_label,
            'plot': '',
            'duration': 0
        }
    }
    return {
        'label': params.channel_label,
        'thumb': poster,
        'url': common.PLUGIN.get_url(
            module_path=params.module_path,
            module_name=params.module_name,
            action='start_live_tv_stream',
            next='play_l',
            url_live=stream_url
        ),
        'is_playable': True,
        'info': item_info
    }
def get_live_item(params):
    """Build the single live-TV item for this channel.

    The dacast live id is scraped from the embedded iframe URL and
    forwarded to the stream starter.
    """
    page = utils.get_webcontent(URL_LIVE)
    dacast_id = re.compile('iframe.dacast.com\/(.*?)"').findall(page)[0]
    item_info = {
        'video': {
            'title': params.channel_label,
            'plot': '',
            'duration': 0
        }
    }
    return {
        'label': params.channel_label,
        'fanart': '',
        'thumb': '',
        'url': common.PLUGIN.get_url(
            module_path=params.module_path,
            module_name=params.module_name,
            action='start_live_tv_stream',
            next='play_l',
            live_id=dacast_id,
        ),
        'is_playable': True,
        'info': item_info
    }
def getVideoURL(channel,id):
    """Return the stream URL whose format matches the channel's
    configured quality setting (None when no format matches)."""
    catalog = json.loads(utils.get_webcontent(showInfo % (id)))
    wanted_quality = globalvar.ADDON.getSetting('%sQuality' % (channel))
    for video in catalog['videos']:
        if video['format'] == wanted_quality:
            return video['url']
def list_videos(channel,folder):
    """Build the video list for one program folder of a channel.

    Reads the cached channel catalog, keeps the emissions whose
    programme/emission id matches *folder*, then enriches each one
    with the per-diffusion details endpoint.
    Returns a list of [channel, id_diffusion, title, image,
    infoLabels, 'play'] rows.
    """
    videos = []
    uniqueItem = dict()
    filePath = utils.downloadCatalog(channelCatalog % (channel),'%s.json' % (channel),False,{})
    filPrgm = open(filePath).read()
    jsonParser = json.loads(filPrgm)
    # 'reponse' is the (French) key used by the upstream API.
    emissions = jsonParser['reponse']['emissions']
    for emission in emissions:
        titre = ''
        plot = ''
        duration = '0'
        # Fall back to the emission id when no programme id is set.
        id = emission['id_programme'].encode('utf-8')
        if id == '':
            id = emission['id_emission'].encode('utf-8')
        if id == folder:
            id_diffusion = emission['id_diffusion']
            filPrgm = utils.get_webcontent(showInfo % (emission['id_diffusion']))
            if (filPrgm != ''):
                jsonParserShow = json.loads(filPrgm)
                if jsonParserShow['synopsis']:
                    plot = jsonParserShow['synopsis'].encode('utf-8')
                date = jsonParserShow['diffusion']['date_debut']
                # NOTE(review): /50 presumably converts the API's
                # duration unit to seconds -- confirm against the API.
                if jsonParserShow['real_duration'] != None:
                    duration = jsonParserShow['real_duration']/50
                if jsonParserShow['titre']:
                    titre = jsonParserShow['titre'].encode('utf-8')
                if jsonParserShow['sous_titre']:
                    titre += ' - ' + jsonParserShow['sous_titre'].encode('utf-8')
                image = imgURL % (jsonParserShow['image'])
                # date is 'dd/mm/yyyy ...' so [6:10] is the year.
                infoLabels = {"Title": titre,"Plot":plot,"Aired":date,"Duration": duration, "Year":date[6:10]}
                if jsonParserShow['genre'] != '':
                    infoLabels['Genre'] = jsonParserShow['genre'].encode('utf-8')
                videos.append([channel, id_diffusion, titre, image,infoLabels,'play'])
    return videos
def get_video_url(params):
    """Get video URL and start video player.

    The page's quality tabs reference either a jwplayer block (mp4),
    a YouTube embed, or a FranceTV iframe (resolved with youtube-dl).
    When several qualities exist the user picks one from a dialog.
    """
    url_selected = ''
    all_datas_videos_quality = []
    all_datas_videos_path = []
    videos_html = utils.get_webcontent(params.video_url)
    videos_soup = bs(videos_html, 'html.parser')
    list_videos = videos_soup.find(
        'ul', class_='nav nav-tabs').find_all('a')
    for video in list_videos:
        if '#video-' in video.get('href'):
            # TODO: find a better solution than strip() for the label.
            all_datas_videos_quality.append(video.get_text().strip())
            value_jwplayer_id = video.get('data-jwplayer-id')
            # Case mp4: jwplayer block carries the source directly.
            if value_jwplayer_id != '':
                list_streams = videos_soup.find_all(
                    'div', class_='jwplayer')
                for stream in list_streams:
                    if stream.get('id') == value_jwplayer_id:
                        url = stream.get('data-source')
            # Case YouTube embed.
            else:
                video_id = re.compile(
                    'youtube.com/embed/(.*?)\?').findall(videos_html)[0]
                url = resolver.get_stream_youtube(video_id, False)
            all_datas_videos_path.append(url)
        # Case FranceTV player: resolve the iframe URL via youtube-dl.
        elif '#ftv-player-' in video.get('href'):
            # TODO: find a better solution than strip() for the label.
            all_datas_videos_quality.append(video.get_text().strip())
            value_ftvlayer_id = video.get('data-ftvplayer-id')
            list_streams = videos_soup.find_all(
                'iframe', class_='embed-responsive-item')
            for stream in list_streams:
                if stream.get('id') == value_ftvlayer_id:
                    url_id = stream.get('src')
            ydl = YoutubeDL()
            ydl.add_default_info_extractors()
            with ydl:
                result = ydl.extract_info(
                    url_id, download=False)
                # NOTE(review): keeps the LAST format listed -- relies
                # on youtube-dl ordering formats worst-to-best.
                for format_video in result['formats']:
                    url = format_video['url']
            all_datas_videos_path.append(url)
    if len(all_datas_videos_quality) > 1:
        seleted_item = common.sp.xbmcgui.Dialog().select(
            common.GETTEXT('Choose video quality'),
            all_datas_videos_quality)
        if seleted_item == -1:
            return ''
        url_selected = all_datas_videos_path[seleted_item]
        return url_selected
    else:
        return all_datas_videos_path[0]
def get_video_url(params):
    """Get video URL and start video player.

    'play_r' scrapes the m3u8 among the page's file entries;
    'play_r_elle_girl_tv' resolves the Dailymotion embed; downloads
    use Dailymotion when embedded, the m3u8 scrape otherwise.
    """
    video_html = utils.get_webcontent(params.video_url)
    if params.next == 'play_r':
        # Pick the last m3u8 among the page's file: "..." entries.
        video_urls = re.compile(
            'file: \"(.*?)\"').findall(video_html)
        stream_url = ''
        for video_url in video_urls:
            if 'm3u8' in video_url:
                stream_url = video_url
        return stream_url
    elif params.next == 'play_r_elle_girl_tv':
        # Get the Dailymotion video id from the embed URL.
        video_id = re.compile(
            r'embed/video/(.*?)[\"\?\']').findall(
                video_html)[0]
        return resolver.get_stream_dailymotion(video_id, False)
    elif params.next == 'download_video':
        if 'dailymotion.com/embed' in video_html:
            video_id = re.compile(
                r'embed/video/(.*?)[\"\?\']').findall(
                    video_html)[0]
            return resolver.get_stream_dailymotion(video_id, True)
        else:
            # Same m3u8 scrape as the 'play_r' branch.
            video_urls = re.compile(
                'file: \"(.*?)\"').findall(video_html)
            stream_url = ''
            for video_url in video_urls:
                if 'm3u8' in video_url:
                    stream_url = video_url
            return stream_url
def getVideoURL(channel,id):
    """Return the first stream URL whose path contains *id* without
    its three-character prefix."""
    page = utils.get_webcontent(urlVideo % id)
    pattern = "file:\"(.*\/" + id[3:len(id)] + "\/.*)\""
    return re.findall(pattern, page)[0]
def get_timestamp():
    """Fetch the WAT server time and return it as an int.

    The endpoint answers '<timestamp>|...'; only the part before the
    first '|' is kept. (Python 2 code: print statements.)
    """
    soup = utils.get_webcontent('http://www.wat.tv/servertime')
    html = soup.decode("utf-8")
    print 'GET TIMESTAMP : '+html.encode('utf-8')
    timestamp = html.split(u"""|""")[0].encode("utf-8")
    print 'TIMESTAMP :'+timestamp
    return int(timestamp)
def get_video_url(params):
    """Get video URL and start video player.

    Parses the page's addPlayer(...) call to recover the Brightcove
    account, player and video ids, then picks an m3u8 source from the
    Brightcove JSON.
    """
    if params.next == 'download_video':
        return params.video_url
    elif params.next == 'play_r':
        video_html = utils.get_webcontent(params.video_url)
        # Extract the comma-separated addPlayer(...) argument list.
        video_data = video_html.split(
            'addPlayer(')[1].split(
                ');')[0].replace(
                    "\n", "").replace("\r", "").split(',')
        data_account = video_data[0].strip().replace("'", "")
        data_player = video_data[1].strip().replace("'", "")
        # The 'tx' module passes the raw id; others use a 'ref:' id.
        if params.module_name == 'tx':
            data_video_id = video_data[4].strip().replace("'", "")
        else:
            data_video_id = 'ref:' + video_data[4].strip().replace("'", "")
        json_parser = resolver.get_brightcove_video_json(
            data_account, data_player, data_video_id)
        # Keep the last m3u8 source listed.
        video_url = ''
        for url in json_parser["sources"]:
            if 'm3u8' in url["src"]:
                video_url = url["src"]
        return video_url
def getVideoURL(channel,video_id):
    """Return the HLS stream URL for the video, falling back to the
    IPAD variant when the HLS entry is empty."""
    info_url = '%s/video/%s/%s' % (url_pg_infos, channel_index[channel], video_id)
    infos = json.loads(utils.get_webcontent(info_url))
    streams = infos['main']['MEDIA']['VIDEOS']
    stream_url = streams['HLS']
    if stream_url == '':
        stream_url = streams['IPAD']
    return stream_url
def get_video_url(params):
    """Get video URL and start video player.

    'play_r' reads the data-broadcast JSON attribute,
    'play_r_tivi5monde' scrapes the contentUrl metadata, and
    'play_l' forwards the live URL untouched.
    """
    if params.next == 'play_l':
        return params.live_url
    if params.next == 'play_r':
        page = utils.get_webcontent(params.video_url)
        broadcast = json.loads(
            re.compile('data-broadcast=\'(.*?)\'').findall(page)[0])
        return broadcast["files"][0]["url"]
    if params.next == 'play_r_tivi5monde':
        page = utils.get_webcontent(params.video_url)
        return re.compile('contentUrl\"\: \"(.*?)\"').findall(page)[0]
def getVideoURL(channel,assetId):
    """Return the HD URL of the asset's HTTP-HLS playlist (None when
    no HD entry exists)."""
    catalog = json.loads(utils.get_webcontent(urlVideo % assetId))
    for playlist in catalog['Video']['Playlists']['Playlist']:
        if playlist['@protocol'] != 'HTTP-HLS':
            continue
        for entry in playlist['url']:
            if entry['@quality'] == 'HD':
                return entry['text']
def get_mediaId(url) :
    """Return the first media id found on the page, or None."""
    html = utils.get_webcontent(url).decode("utf-8")
    matches = re.findall('mediaId_(.*?)"',html)
    return matches[0] if matches else None
def get_video_url(params):
    """Get video URL and start video player.

    The stream URL is taken from the page's og:video meta property.
    """
    page = utils.get_webcontent(params.video_url)
    return re.compile(
        r'property=\"og\:video\" content=\"(.*?)\"').findall(page)[0]
def get_video_desc(url):
    """Return the video description scraped from the page.

    Drills down: featured block -> text-infos -> text -> first <p>.
    """
    soup = utils.get_webcontent(url)
    html = soup.decode("utf-8")
    encart_titre_mea_mea_video = common.parseDOM(html,"div",attrs={"class":u"encart_titre_mea mea_video"})[0]
    text_infos = common.parseDOM(encart_titre_mea_mea_video,"div",attrs={"class":u"text-infos"})[0]
    text = common.parseDOM(text_infos,"div",attrs={"class":"text"})[0]
    video_desc = common.parseDOM(text,"p")[0]
    return video_desc.encode("utf-8")
def start_live_tv_stream(params):
    """Scrape the Dailymotion id of the live stream and delegate to
    get_video_url in live mode."""
    page = utils.get_webcontent(URL_LIVE_SITE)
    dailymotion_id = re.compile(
        r'www.dailymotion.com/embed/video/(.*?)\?').findall(page)[0]
    params['next'] = 'play_l'
    params['video_id'] = dailymotion_id
    return get_video_url(params)
def start_live_tv_stream(params):
    """Scrape the live m3u8 URL and delegate to get_video_url in live
    mode."""
    page = utils.get_webcontent(URL_LIVE)
    stream_url = re.compile(r'x-mpegurl" src="(.*?)"').findall(page)[0]
    params['next'] = 'play_l'
    params['url_live'] = stream_url
    return get_video_url(params)
def getVideoURL(channel, url):
    """Return the stream URL for a video page.

    Scrapes every ``file: "..."`` entry from the page and returns the
    first MP4 one; otherwise keeps the historical fallback of the last
    entry found.

    Fix: the original ended with ``return url_video`` after the loop,
    which raised UnboundLocalError when the page exposed no ``file:``
    entries at all; now returns None in that case.
    """
    html = utils.get_webcontent(url)
    url_videos = re.compile(
        r'file: "(.*?)"', re.DOTALL).findall(html)
    for url_video in url_videos:
        if '.mp4' in url_video:
            return url_video
    # No .mp4 entry: fall back to the last candidate, or None when
    # nothing matched at all.
    return url_videos[-1] if url_videos else None
def get_video_url(params):
    """Get video URL and start video player.

    Live playback forwards params.url_live; replays/downloads scrape
    the page's source element.
    """
    if params.next == 'play_l':
        return params.url_live
    if params.next in ('play_r', 'download_video'):
        page = utils.get_webcontent(params.video_url)
        return re.compile('source src=\"(.*?)\"').findall(page)[0]
def getVideoURL(channel, url):
    """Return the HD MP4 stream URL scraped from the video page.

    Keeps the last ``"file": "...hd.mp4..."`` entry found, matching the
    original selection behavior.

    Fixes: the original referenced ``url_video`` after the loop without
    initializing it, raising UnboundLocalError when no '.hd.mp4' entry
    was present (now returns None instead); the loop variable also
    shadowed the *url* parameter.
    """
    # Normalize accented characters and strip line breaks so the regex
    # can match across the whole document.
    html = utils.get_webcontent(url).replace('\xe9', 'e').replace(
        '\xe0', 'a').replace('\n', ' ').replace('\r', '')
    candidates = re.compile(r'"file": "(.*?)"', re.DOTALL).findall(html)
    url_video = None
    for candidate in candidates:
        if '.hd.mp4' in candidate:
            url_video = candidate
    return url_video
def list_shows(params):
    """Build categories listing (three-level 6play-style hierarchy).

    list_shows_1: root catalog -> categories;
    list_shows_2: category -> programs (with vignette/carousel art);
    list_shows_3: program -> sub-categories plus an 'all videos' entry.
    """
    shows = []
    if params.next == 'list_shows_1':
        url_root_site = ''
        # A few channels are reachable at their plain name; the rest
        # use the '<name>replay' URL form.
        if params.channel_name == 'stories' or \
                params.channel_name == 'bruce' or \
                params.channel_name == 'crazy_kitchen' or \
                params.channel_name == 'home' or \
                params.channel_name == 'styles' or \
                params.channel_name == 'comedy' or \
                params.channel_name == 'fun_radio':
            url_root_site = URL_ROOT % params.channel_name
        else:
            url_root_site = URL_ROOT % (params.channel_name + 'replay')
        file_path = utils.download_catalog(url_root_site,
                                           '%s.json' % (params.channel_name),
                                           random_ua=True)
        file_prgm = open(file_path).read()
        json_parser = json.loads(file_prgm)
        # do not cache failed catalog fetch
        # the error format is:
        # {"error":{"code":403,"message":"Forbidden"}}
        if isinstance(json_parser, dict) and \
                'error' in json_parser.keys():
            utils.os.remove(file_path)
            raise Exception('Failed to fetch the 6play catalog')
        for array in json_parser:
            category_id = str(array['id'])
            category_name = array['name'].encode('utf-8')
            shows.append({
                'label': category_name,
                'url': common.PLUGIN.get_url(module_path=params.module_path,
                                             module_name=params.module_name,
                                             action='replay_entry',
                                             category_id=category_id,
                                             next='list_shows_2',
                                             title=category_name,
                                             window_title=category_name)
            })
    elif params.next == 'list_shows_2':
        file_prgm = utils.get_webcontent(URL_CATEGORY % (params.category_id),
                                         random_ua=True)
        json_parser = json.loads(file_prgm)
        for array in json_parser:
            program_title = array['title'].encode('utf-8')
            program_id = str(array['id'])
            program_desc = array['description'].encode('utf-8')
            program_imgs = array['images']
            program_img = ''
            program_fanart = ''
            # 'vignette' is the thumbnail, 'carousel' the fanart.
            for img in program_imgs:
                if img['role'].encode('utf-8') == 'vignette':
                    external_key = img['external_key'].encode('utf-8')
                    program_img = URL_IMG % (external_key)
                elif img['role'].encode('utf-8') == 'carousel':
                    external_key = img['external_key'].encode('utf-8')
                    program_fanart = URL_IMG % (external_key)
            info = {'video': {'title': program_title,
                              'plot': program_desc}}
            shows.append({
                'label': program_title,
                'thumb': program_img,
                'fanart': program_fanart,
                'url': common.PLUGIN.get_url(module_path=params.module_path,
                                             module_name=params.module_name,
                                             action='replay_entry',
                                             next='list_shows_3',
                                             program_id=program_id,
                                             program_img=program_img,
                                             program_fanart=program_fanart,
                                             program_desc=program_desc,
                                             title=program_title,
                                             window_title=program_title),
                'info': info
            })
    elif params.next == 'list_shows_3':
        program_json = utils.get_webcontent(URL_SUBCATEGORY % (params.program_id),
                                            random_ua=True)
        json_parser = json.loads(program_json)
        # program_fanart is optional in params.
        try:
            program_fanart = params.program_fanart
        except Exception:
            program_fanart = ''
        for sub_category in json_parser['program_subcats']:
            sub_category_id = str(sub_category['id'])
            sub_category_title = sub_category['title'].encode('utf-8')
            info = {
                'video': {
                    'title': sub_category_title,
                    'plot': params.program_desc
                }
            }
            shows.append({
                'label': sub_category_title,
                'thumb': params.program_img,
                'fanart': program_fanart,
                'url': common.PLUGIN.get_url(module_path=params.module_path,
                                             module_name=params.module_name,
                                             action='replay_entry',
                                             next='list_videos',
                                             program_id=params.program_id,
                                             sub_category_id=sub_category_id,
                                             window_title=sub_category_title),
                'info': info
            })
        # Extra entry listing all of the program's videos
        # (sub_category_id='null' selects the unfiltered endpoint).
        info = {
            'video': {
                'title': common.ADDON.get_localized_string(30701),
                'plot': params.program_desc
            }
        }
        shows.append({
            'label': common.ADDON.get_localized_string(30701),
            'thumb': params.program_img,
            'fanart': program_fanart,
            'url': common.PLUGIN.get_url(module_path=params.module_path,
                                         module_name=params.module_name,
                                         action='replay_entry',
                                         next='list_videos',
                                         program_id=params.program_id,
                                         sub_category_id='null',
                                         window_title=params.window_title),
            'info': info
        })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL),
        category=common.get_window_title(params))
def get_video_url(params):
    """Get video URL and start video player.

    Live playback reads the stream URL from the live JSON endpoint.
    """
    if params.next == 'play_l':
        live_data = json.loads(utils.get_webcontent(URL_LIVE))
        return live_data["url"]
def list_videos(params):
    """Build videos listing (paginated; appends to the previous page's
    items when 'previous_listing' is carried in params).

    Each page item is scraped for its Dailymotion id, title, image,
    date and duration, and gets a download context-menu entry.
    """
    videos = []
    if 'previous_listing' in params:
        videos = ast.literal_eval(params['previous_listing'])
    url = params.category_url + '/' + params.page
    file_path = utils.download_catalog(
        url,
        '%s_%s_%s.html' % (params.channel_name, params.category_name, params.page))
    root_html = open(file_path).read()
    root_soup = bs(root_html, 'html.parser')
    category_soup = root_soup.find_all('a', class_='colead')
    for program in category_soup:
        # Fetch the article page to get the Dailymotion video id.
        url = URL_ROOT + program['href'].encode('utf-8')
        html_video_equipe = utils.get_webcontent(url)
        video_id = re.compile(r'www.dailymotion.com/embed/video/(.*?)\?',
                              re.DOTALL).findall(html_video_equipe)[0]
        title = program.find('h2').get_text().encode('utf-8')
        colead__image = program.find('div', class_='colead__image')
        img = colead__image.find('img')['data-src'].encode('utf-8')
        # Overlay text looks like '07/09/17 | 01 min'.
        date = colead__image.find(
            'span',
            class_='colead__layerText colead__layerText--bottomleft').get_text().strip().encode('utf-8')
        date = date.split('/')
        day = date[0]
        mounth = date[1]
        # Two-digit year; assumes 20xx.
        year = '20' + date[2].split(' ')[0]
        date = '.'.join((day, mounth, year))
        aired = '-'.join((year, mounth, day))
        duration_string = colead__image.find(
            'span',
            class_='colead__layerText colead__layerText--bottomleft').get_text().strip().encode('utf-8')
        # '07/09/17 | 01 min' -> third token is the minute count.
        duration_list = duration_string.split(' ')
        duration = int(duration_list[2]) * 60
        info = {
            'video': {
                'title': title,
                'aired': aired,
                'date': date,
                'duration': duration,
                'year': year,
                'mediatype': 'tvshow'
            }
        }
        download_video = (
            common.GETTEXT('Download'),
            'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                action='download_video',
                module_path=params.module_path,
                module_name=params.module_name,
                video_id=video_id) + ')')
        context_menu = []
        context_menu.append(download_video)
        videos.append({
            'label': title,
            'thumb': img,
            'fanart': img,
            'url': common.PLUGIN.get_url(module_path=params.module_path,
                                         module_name=params.module_name,
                                         action='replay_entry',
                                         next='play_r',
                                         video_id=video_id),
            'is_playable': True,
            'info': info,
            'context_menu': context_menu
        })
    # More videos... (next-page entry re-enters this function with the
    # accumulated listing serialized in previous_listing).
    videos.append({
        'label': common.ADDON.get_localized_string(30700),
        'url': common.PLUGIN.get_url(module_path=params.module_path,
                                     module_name=params.module_name,
                                     action='replay_entry',
                                     category_url=params.category_url,
                                     category_name=params.category_name,
                                     next='list_videos',
                                     page=str(int(params.page) + 1),
                                     update_listing=True,
                                     previous_listing=str(videos))
    })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_PLAYCOUNT,
                      common.sp.xbmcplugin.SORT_METHOD_DATE,
                      common.sp.xbmcplugin.SORT_METHOD_DURATION,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE),
        content='tvshows',
        update_listing='update_listing' in params,
        category=common.get_window_title(params))
def list_shows(params):
    """Build categories listing.

    Scrapes the site menu, keeps only a known set of sections, and
    emits one entry per section plus one per sub-menu post.
    """
    shows = []
    if params.next == 'list_shows_1':
        replay_categories_html = utils.get_webcontent(URL_ROOT)
        replay_categories_soup = bs(replay_categories_html, 'html.parser')
        categories = replay_categories_soup.find(
            'ul', class_='nav nav-pills pull-right'
        ).find_all(
            'li',
            class_='menu-item menu-item-type-post_type menu-item-object-page')
        for category in categories:
            # Only these sections carry replay content.
            if 'meteo' in category.find('a').get('href') or \
                    'actualite' in category.find('a').get('href') or \
                    'sports' in category.find('a').get('href') or \
                    'emissions' in category.find('a').get('href') or \
                    'concours' in category.find('a').get('href') or \
                    'programmes' in category.find('a').get('href'):
                category_name = category.find('a').get_text()
                # The id sits between 'c_' and '.html' in the href.
                category_id = category.find('a').get('href').split(
                    '.html')[0].split('c_')[1]
                shows.append({
                    'label': category_name,
                    'url': common.PLUGIN.get_url(module_path=params.module_path,
                                                 module_name=params.module_name,
                                                 action='replay_entry',
                                                 category_id=category_id,
                                                 category_name=category_name,
                                                 next='list_videos_1',
                                                 page='1',
                                                 window_title=category_name)
                })
                # Sub-menu posts under the matched section.
                # NOTE(review): nesting reconstructed from flattened
                # source -- confirm the sub-category loop belongs
                # inside the keyword filter.
                sub_categories = category.find_all(
                    'li',
                    class_='menu-item menu-item-type-post_type menu-item-object-post')
                for sub_category in sub_categories:
                    category_name = sub_category.find('a').get_text()
                    category_id = sub_category.find("a").get("href").split(
                        '.html')[0].split('c_')[1]
                    shows.append({
                        'label': category_name,
                        'url': common.PLUGIN.get_url(module_path=params.module_path,
                                                     module_name=params.module_name,
                                                     action='replay_entry',
                                                     category_id=category_id,
                                                     category_name=category_name,
                                                     next='list_videos_1',
                                                     page='1',
                                                     window_title=category_name)
                    })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL),
        category=common.get_window_title())
def list_videos(params):
    """Build videos listing.

    Fetches the program's videos (filtered by sub-category unless the
    sentinel 'null' is passed) and emits one playable entry per video
    with a download context-menu item.
    """
    videos = []
    if params.sub_category_id == 'null':
        # 'null' sentinel: list all of the program's videos.
        url = URL_VIDEOS2 % params.program_id
    else:
        url = URL_VIDEOS % (params.program_id, params.sub_category_id)
    program_json = utils.get_webcontent(url, random_ua=True)
    json_parser = json.loads(program_json)
    # TODO: handle playlists with more than one 'clips' entry.
    for video in json_parser:
        video_id = str(video['id'])
        title = video['title'].encode('utf-8')
        duration = video['clips'][0]['duration']
        description = video['description'].encode('utf-8')
        # Broadcast date may be missing; fall back to empty strings.
        try:
            aired = video['clips'][0]['product']['last_diffusion']
            aired = aired.encode('utf-8')
            aired = aired[:10]
            year = aired[:4]
            # date  : string (%d.%m.%Y / 01.01.2009)
            # aired : string (2008-12-07)
            day = aired.split('-')[2]
            mounth = aired.split('-')[1]
            year = aired.split('-')[0]
            date = '.'.join((day, mounth, year))
        except Exception:
            aired = ''
            year = ''
            date = ''
        img = ''
        program_imgs = video['clips'][0]['images']
        program_img = ''
        # 'vignette' is the thumbnail role.
        for img in program_imgs:
            if img['role'].encode('utf-8') == 'vignette':
                external_key = img['external_key'].encode('utf-8')
                program_img = URL_IMG % (external_key)
        info = {
            'video': {
                'title': title,
                'plot': description,
                'aired': aired,
                'date': date,
                'duration': duration,
                'year': year,
                'mediatype': 'tvshow'
            }
        }
        download_video = (
            common.GETTEXT('Download'),
            'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                action='download_video',
                module_path=params.module_path,
                module_name=params.module_name,
                video_id=video_id) + ')')
        context_menu = []
        context_menu.append(download_video)
        videos.append({
            'label': title,
            'thumb': program_img,
            'url': common.PLUGIN.get_url(
                module_path=params.module_path,
                module_name=params.module_name,
                action='replay_entry',
                next='play',
                video_id=video_id,
            ),
            'is_playable': True,
            'info': info,
            'context_menu': context_menu
        })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_DATE,
                      common.sp.xbmcplugin.SORT_METHOD_DURATION,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE,
                      common.sp.xbmcplugin.SORT_METHOD_UNSORTED),
        content='tvshows',
        category=common.get_window_title(params))
def list_videos(params):
    """Build videos listing.

    Scrapes the category page's items and emits one playable entry per
    video with a download context-menu item.

    Fix: ``sort_methods=(SORT_METHOD_UNSORTED)`` was not a tuple (the
    trailing comma was missing), unlike every other create_listing
    call in this codebase; it now passes a real one-element tuple.
    """
    videos = []
    if params.next == 'list_videos_1':
        list_videos_html = utils.get_webcontent(params.category_url)
        list_videos_soup = bs(list_videos_html, 'html.parser')
        videos_data = list_videos_soup.find_all('div', class_='item')
        for video in videos_data:
            title = video.find('h4').get_text()
            plot = video.find('p').get_text()
            # The page exposes no duration/date information.
            duration = 0
            img = video.find('img').get('src')
            video_id = video.get('data-mediaid')
            info = {
                'video': {
                    'title': title,
                    'plot': plot,
                    'duration': duration,
                    'mediatype': 'tvshow'
                }
            }
            download_video = (
                common.GETTEXT('Download'),
                'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                    action='download_video',
                    module_path=params.module_path,
                    module_name=params.module_name,
                    video_id=video_id) + ')')
            context_menu = []
            context_menu.append(download_video)
            videos.append({
                'label': title,
                'thumb': img,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next='play_r',
                    video_id=video_id,
                ),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,),
        content='tvshows',
        category=common.get_window_title(params))
def get_video_url(params):
    """Resolve the stream URL for a VRT NU live ('play_l') or
    replay/download ('play_r'/'download_video') request.

    The replay path performs a Gigya login with the add-on settings and
    keeps cookies on a requests session across the whole exchange.
    """
    if params.next == 'play_l':
        # Live: obtain a vrtPlayerToken, then pick the hls_aes target URL.
        token_json = utils.get_webcontent(URL_TOKEN_LIVE,
                                          request_type='post')
        token_jsonparser = json.loads(token_json)
        url_stream_json = utils.get_webcontent(
            URL_LIVE % (params.channel_name,
                        token_jsonparser["vrtPlayerToken"]))
        url_stream_jsonparser = json.loads(url_stream_json)
        url_live = ''
        if "code" in url_stream_jsonparser:
            # Geo-blocked content: notify the user and abort.
            if url_stream_jsonparser["code"] == "INVALID_LOCATION":
                utils.send_notification(
                    common.ADDON.get_localized_string(30713))
                return None
        for url_stream_datas in url_stream_jsonparser["targetUrls"]:
            if url_stream_datas["type"] == "hls_aes":
                url_live = url_stream_datas["url"]
        return url_live
    elif params.next == 'play_r' or params.next == 'download_video':
        # Replay/download requires an authenticated session.
        session_requests = requests.session()
        # NOTE(review): eval() on params.module_path — acceptable only as
        # long as module_path always originates from this add-on itself,
        # never from user-controlled input.
        module_name = eval(params.module_path)[-1]
        # Build PAYLOAD
        payload = {
            'loginID': common.PLUGIN.get_setting(module_name + '.login'),
            'password': common.PLUGIN.get_setting(module_name + '.password'),
            'targetEnv': 'jssdk',
            'APIKey': get_api_key(),
            'includeSSOToken': 'true',
            'authMode': 'cookie'
        }
        result = session_requests.post(URL_LOGIN, data=payload)
        result_jsonpaser = json.loads(result.text)
        if result_jsonpaser['statusCode'] != 200:
            # Login failed: notify the user and abort.
            utils.send_notification(params.channel_name + ' : ' +
                                    common.ADDON.get_localized_string(30711))
            return None
        headers = {
            'Content-Type': 'application/json',
            'Referer': URL_ROOT + '/vrtnu/'
        }
        data = '{"uid": "%s", ' \
               '"uidsig": "%s", ' \
               '"ts": "%s", ' \
               '"email": "%s"}' % (
                   result_jsonpaser['UID'],
                   result_jsonpaser['UIDSignature'],
                   result_jsonpaser['signatureTimestamp'],
                   common.PLUGIN.get_setting(module_name + '.login'))
        # Response body unused: this POST only sets session cookies.
        result_2 = session_requests.post(URL_TOKEN,
                                         data=data,
                                         headers=headers)
        build_url = params.video_url[:-1] + '.mssecurevideo.json'
        result_3 = session_requests.get(build_url)
        video_id_json = json.loads(result_3.text)
        video_id = ''
        # Keeps the 'videoid' of the last key seen (Python 2 iteritems()).
        for key in video_id_json.iteritems():
            video_id = video_id_json[key[0]]['videoid']
        result_4 = session_requests.get(URL_STREAM_JSON % video_id)
        streams_json = json.loads(result_4.text)
        url = ''
        # Last HLS-typed target wins.
        for stream in streams_json['targetUrls']:
            if 'HLS' in stream['type']:
                url = stream['url']
        return url
def list_videos(params):
    """Build the replay videos listing by scraping the channel page.

    The 'cx' module uses a different page layout than the other channels.
    """
    videos = []
    if params.next == 'list_videos_1':
        replays_html = utils.get_webcontent(params.url_channel_replay)
        replays_soup = bs(replays_html, 'html.parser')
        if params.module_name == 'cx':
            list_videos = replays_soup.find('div',
                                            class_='listinner').find_all('li')
        else:
            list_videos = replays_soup.find_all('li', class_='resumable')
        for video_data in list_videos:
            title = video_data.find('h3').get_text()
            plot = video_data.find('p', class_='summary').get_text()
            duration = 0
            # Thumbnail URL is embedded in the inline CSS background style.
            img = re.compile(r'url\((.*?)\);').findall(
                video_data.find('div', class_='picinner').get('style'))[0]
            video_url = URL_ROOT + video_data.find('a').get('href')
            info = {
                'video': {
                    'title': title,
                    'plot': plot,
                    #'episode': episode_number,
                    #'season': season_number,
                    #'rating': note,
                    #'aired': aired,
                    #'date': date,
                    'duration': duration,
                    #'year': year,
                    'mediatype': 'tvshow'
                }
            }
            # Context-menu "Download" entry for this video.
            download_video = (
                common.GETTEXT('Download'),
                'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                    action='download_video',
                    module_path=params.module_path,
                    module_name=params.module_name,
                    video_url=video_url) + ')')
            context_menu = []
            context_menu.append(download_video)
            videos.append({
                'label': title,
                'thumb': img,
                'fanart': img,
                'url': common.PLUGIN.get_url(module_path=params.module_path,
                                             module_name=params.module_name,
                                             action='replay_entry',
                                             next='play_r',
                                             video_url=video_url),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED),
        content='tvshows',
        category=common.get_window_title())
def get_video_url(params):
    """Resolve the final video URL for replay/download or live playback.

    Replay ('play_r'/'download_video'): resolves the diffusion id (via
    the yatta API when only 'id_yatta' is known), then picks a stream
    according to the 'quality' setting (DIALOG / BEST / lowest).
    Returns None when the user cancels the quality dialog or the chosen
    stream is DRM-protected.

    Live ('play_l'): picks the HLS live URL matching the channel and the
    running Kodi version, then authenticates it through HDFAUTH_URL.
    """
    desired_quality = common.PLUGIN.get_setting('quality')
    if params.next == 'play_r' or params.next == 'download_video':
        if 'id_yatta' in params:
            # Resolve the diffusion id (si_id) from the yatta video id.
            result = utils.get_webcontent(URL_YATTA_VIDEO % params.id_yatta)
            result = json.loads(result)
            for media in result['content_has_medias']:
                if 'si_id' in media['media']:
                    params['id_diffusion'] = media['media']['si_id']
                    break
        json_parser = json.loads(
            utils.get_webcontent(SHOW_INFO % (params.id_diffusion)))
        # Collected but not yet returned — TODO wire into the player.
        subtitles = []
        if json_parser['subtitles']:
            subtitles_list = json_parser['subtitles']
            for subtitle in subtitles_list:
                if subtitle['format'] == 'vtt':
                    subtitles.append(subtitle['url'].encode('utf-8'))
        url_selected = ''
        # BUGFIX: 'drm' was previously unbound when no stream of the
        # expected format existed, raising NameError at 'if drm:' below.
        drm = False
        if desired_quality == "DIALOG":
            # Let the user pick between HD (hls_v5_os) and SD (m3u8).
            all_datas_videos_quality = []
            all_datas_videos_path = []
            for video in json_parser['videos']:
                if video['format'] == 'hls_v5_os' or \
                        video['format'] == 'm3u8-download':
                    if video['format'] == 'hls_v5_os':
                        all_datas_videos_quality.append("HD")
                    else:
                        all_datas_videos_quality.append("SD")
                    all_datas_videos_path.append(
                        (video['url'], video['drm']))
            seleted_item = common.sp.xbmcgui.Dialog().select(
                common.GETTEXT('Choose video quality'),
                all_datas_videos_quality)
            if seleted_item == -1:
                # User cancelled the dialog.
                return None
            url_selected = all_datas_videos_path[seleted_item][0]
            drm = all_datas_videos_path[seleted_item][1]
        elif desired_quality == "BEST":
            for video in json_parser['videos']:
                if video['format'] == 'hls_v5_os':
                    url_selected = video['url']
                    drm = video['drm']
        else:
            for video in json_parser['videos']:
                if video['format'] == 'm3u8-download':
                    url_selected = video['url']
                    drm = video['drm']
        if drm:
            # DRM-protected stream cannot be played: notify and abort.
            utils.send_notification(
                common.ADDON.get_localized_string(30702))
            return None
        else:
            return url_selected
    elif params.next == 'play_l':
        json_parser = json.loads(utils.get_webcontent(URL_LIVE_JSON))
        for live in json_parser["result"]:
            if live["channel"] == params.channel_name:
                live_datas = live["collection"][0]["content_has_medias"]
                liveId = ''
                for live_data in live_datas:
                    if "si_direct_id" in live_data["media"]:
                        liveId = live_data["media"]["si_direct_id"]
                json_parser_liveId = json.loads(
                    utils.get_webcontent(SHOW_INFO % liveId))
                url_hls_v1 = ''
                url_hls_v5 = ''
                url_hls = ''
                for video in json_parser_liveId['videos']:
                    if 'format' in video:
                        if 'hls_v1_os' in video['format'] and \
                                video['geoblocage'] is not None:
                            url_hls_v1 = video['url']
                        if 'hls_v5_os' in video['format'] and \
                                video['geoblocage'] is not None:
                            url_hls_v5 = video['url']
                        if 'hls' in video['format']:
                            url_hls = video['url']
                final_url = ''
                # Case France 3 Région
                if url_hls_v1 == '' and url_hls_v5 == '':
                    final_url = url_hls
                # Case Jarvis (Kodi 16 ships xbmc module 2.24.0)
                if common.sp.xbmc.__version__ == '2.24.0' \
                        and url_hls_v1 != '':
                    final_url = url_hls_v1
                # Case Krypton, Leia, ...
                if final_url == '' and url_hls_v5 != '':
                    final_url = url_hls_v5
                elif final_url == '':
                    final_url = url_hls_v1
                json_parser2 = json.loads(
                    utils.get_webcontent(HDFAUTH_URL % (final_url)))
                return json_parser2['url']
def list_shows(params):
    """Build the categories listing (MTV).

    Level 1 lists an 'All videos' entry plus one entry per first letter;
    level 2 expands one letter into its shows.  The feed URLs are read
    from the site's JSON manifest ('zones' -> 'feed').
    """
    shows = []
    if params.next == 'list_shows_1':
        emission_name = _('All videos')
        file_path = utils.get_webcontent(URL_JSON_MTV % URL_VIDEOS)
        json_mtv = json.loads(file_path)
        emission_url = json_mtv["manifest"]["zones"]["t4_lc_promo1"]["feed"]
        shows.append({
            'label': emission_name,
            'url': common.PLUGIN.get_url(action='channel_entry',
                                         emission_url=emission_url,
                                         category_name=emission_name,
                                         next='list_videos_1',
                                         window_title=emission_name)
        })
        # Get Emission :
        root_json = utils.get_webcontent(URL_JSON_MTV % URL_EMISSION)
        json_emission_mtv = json.loads(root_json)
        emission_json_url = json_emission_mtv["manifest"]["zones"][
            "t5_lc_promo1"]["feed"]
        emission_json = utils.get_webcontent(emission_json_url)
        emission_json_parser = json.loads(emission_json)
        # One folder per first letter ('key' groups shows alphabetically).
        for emissions in emission_json_parser["result"]["shows"]:
            emissions_letter = emissions["key"]
            shows.append({
                'label': emissions_letter,
                'url': common.PLUGIN.get_url(action='channel_entry',
                                             emissions_letter=emissions_letter,
                                             category_name=emissions_letter,
                                             next='list_shows_2',
                                             window_title=emissions_letter)
            })
    elif params.next == 'list_shows_2':
        # Re-fetches the same manifest as level 1, then expands only the
        # letter selected by the user.
        root_json = utils.get_webcontent(URL_JSON_MTV % URL_EMISSION)
        json_emission_mtv = json.loads(root_json)
        emission_json_url = json_emission_mtv["manifest"]["zones"][
            "t5_lc_promo1"]["feed"]
        emission_json = utils.get_webcontent(emission_json_url)
        emission_json_parser = json.loads(emission_json)
        for emissions in emission_json_parser["result"]["shows"]:
            if params.emissions_letter == emissions["key"]:
                for emission in emissions["value"]:
                    emission_name = emission["title"]
                    # One more request per show to resolve its feed URL.
                    file_path_2 = utils.get_webcontent(URL_JSON_MTV %
                                                       emission["url"])
                    json_mtv = json.loads(file_path_2)
                    emission_url = json_mtv["manifest"]["zones"][
                        "t5_lc_promo1"]["feed"]
                    shows.append({
                        'label': emission_name,
                        'url': common.PLUGIN.get_url(
                            action='channel_entry',
                            emission_url=emission_url,
                            category_name=emission_name,
                            next='list_videos_1',
                            window_title=emission_name)
                    })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED),
        category=common.get_window_title())
def get_brightcove_policy_key(data_account, data_player):
    """Extract the Brightcove policyKey from the player's JS bundle."""
    player_js = utils.get_webcontent(
        URL_BRIGHTCOVE_POLICY_KEY % (data_account, data_player))
    # First policyKey:"..." occurrence in the downloaded script.
    keys = re.findall('policyKey:"(.+?)"', player_js)
    return keys[0]
def list_shows(params):
    """Build the categories/shows listing for VRT NU.

    Level 1: fixed categories from CATEGORIES_VRT.
    Level 2: scrape either the 'categorieen' grid or the 'a-z' index.
    Level 3: expand one category via the JSON categories API.
    """
    shows = []
    if params.next == 'list_shows_1':
        for category_context, category_title in CATEGORIES_VRT.iteritems():
            category_url = URL_ROOT + category_context
            shows.append({
                'label': category_title,
                'url': common.PLUGIN.get_url(module_path=params.module_path,
                                             module_name=params.module_name,
                                             action='replay_entry',
                                             next='list_shows_2',
                                             page='0',
                                             title=category_title,
                                             category_url=category_url,
                                             window_title=category_title)
            })
    elif params.next == 'list_shows_2':
        # BUGFIX: initialize before the branches — a category_url matching
        # neither 'categorieen' nor 'a-z' previously raised NameError.
        list_datas = []
        value_next = 'list_videos_1'
        if 'categorieen' in params.category_url:
            categories_html = utils.get_webcontent(params.category_url)
            categories_soup = bs(categories_html, 'html.parser')
            list_datas = categories_soup.find_all(
                'li', class_="vrtlist__item vrtlist__item--grid")
            value_next = 'list_shows_3'
        elif 'a-z' in params.category_url:
            emissions_html = utils.get_webcontent(params.category_url)
            emissions_soup = bs(emissions_html, 'html.parser')
            list_datas = emissions_soup.find_all(
                'li', class_="vrtlist__item vrtglossary__item")
            value_next = 'list_videos_1'
        for data in list_datas:
            data_url = URL_ROOT + data.find('a').get('href')
            # srcset starts with the 1x variant; keep the URL before '1x'.
            data_img = 'https:' + data.find('img').get('srcset').split(
                '1x')[0].strip()
            if data.find('p'):
                data_title = data.find(
                    'h3').get_text().encode('utf-8') + ' - ' + \
                    data.find('p').get_text().encode('utf-8')
            else:
                data_title = data.find('h3').get_text().encode('utf-8')
            shows.append({
                'label': data_title,
                'thumb': data_img,
                'url': common.PLUGIN.get_url(module_path=params.module_path,
                                             module_name=params.module_name,
                                             data_title=data_title,
                                             action='replay_entry',
                                             data_url=data_url,
                                             next=value_next,
                                             window_title=data_title)
            })
    elif params.next == 'list_shows_3':
        category_name = re.compile('categorieen/(.*?)/').findall(
            params.data_url)[0]
        emissions_json = utils.get_webcontent(URL_CATEGORIES_JSON %
                                              category_name)
        emissions_jsonparser = json.loads(emissions_json)
        for data in emissions_jsonparser:
            # API returns protocol-relative URLs.
            data_url = 'https:' + data['targetUrl']
            data_img = 'https:' + data['thumbnail']
            data_title = data['title']
            shows.append({
                'label': data_title,
                'thumb': data_img,
                'url': common.PLUGIN.get_url(module_path=params.module_path,
                                             module_name=params.module_name,
                                             data_title=data_title,
                                             action='replay_entry',
                                             data_url=data_url,
                                             next='list_videos_1',
                                             window_title=data_title)
            })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED),
        category=common.get_window_title(params))
def list_videos(params):
    """Build the VRT NU episodes listing for one show page.

    Pages with a 'swiper-wrapper' carousel list several episodes;
    otherwise a single 'content-container' episode page is parsed.
    """
    videos = []
    if params.next == 'list_videos_1':
        file_path = utils.get_webcontent(params.data_url)
        episodes_soup = bs(file_path, 'html.parser')
        if episodes_soup.find('ul', class_='swiper-wrapper'):
            # Multi-episode carousel layout.
            list_episodes = episodes_soup.find(
                'ul', class_='swiper-wrapper').find_all('li')
            for episode in list_episodes:
                title = episode.find('h3').get_text().strip()
                duration = 0
                video_url = URL_ROOT + episode.find('a').get('href')
                # srcset starts with the 1x variant; keep URL before '1x'.
                img = 'https:' + episode.find('img').get('srcset').split(
                    '1x')[0].strip()
                info = {
                    'video': {
                        'title': title,
                        # 'plot': plot,
                        # 'episode': episode_number,
                        # 'season': season_number,
                        # 'rating': note,
                        # 'aired': aired,
                        # 'date': date,
                        'duration': duration,
                        # 'year': year,
                        'mediatype': 'tvshow'
                    }
                }
                download_video = (
                    common.GETTEXT('Download'),
                    'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                        action='download_video',
                        module_path=params.module_path,
                        module_name=params.module_name,
                        video_url=video_url) + ')')
                context_menu = []
                context_menu.append(download_video)
                videos.append({
                    'label': title,
                    'thumb': img,
                    'fanart': img,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        next='play_r',
                        video_url=video_url),
                    'is_playable': True,
                    'info': info,
                    'context_menu': context_menu
                })
        else:
            # Single-episode page layout.
            if episodes_soup.find('div', class_='content-container'):
                episode = episodes_soup.find('div',
                                             class_='content-container')
                title = episode.find(
                    'span', class_='content__title').get_text().strip()
                plot = episode.find(
                    'span',
                    class_='content__shortdescription').get_text().strip()
                duration = 0
                # The canonical page URL is embedded in an inline JSON blob.
                video_url = re.compile(r'page_url":"(.*?)"').findall(
                    file_path)[0]
                img = 'https:' + episode.find('img').get('srcset').strip()
                info = {
                    'video': {
                        'title': title,
                        'plot': plot,
                        # 'episode': episode_number,
                        # 'season': season_number,
                        # 'rating': note,
                        # 'aired': aired,
                        # 'date': date,
                        'duration': duration,
                        # 'year': year,
                        'mediatype': 'tvshow'
                    }
                }
                download_video = (
                    common.GETTEXT('Download'),
                    'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                        action='download_video',
                        module_path=params.module_path,
                        module_name=params.module_name,
                        video_url=video_url) + ')')
                context_menu = []
                context_menu.append(download_video)
                videos.append({
                    'label': title,
                    'thumb': img,
                    'fanart': img,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        next='play_r',
                        video_url=video_url),
                    'is_playable': True,
                    'info': info,
                    'context_menu': context_menu
                })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_DATE),
        content='tvshows',
        category=common.get_window_title(params))
def list_videos(params):
    """Build a paginated videos listing from an 'items_html' JSON feed.

    Pagination works by re-invoking this action with page+1 and the
    already-built entries serialized in 'previous_listing'.
    """
    videos = []
    if 'previous_listing' in params:
        # Restore items accumulated by earlier pages.
        videos = ast.literal_eval(params['previous_listing'])
    if params.next == 'list_videos_1':
        replay_episodes_json = utils.get_webcontent(
            URL_VIDEOS % (params.category_id, params.page))
        replay_episodes_jsonparser = json.loads(replay_episodes_json)
        # The API returns HTML fragments inside the JSON payload.
        for replay_episodes_datas in replay_episodes_jsonparser["items_html"]:
            list_episodes_soup = bs(replay_episodes_datas, 'html.parser')
            list_episodes = list_episodes_soup.find_all('li')
            for episode in list_episodes:
                video_title = episode.find('img').get('alt')
                video_url = URL_ROOT + episode.find('a').get('href')
                video_img = episode.find('img').get('src')
                video_duration = 0
                info = {
                    'video': {
                        'title': video_title,
                        # 'aired': aired,
                        # 'date': date,
                        'duration': video_duration,
                        # 'plot': video_plot,
                        # 'year': year,
                        'mediatype': 'tvshow'
                    }
                }
                download_video = (
                    common.GETTEXT('Download'),
                    'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                        action='download_video',
                        module_path=params.module_path,
                        module_name=params.module_name,
                        video_url=video_url) + ')')
                context_menu = []
                context_menu.append(download_video)
                videos.append({
                    'label': video_title,
                    'thumb': video_img,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='website_entry',
                        next='play_r',
                        video_url=video_url),
                    'is_playable': True,
                    'info': info,
                    'context_menu': context_menu
                })
        # More videos...
        videos.append({
            'label': '# ' + common.ADDON.get_localized_string(30700),
            'url': common.PLUGIN.get_url(module_path=params.module_path,
                                         module_name=params.module_name,
                                         action='website_entry',
                                         next='list_videos_1',
                                         category_id=params.category_id,
                                         page=str(int(params.page) + 1),
                                         update_listing=True,
                                         previous_listing=str(videos))
        })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED),
        content='tvshows',
        update_listing='update_listing' in params,
        category=common.get_window_title(params))
def get_api_key():
    """Return the Gigya API key used for VRT NU authentication.

    The key used to be scraped from the /vrtnu/ landing page (the regex
    is kept below for reference) but is now hard-coded, so the page
    download was dead work and has been removed.
    """
    # Previously scraped with: re.compile('apiKey=(.*?)\&').findall(html)[0]
    return '3_qhEcPa5JGFROVwu5SWKqJ4mVOIkwlFNMSKwzPDAh8QZOtHqu6L4nD5Q7lk0eXOOG'
def list_videos(params):
    """Build the Allociné videos listing (films/series or emissions).

    Both branches are paginated: a trailing 'More videos' entry re-runs
    this action with page+1 and the entries built so far serialized in
    'previous_listing'.
    """
    videos = []
    if 'previous_listing' in params:
        # Restore items accumulated by earlier pages.
        videos = ast.literal_eval(params['previous_listing'])
    if params.next == 'list_videos_films_series_1':
        replay_episodes_html = utils.get_webcontent(params.show_url +
                                                    '?page=%s' % params.page)
        replay_episodes_soup = bs(replay_episodes_html, 'html.parser')
        episodes = replay_episodes_soup.find_all(
            'article', class_="media-meta sidecontent small")
        for episode in episodes:
            video_title = episode.find('h2').find('span').find(
                'a').get_text().strip().encode('utf-8')
            # Media id is carried in the 'cmedia=' query parameter.
            video_id = re.compile('cmedia=(.*?)&').findall(
                episode.find('a').get('href'))[0]
            video_img = episode.find('img').get('src').encode('utf-8')
            video_duration = 0
            info = {
                'video': {
                    'title': video_title,
                    # 'aired': aired,
                    # 'date': date,
                    'duration': video_duration,
                    # 'plot': video_plot,
                    # 'year': year,
                    'mediatype': 'tvshow'
                }
            }
            download_video = (_('Download'),
                              'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                                  action='download_video',
                                  video_id=video_id) + ')')
            context_menu = []
            context_menu.append(download_video)
            videos.append({
                'label': video_title,
                'thumb': video_img,
                'url': common.PLUGIN.get_url(action='channel_entry',
                                             next='play_r',
                                             video_id=video_id),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })
        # More videos...
        videos.append({
            'label': '# ' + common.ADDON.get_localized_string(30100),
            'url': common.PLUGIN.get_url(
                action='channel_entry',
                show_url=params.show_url,
                next='list_videos_films_series_1',
                page=str(int(params.page) + 1),
                update_listing=True,
                previous_listing=str(videos))
        })
    elif params.next == 'list_videos_emissions_1':
        replay_episodes_html = utils.get_webcontent(params.show_url +
                                                    '?page=%s' % params.page)
        replay_episodes_soup = bs(replay_episodes_html, 'html.parser')
        if replay_episodes_soup.find(
                'section', class_='media-meta-list by2 j_w') is not None:
            root_episodes_soup = replay_episodes_soup.find(
                'section', class_='media-meta-list by2 j_w')
            episodes = root_episodes_soup.find_all('figure',
                                                   class_='media-meta-fig')
        else:
            episodes = replay_episodes_soup.find_all(
                'figure', class_='media-meta-fig')
        for episode in episodes:
            if episode.find('h3') is not None:
                video_title = episode.find('h3').find('span').find(
                    'a').get_text().strip()
            else:
                video_title = episode.find('h2').find('span').find(
                    'a').get_text().strip()
            # The media id lives in different URL shapes depending on the
            # page: ?cmedia=, _cmedia= (film/serie pages) or a path slug.
            if '?cmedia=' in episode.find('a').get('href'):
                video_id = episode.find('a').get('href').split('?cmedia=')[1]
            elif 'cfilm=' in episode.find('a').get('href') or \
                    'cserie=' in episode.find('a').get('href'):
                video_id = episode.find('h2').find('span').find('a').get(
                    'href').split('_cmedia=')[1].split('&')[0]
            else:
                video_id = episode.find('a').get('href').split('-')[1].replace(
                    '/', '')
            video_plot = ''
            # Last <p> of the caption wins.
            for plot_value in episode.find(
                    'div',
                    class_='media-meta-figcaption-inner').find_all('p'):
                video_plot = plot_value.get_text().strip()
            if episode.find('meta') is not None:
                video_img = episode.find('meta').get('content').encode('utf-8')
            else:
                video_img = episode.find('img').get('src').encode('utf-8')
            video_duration = 0
            info = {
                'video': {
                    'title': video_title,
                    # 'aired': aired,
                    # 'date': date,
                    'duration': video_duration,
                    'plot': video_plot,
                    # 'year': year,
                    'mediatype': 'tvshow'
                }
            }
            download_video = (_('Download'),
                              'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                                  action='download_video',
                                  video_id=video_id) + ')')
            context_menu = []
            context_menu.append(download_video)
            videos.append({
                'label': video_title,
                'thumb': video_img,
                'url': common.PLUGIN.get_url(action='channel_entry',
                                             next='play_r',
                                             video_id=video_id),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })
        # More videos...
        videos.append({
            'label': '# ' + common.ADDON.get_localized_string(30100),
            'url': common.PLUGIN.get_url(
                action='channel_entry',
                show_url=params.show_url,
                # last_page=params.last_page,
                # BUGFIX: was next='list_videos_1', which matches neither
                # branch of this function, so emissions pagination was dead.
                next=params.next,
                page=str(int(params.page) + 1),
                update_listing=True,
                previous_listing=str(videos))
        })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE,
                      common.sp.xbmcplugin.SORT_METHOD_UNSORTED),
        content='tvshows',
        update_listing='update_listing' in params,
        category=common.get_window_title())
def get_token(channel_name):
    """Fetch and return the session token for the given channel."""
    token_payload = json.loads(utils.get_webcontent(URL_TOKEN %
                                                    (channel_name)))
    return token_payload['session']['token'].encode('utf-8')
def get_video_url(params):
    """Resolve the video URL for an Allociné media id.

    Videos hosted by Allociné expose 'rendition' entries picked by the
    'quality' setting (DIALOG / BEST / lowest).  Embedded third-party
    videos are dispatched to the matching resolver (YouTube,
    Dailymotion, Facebook, Vimeo).
    """
    video_json = utils.get_webcontent(URL_API_MEDIA % (params.video_id,
                                                       PARTNER))
    video_json_parser = json.loads(video_json)
    desired_quality = common.PLUGIN.get_setting('quality')
    url = ''
    if 'rendition' in video_json_parser["media"]:
        # (Video Hosted By Allocine)
        if desired_quality == "DIALOG":
            # Let the user pick among available bandwidths.
            all_datas_videos_quality = []
            all_datas_videos_path = []
            for media in video_json_parser["media"]["rendition"]:
                all_datas_videos_quality.append(media["bandwidth"]["$"])
                all_datas_videos_path.append(media["href"])
            seleted_item = common.sp.xbmcgui.Dialog().select(
                _('Choose video quality'), all_datas_videos_quality)
            if seleted_item == -1:
                return None
            url = all_datas_videos_path[seleted_item]
        elif desired_quality == "BEST":
            # Renditions are ordered ascending; the last one is the best.
            for media in video_json_parser["media"]["rendition"]:
                url = media["href"]
        else:
            # BUGFIX: previously iterated rendition[0] (a dict), yielding
            # key strings and crashing on media["href"]; take the first
            # (lowest-bandwidth) rendition directly.
            url = video_json_parser["media"]["rendition"][0]["href"]
        if requests.get(url, stream=True).status_code == 404:
            utils.send_notification(common.ADDON.get_localized_string(30111))
            return ''
        return url
    else:
        # (Video Not Hosted By Allocine)
        url_video_embeded = re.compile('src=\'(.*?)\'').findall(
            video_json_parser["media"]["trailerEmbed"])[0]
        if 'allocine' in url_video_embeded:
            url_video_embeded_html = utils.get_webcontent(url_video_embeded)
            url_video_resolver = re.compile('data-model="(.*?)"').findall(
                url_video_embeded_html)[0]
            # NOTE(review): these replace() calls look like no-ops; they
            # may originally have decoded HTML entities (&quot;, &amp;)
            # — confirm against upstream history before changing.
            url_video_resolver = url_video_resolver.replace('"', '"')
            url_video_resolver = url_video_resolver.replace('\\', '')
            url_video_resolver = url_video_resolver.replace('&', '&')
            url_video_resolver = url_video_resolver.replace('%2F', '/')
            # Case Youtube
            if 'youtube' in url_video_resolver:
                video_id = re.compile('www.youtube.com/embed/(.*?)[\?\"\&]'
                                      ).findall(url_video_resolver)[0]
                if params.next == 'download_video':
                    return resolver.get_stream_youtube(video_id, True)
                else:
                    return resolver.get_stream_youtube(video_id, False)
            # Case DailyMotion
            elif 'dailymotion' in url_video_resolver:
                video_id = re.compile(r'embed/video/(.*?)[\"\?]').findall(
                    url_video_resolver)[0]
                if params.next == 'download_video':
                    return resolver.get_stream_dailymotion(video_id, True)
                else:
                    return resolver.get_stream_dailymotion(video_id, False)
            # Case Facebook
            elif 'facebook' in url_video_resolver:
                video_id = re.compile('www.facebook.com/allocine/videos/(.*?)/'
                                      ).findall(url_video_resolver)[0]
                if params.next == 'download_video':
                    return resolver.get_stream_facebook(video_id, True)
                else:
                    return resolver.get_stream_facebook(video_id, False)
            # Case Vimeo
            elif 'vimeo' in url_video_resolver:
                video_id = re.compile('player.vimeo.com/video/(.*?)"').findall(
                    url_video_resolver)[0]
                if params.next == 'download_video':
                    return resolver.get_stream_vimeo(video_id, True)
                else:
                    return resolver.get_stream_vimeo(video_id, False)
            # TO DO ? (return an error)
            else:
                return ''
        else:
            # Case Youtube
            if 'youtube' in url_video_embeded:
                video_id = re.compile('www.youtube.com/embed/(.*?)[\?\"\&]'
                                      ).findall(url_video_embeded)[0]
                if params.next == 'download_video':
                    return resolver.get_stream_youtube(video_id, True)
                else:
                    return resolver.get_stream_youtube(video_id, False)
            # TO DO ? (return an error)
            else:
                return ''
def list_videos(params):
    """Build the MTV videos listing from one paginated JSON feed page.

    The feed's 'nextPageURL' drives pagination; entries built so far are
    carried through 'previous_listing'.
    """
    videos = []
    if 'previous_listing' in params:
        # Restore items accumulated by earlier pages.
        videos = ast.literal_eval(params['previous_listing'])
    if params.next == 'list_videos_1':
        file_path = utils.get_webcontent(params.emission_url)
        json_mtv = json.loads(file_path)
        if 'data' in json_mtv["result"]:
            for episode in json_mtv["result"]["data"]["items"]:
                video_title = episode["title"]
                video_plot = episode["description"]
                video_duration = 0
                video_url = episode["canonicalURL"]
                if 'images' in episode:
                    video_img = episode["images"]["url"]
                else:
                    video_img = ''
                info = {
                    'video': {
                        'title': video_title,
                        # 'aired': aired,
                        # 'date': date,
                        'duration': video_duration,
                        'plot': video_plot,
                        # 'year': year,
                        'mediatype': 'tvshow'
                    }
                }
                download_video = (
                    _('Download'),
                    'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                        action='download_video',
                        video_url=video_url) + ')')
                context_menu = []
                context_menu.append(download_video)
                videos.append({
                    'label': video_title,
                    'thumb': video_img,
                    'fanart': video_img,
                    'url': common.PLUGIN.get_url(action='channel_entry',
                                                 next='play_r',
                                                 video_url=video_url),
                    'is_playable': True,
                    'info': info,
                    'context_menu': context_menu
                })
        # More videos...
        if 'nextPageURL' in json_mtv["result"]:
            videos.append({
                'label': '# ' + common.ADDON.get_localized_string(30100),
                'url': common.PLUGIN.get_url(
                    action='channel_entry',
                    next='list_videos_1',
                    update_listing=True,
                    emission_url=json_mtv["result"]["nextPageURL"],
                    previous_listing=str(videos))
            })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_DURATION,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE,
                      common.sp.xbmcplugin.SORT_METHOD_GENRE,
                      common.sp.xbmcplugin.SORT_METHOD_UNSORTED),
        content='tvshows',
        update_listing='update_listing' in params,
        category=common.get_window_title())
def list_videos(params):
    """Build the videos listing from the category's RSS/XML feed."""
    videos = []
    # NOTE(review): this replace() looks like a no-op; it may originally
    # have decoded an HTML entity (&amp;) — confirm before changing.
    feed_xml = utils.get_webcontent(
        URL_VIDEOS % (params.category_id)).strip().replace('&', '&')
    rss_root = ET.XML(feed_xml)
    for entry in rss_root.find("channel").findall("item"):
        entry_title = entry.find("title").text.encode('utf-8')
        entry_url = entry.find("link").text.encode('utf-8')
        entry_img = entry.find("enclosure").get('url').encode('utf-8')
        entry_plot = entry.find("description").text.encode('utf-8')
        info = {
            'video': {
                'title': entry_title,
                'plot': entry_plot,
                'duration': 0,
                'mediatype': 'tvshow'
            }
        }
        # Context-menu "Download" entry for this video.
        context_menu = [(
            common.GETTEXT('Download'),
            'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                action='download_video',
                module_path=params.module_path,
                module_name=params.module_name,
                video_url=entry_url) + ')')]
        videos.append({
            'label': entry_title,
            'thumb': entry_img,
            'fanart': entry_img,
            'url': common.PLUGIN.get_url(module_path=params.module_path,
                                         module_name=params.module_name,
                                         action='replay_entry',
                                         next='play_r',
                                         video_url=entry_url),
            'is_playable': True,
            'info': info,
            'context_menu': context_menu
        })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_PLAYCOUNT,
                      common.sp.xbmcplugin.SORT_METHOD_DATE,
                      common.sp.xbmcplugin.SORT_METHOD_DURATION,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE),
        content='tvshows',
        category=common.get_window_title())
def isHD(VideoURL):
    """Return True when the HLS playlist advertises a 1280x720 variant."""
    playlist = utils.get_webcontent(VideoURL)
    return '1280x720' in playlist
def list_videos(params):
    """Build the paginated videos listing scraped from the channel site.

    Entries already built by earlier pages are carried through the
    'previous_listing' parameter.
    """
    videos = []
    if 'previous_listing' in params:
        videos = ast.literal_eval(params['previous_listing'])
    if params.next == 'list_videos_1':
        page_soup = bs(
            utils.get_webcontent(
                URL_VIDEOS % (params.channel_name, params.page)),
            'html.parser')
        # Site base URL is parameterized by channel name.
        base_url = URL_ROOT % params.channel_name
        for row in page_soup.find_all('div', class_=re.compile("views-row")):
            row_title = row.find(
                'span', class_='field-content').find('a').get_text()
            row_plot = row.find('div', class_='field-resume').get_text().strip()
            row_img = base_url + row.find('img').get('src')
            row_url = base_url + '/' + row.find('a').get('href').encode('utf-8')
            info = {
                'video': {
                    'title': row_title,
                    'plot': row_plot,
                    'duration': 0,
                    'mediatype': 'tvshow'
                }
            }
            # Context-menu "Download" entry for this video.
            context_menu = [(
                _('Download'),
                'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                    action='download_video',
                    video_url=row_url) + ')')]
            videos.append({
                'label': row_title,
                'thumb': row_img,
                'url': common.PLUGIN.get_url(
                    action='channel_entry',
                    next='play_r',
                    video_url=row_url,
                ),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })
        # More videos...
        videos.append({
            'label': common.ADDON.get_localized_string(30100),
            'url': common.PLUGIN.get_url(
                action='channel_entry',
                next='list_videos_1',
                page=str(int(params.page) + 1),
                update_listing=True,
                previous_listing=str(videos)
            )
        })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED
        ),
        content='tvshows',
        update_listing='update_listing' in params,
        category=common.get_window_title()
    )
def list_videos(params):
    """Build the France TV videos listing.

    Two modes:
    - search ('search' in params.next): queries the Algolia-backed yatta
      search endpoint and builds rich items from the hits;
    - catalog ('list_videos_1'/'list_videos_2'): downloads a JSON catalog
      page to disk and builds items from its 'result' entries.
    Both modes paginate via 'previous_listing'.
    """
    videos = []
    if 'previous_listing' in params:
        # Restore items accumulated by earlier pages.
        videos = ast.literal_eval(params['previous_listing'])
    if 'search' in params.next:
        url_search = URL_SEARCH_VIDEOS
        body = "{\"params\": \"filters=class:video&page=%s&query=%s\"}" % (
            params.page, params.query)
        result = utils.get_webcontent(url_search,
                                      request_type='post',
                                      specific_headers=HEADERS_YATTA,
                                      post_dic=body)
        json_d = json.loads(result)
        nb_pages = json_d['nbPages']
        for hit in json_d['hits']:
            label = hit['program']['label']
            title = hit['title']
            headline = hit['headline_title']
            desc = hit['text']
            duration = hit['duration']
            season = hit['season_number']
            episode = hit['episode_number']
            id_yatta = hit['id']
            director = hit['director']
            # producer = hit['producer']
            presenter = hit['presenter']
            casting = hit['casting']
            # characters = hit['characters']
            last_publication_date = hit['dates']['last_publication_date']
            image_400 = ''
            image_1024 = ''
            if 'image' in hit:
                image_400 = hit['image']['formats']['vignette_16x9']['urls'][
                    'w:400']
                image_1024 = hit['image']['formats']['vignette_16x9']['urls'][
                    'w:1024']
                image_400 = URL_API + image_400
                image_1024 = URL_API + image_1024
            title = label + ' - ' + title
            if headline and headline != '':
                desc = headline + '\n' + desc
            # Fall back to the presenter when no director is credited.
            if not director:
                director = presenter
            # last_publication_date is a Unix timestamp.
            info = {
                'video': {
                    'title': title,
                    'plot': desc,
                    'aired': time.strftime('%Y-%m-%d',
                                           time.localtime(
                                               last_publication_date)),
                    'date': time.strftime('%d.%m.%Y',
                                          time.localtime(
                                              last_publication_date)),
                    'duration': duration,
                    'year': time.strftime('%Y',
                                          time.localtime(
                                              last_publication_date)),
                    'mediatype': 'tvshow',
                    'season': season,
                    'episode': episode,
                    'cast': casting.split(', '),
                    'director': director
                }
            }
            download_video = (
                common.GETTEXT('Download'),
                'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                    action='download_video',
                    module_path=params.module_path,
                    module_name=params.module_name,
                    id_yatta=id_yatta) + ')')
            context_menu = []
            context_menu.append(download_video)
            videos.append({
                'label': title,
                'fanart': image_1024,
                'thumb': image_400,
                'url': common.PLUGIN.get_url(module_path=params.module_path,
                                             module_name=params.module_name,
                                             action='replay_entry',
                                             next='play_r',
                                             id_yatta=id_yatta),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu,
                # 'subtitles': 'subtitles'
            })
        if int(params.page) != nb_pages - 1:
            # More videos...
            videos.append({
                'label': common.ADDON.get_localized_string(30700),
                'url': common.PLUGIN.get_url(module_path=params.module_path,
                                             module_name=params.module_name,
                                             action='replay_entry',
                                             next=params.next,
                                             query=params.query,
                                             page=str(int(params.page) + 1),
                                             window_title=params.window_title,
                                             update_listing=True,
                                             previous_listing=str(videos))
            })
        return common.PLUGIN.create_listing(
            videos,
            sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                          common.sp.xbmcplugin.SORT_METHOD_DATE,
                          common.sp.xbmcplugin.SORT_METHOD_DURATION,
                          common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE,
                          common.sp.xbmcplugin.SORT_METHOD_EPISODE),
            content='tvshows',
            update_listing='update_listing' in params,
            category=common.get_window_title(params))
    else:
        # Catalog mode: download the JSON page to disk first.
        if params.next == 'list_videos_1':
            json_filepath = utils.download_catalog(
                URL_VIDEOS % params.program_part_url,
                '%s_%s.json' % (params.program_part_url, params.page),
                params={
                    'page': params.page,
                    'filter': 'with-no-vod,only-visible'
                })
            with open(json_filepath) as json_file:
                json_parser = json.load(json_file)
        elif params.next == 'list_videos_2':
            json_filepath = utils.download_catalog(
                URL_LAST_VIDEOS % params.channel_name,
                '%s_%s.json' % (params.channel_name, params.page),
                params={
                    'page': params.page,
                    'filter': 'with-no-vod,only-visible'
                })
            with open(json_filepath) as json_file:
                json_parser = json.load(json_file)
        for video_datas in json_parser["result"]:
            title = video_datas["title"].encode('UTF-8')
            id_diffusion = ''
            duration = 0
            image = ''
            # 'main' media carries the diffusion id and duration;
            # 'image' media carries the 16x9 thumbnail.
            for video_media in video_datas["content_has_medias"]:
                if "main" in video_media["type"]:
                    id_diffusion = video_media["media"]["si_id"]
                    duration = int(video_media["media"]["duration"])
                elif "image" in video_media["type"]:
                    for image_datas in video_media["media"]["patterns"]:
                        if "vignette_16x9" in image_datas["type"]:
                            image = URL_API + image_datas["urls"]["w:1024"]
            # creation_date is ISO 8601 ('YYYY-MM-DDT...').
            date_value = video_datas["creation_date"].split('T')[0].split('-')
            year = int(date_value[0])
            day = date_value[2]
            month = date_value[1]
            date = '.'.join((day, month, str(year)))
            aired = '-'.join((str(year), month, day))
            plot = ''
            if "text" in video_datas:
                plot = video_datas["text"]
            info = {
                'video': {
                    'title': title,
                    'duration': duration,
                    'plot': plot,
                    'aired': aired,
                    'date': date,
                    'year': year
                }
            }
            download_video = (
                common.GETTEXT('Download'),
                'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                    action='download_video',
                    module_path=params.module_path,
                    module_name=params.module_name,
                    id_diffusion=id_diffusion) + ')')
            context_menu = []
            context_menu.append(download_video)
            videos.append({
                'label': title,
                'fanart': image,
                'thumb': image,
                'url': common.PLUGIN.get_url(module_path=params.module_path,
                                             module_name=params.module_name,
                                             action='replay_entry',
                                             next='play_r',
                                             id_diffusion=id_diffusion),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })
        if json_parser["cursor"]["next"] is not None:
            # More videos...
            videos.append({
                'label': common.ADDON.get_localized_string(30700),
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next=params.next,
                    program_part_url=params.program_part_url,
                    page=str(json_parser["cursor"]["next"]),
                    window_title=params.window_title,
                    update_listing=True,
                    previous_listing=str(videos))
            })
        return common.PLUGIN.create_listing(
            videos,
            sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                          common.sp.xbmcplugin.SORT_METHOD_DATE,
                          common.sp.xbmcplugin.SORT_METHOD_DURATION,
                          common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE,
                          common.sp.xbmcplugin.SORT_METHOD_EPISODE),
            content='tvshows',
            update_listing='update_listing' in params,
            category=common.get_window_title(params))
def list_videos(params):
    """Build the videos listing for the channel website.

    Scrapes the paginated episode list. The page embeds its most recent
    video in an <iframe> whose JS player config JSON provides title, url
    and poster; the remaining episodes are plain <div> teasers. A
    "More videos..." entry is appended for the next page.

    Fix: removed a leftover Python-2 debug ``print`` that dumped the raw
    player-config JSON on every listing build.

    :param params: plugin routing parameters (.next, .page, .module_path,
        .module_name, optionally 'previous_listing')
    :return: Kodi listing built by common.PLUGIN.create_listing
    """
    videos = []
    if 'previous_listing' in params:
        # Pagination: keep the items gathered on previous pages.
        videos = ast.literal_eval(params['previous_listing'])

    if params.next == 'list_videos_1':
        replay_episodes_html = utils.get_webcontent(
            URL_ROOT + '/?page=%s' % params.page)
        replay_episodes_soup = bs(replay_episodes_html, 'html.parser')

        # Featured video: metadata lives in the embedded player's JS
        # "config = {...};" object. The non-greedy regex drops the final
        # '}', so it is re-appended before json.loads.
        if replay_episodes_soup.find('iframe'):
            url_first_video = replay_episodes_soup.find(
                'iframe').get('src')
            info_first_video = utils.get_webcontent(url_first_video)
            info_first_video_json = re.compile(
                'config = (.*?)};').findall(info_first_video)[0]
            info_first_video_jsonparser = json.loads(
                info_first_video_json + '}')
            video_title = info_first_video_jsonparser["metadata"]["title"]
            video_url = info_first_video_jsonparser["metadata"]["url"] + '?'
            video_img = info_first_video_jsonparser["metadata"]["poster_url"]
            video_duration = 0
            info = {
                'video': {
                    'title': video_title,
                    # 'aired': aired,
                    # 'date': date,
                    'duration': video_duration,
                    # 'plot': video_plot,
                    # 'year': year,
                    'mediatype': 'tvshow'
                }
            }
            # Context-menu entry that triggers the download action.
            download_video = (
                common.GETTEXT('Download'),
                'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                    action='download_video',
                    module_path=params.module_path,
                    module_name=params.module_name,
                    video_url=video_url) + ')'
            )
            context_menu = []
            context_menu.append(download_video)
            videos.append({
                'label': video_title,
                'thumb': video_img,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='website_entry',
                    next='play_r',
                    video_url=video_url
                ),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })

        # Regular episode grid.
        episodes = replay_episodes_soup.find_all(
            'div', class_='col-xs-6 col-sm-12')
        for episode in episodes:
            video_title = episode.find('img').get('alt')
            video_url = URL_ROOT + episode.find('a').get('href')
            # Kodi treats '|' in artwork URLs as an option separator;
            # percent-encode it.
            video_img = episode.find(
                'img').get('src').replace('|', '%7C')
            video_duration = 0
            info = {
                'video': {
                    'title': video_title,
                    # 'aired': aired,
                    # 'date': date,
                    'duration': video_duration,
                    # 'plot': video_plot,
                    # 'year': year,
                    'mediatype': 'tvshow'
                }
            }
            download_video = (
                common.GETTEXT('Download'),
                'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                    action='download_video',
                    module_path=params.module_path,
                    module_name=params.module_name,
                    video_url=video_url) + ')'
            )
            context_menu = []
            context_menu.append(download_video)
            videos.append({
                'label': video_title,
                'thumb': video_img,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='website_entry',
                    next='play_r',
                    video_url=video_url
                ),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })

        # More videos...
        videos.append({
            'label': '# ' + common.ADDON.get_localized_string(30700),
            'url': common.PLUGIN.get_url(
                module_path=params.module_path,
                module_name=params.module_name,
                action='website_entry',
                next='list_videos_1',
                page=str(int(params.page) + 1),
                update_listing=True,
                previous_listing=str(videos)
            )
        })

    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED
        ),
        content='tvshows',
        update_listing='update_listing' in params,
        category=common.get_window_title(params)
    )
def list_videos(params): """Build videos listing""" videos = [] if params.next == 'list_videos_1': replay_episodes_html = utils.get_webcontent( params.category_url) replay_episodes_soup = bs(replay_episodes_html, 'html.parser') episodes = replay_episodes_soup.find_all( 'div', class_='showcategory') for episode in episodes: video_title = episode.find( 'h5').find('a').get_text().strip() video_url = URL_ROOT + '/' + episode.find('a').get('href') video_img = URL_ROOT + '/' + episode.find('img').get('src') video_duration = 0 video_plot = episode.find( 'p', class_='mod-articles-category-introtext' ).get_text().strip().encode('utf-8') info = { 'video': { 'title': video_title, # 'aired': aired, # 'date': date, 'duration': video_duration, 'plot': video_plot, # 'year': year, 'mediatype': 'tvshow' } } download_video = ( common.GETTEXT('Download'), 'XBMC.RunPlugin(' + common.PLUGIN.get_url( action='download_video', module_path=params.module_path, module_name=params.module_name, video_url=video_url) + ')' ) context_menu = [] context_menu.append(download_video) videos.append({ 'label': video_title, 'thumb': video_img, 'url': common.PLUGIN.get_url( module_path=params.module_path, module_name=params.module_name, action='website_entry', next='play_r', video_url=video_url ), 'is_playable': True, 'info': info, 'context_menu': context_menu }) return common.PLUGIN.create_listing( videos, sort_methods=( common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE, common.sp.xbmcplugin.SORT_METHOD_UNSORTED ), content='tvshows', update_listing='update_listing' in params, category=common.get_window_title() )
def list_videos(params):
    """Build the CNews videos listing for one category page.

    Downloads the category page into the local catalog cache, scrapes each
    <article class="item"> teaser, then fetches every article page to
    extract the video id, description and release date. Appends a
    "More videos..." entry pointing at the next page.

    Fixes: the cached catalog file handle was leaked (now closed via
    ``with``); the local ``id`` shadowed the builtin (renamed — the
    ``id=`` keyword sent to get_url is unchanged).

    :param params: plugin routing parameters (.channel_name, .category_url,
        .category_name, .page, .module_path, .module_name, optionally
        'previous_listing')
    :return: Kodi listing built by common.PLUGIN.create_listing
    """
    videos = []
    if 'previous_listing' in params:
        # Pagination: keep the items gathered on previous pages.
        videos = ast.literal_eval(params['previous_listing'])
    if params.channel_name == 'cnews':
        url_page = params.category_url + '/page/%s' % params.page
        file_path = utils.download_catalog(
            url_page,
            '%s_%s_%s.html' % (
                params.channel_name, params.category_name, params.page))
        with open(file_path) as html_file:
            root_html = html_file.read()
        root_soup = bs(root_html, 'html.parser')
        programs = root_soup.find_all('article', class_='item')
        for program in programs:
            title = program.find('h3').get_text().encode('utf-8')
            thumb = program.find('img').get('src').encode('utf-8')
            # Get Video_ID: requires fetching the article page itself.
            video_html = utils.get_webcontent(
                program.find('a').get('href').encode('utf-8'))
            video_id = re.compile(r'videoId=(.*?)"').findall(video_html)[0]
            # Get Description
            datas_video = bs(video_html, 'html.parser')
            description = datas_video.find(
                'article', class_='entry-body').get_text().encode('utf-8')
            duration = 0
            # Release date is "YYYY-MM-DDT..." in a meta tag; split into
            # (year, month, day) components.
            date_value = re.compile(
                r'property="video:release_date" content="(.*?)"').findall(
                video_html)[0].split('T')[0].split('-')
            day = date_value[2]
            month = date_value[1]
            year = date_value[0]
            date = '.'.join((day, month, year))
            aired = '-'.join((year, month, day))
            info = {
                'video': {
                    'title': title,
                    'plot': description,
                    'aired': aired,
                    'date': date,
                    'duration': duration,
                    'year': year,
                    # 'genre': category,
                    'mediatype': 'tvshow'
                }
            }
            # Context-menu entry that triggers the download action.
            download_video = (
                common.GETTEXT('Download'),
                'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                    action='download_video',
                    module_path=params.module_path,
                    module_name=params.module_name,
                    id=video_id) + ')')
            context_menu = []
            context_menu.append(download_video)
            videos.append({
                'label': title,
                'thumb': thumb,
                'fanart': thumb,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next='play_r',
                    id=video_id),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })
        # More videos...
        videos.append({
            'label': common.ADDON.get_localized_string(30700),
            'url': common.PLUGIN.get_url(
                module_path=params.module_path,
                module_name=params.module_name,
                action='replay_entry',
                category_url=params.category_url,
                category_name=params.category_name,
                next='list_videos',
                page=str(int(params.page) + 1),
                update_listing=True,
                previous_listing=str(videos))
        })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_DATE,
                      common.sp.xbmcplugin.SORT_METHOD_DURATION,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE,
                      common.sp.xbmcplugin.SORT_METHOD_GENRE,
                      common.sp.xbmcplugin.SORT_METHOD_PLAYCOUNT,
                      common.sp.xbmcplugin.SORT_METHOD_UNSORTED),
        content='tvshows',
        update_listing='update_listing' in params,
        category=common.get_window_title(params))
def list_videos(params):
    """Build videos listing.

    CNews variant that scrapes an AJAX-style page fragment: the response
    contains escaped HTML, which is unescaped before parsing.
    """
    videos = []
    if 'previous_listing' in params:
        # Pagination: keep the items gathered on previous pages.
        videos = ast.literal_eval(params['previous_listing'])
    if params.channel_name == 'cnews':
        root_html = utils.get_webcontent(params.category_url % params.page)
        # The fragment arrives with escaped quotes and slashes; undo the
        # escaping so BeautifulSoup sees real markup.
        root_html = root_html.replace('\n\r', '').replace('\\"', '"').replace('\\/', '/')
        root_soup = bs(root_html, 'html.parser')
        # Both video teasers and show teasers are listed together.
        programs = root_soup.find_all('a', class_='video-item-wrapper')
        programs += root_soup.find_all('a', class_='emission-item-wrapper')
        for program in programs:
            title = program.find('img').get('alt').encode('utf-8')
            # Thumbnail is lazy-loaded, hence 'data-src' not 'src'.
            thumb = program.find('img').get('data-src').encode('utf-8')
            video_url = URL_ROOT_SITE + program.get('href')
            duration = 0
            info = {
                'video': {
                    'title': title,
                    # 'plot': description,
                    # 'aired': aired,
                    # 'date': date,
                    'duration': duration,
                    # 'year': year,
                    # 'genre': category,
                    'mediatype': 'tvshow'
                }
            }
            # Context-menu entry that triggers the download action.
            download_video = (
                common.GETTEXT('Download'),
                'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                    action='download_video',
                    module_path=params.module_path,
                    module_name=params.module_name,
                    video_url=video_url) + ')')
            context_menu = []
            context_menu.append(download_video)
            videos.append({
                'label': title,
                'thumb': thumb,
                'fanart': thumb,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next='play_r',
                    video_url=video_url),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })
        # More videos...
        videos.append({
            'label': common.ADDON.get_localized_string(30700),
            'url': common.PLUGIN.get_url(
                module_path=params.module_path,
                module_name=params.module_name,
                action='replay_entry',
                category_url=params.category_url,
                category_name=params.category_name,
                next='list_videos',
                page=str(int(params.page) + 1),
                update_listing=True,
                previous_listing=str(videos))
        })
    return common.PLUGIN.create_listing(
        videos,
        # NOTE(review): (X) is a bare constant, not a 1-tuple — sibling
        # functions pass tuples; confirm create_listing accepts an int.
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED),
        content='tvshows',
        update_listing='update_listing' in params,
        category=common.get_window_title(params))
def list_shows(params):
    """Build categories listing.

    Multi-level navigation dispatcher driven by params.next:

    - list_shows_emissions_1: top-level programme categories.
    - list_shows_emissions_2: one category -> "videos"/"programmes"
      shortcuts plus its sub-categories.
    - list_shows_emissions_3: same two shortcuts for a sub-category.
    - list_shows_emissions_4: paginated programme list.
    - list_shows_emissions_5: seasons of one programme.
    - list_shows_films_series_1/_2: films & series by type then language.
    """
    shows = []
    if 'previous_listing' in params:
        # Pagination: keep the items gathered on previous pages.
        shows = ast.literal_eval(params['previous_listing'])
    if params.next == 'list_shows_emissions_1':
        # Build the top-level programme categories.
        replay_categories_programs_html = utils.get_webcontent(
            params.category_url)
        replay_categories_programs_soup = bs(replay_categories_programs_html,
                                             'html.parser')
        # NOTE: trailing space in the class name matches the site markup.
        root_categories_programs = replay_categories_programs_soup.find(
            'li', class_='item_4 is_active ')
        replay_categories_programs = root_categories_programs.find_all('a')
        for category_programs in replay_categories_programs:
            categorie_programs_title = category_programs.get_text()
            categorie_programs_title = categorie_programs_title.strip()
            categorie_programs_title = categorie_programs_title.encode('utf-8')
            categorie_programs_url = URL_ROOT + category_programs.get('href')
            shows.append({
                'label': categorie_programs_title,
                'url': common.PLUGIN.get_url(
                    action='channel_entry',
                    next='list_shows_emissions_2',
                    title=categorie_programs_title,
                    categorie_programs_url=categorie_programs_url,
                    window_title=categorie_programs_title)
            })
    elif params.next == 'list_shows_emissions_2':
        # Build sub-categories if any, plus the "programmes"/"videos" entries.
        replay_subcategories_programs_html = utils.get_webcontent(
            params.categorie_programs_url)
        replay_subcategories_programs_soup = bs(
            replay_subcategories_programs_html, 'html.parser')
        # "All videos" shortcut for this category.
        show_title = '# Les videos'
        next_value = 'list_videos_emissions_1'
        show_url = params.categorie_programs_url
        shows.append({
            'label': show_title,
            'url': common.PLUGIN.get_url(action='channel_entry',
                                         next=next_value,
                                         title=show_title,
                                         page='1',
                                         show_url=show_url,
                                         window_title=show_title)
        })
        # "All programmes" shortcut: same URL with /cat- swapped
        # for /prgcat-.
        programs_title = '# Les programmes'
        next_value = 'list_shows_emissions_4'
        programs_url = params.categorie_programs_url.replace(
            '/cat-', '/prgcat-')
        shows.append({
            'label': programs_title,
            'url': common.PLUGIN.get_url(action='channel_entry',
                                         next=next_value,
                                         title=programs_title,
                                         page='1',
                                         programs_url=programs_url,
                                         window_title=programs_title)
        })
        # Sub-categories of this category.
        subcategories = replay_subcategories_programs_soup.find(
            'div', class_='nav-button-filter').find_all('a')
        for subcategory in subcategories:
            subcategorie_programs_title = subcategory.find(
                'span', class_='label').get_text().encode('utf-8')
            subcategorie_programs_url = URL_ROOT + subcategory.get('href')
            shows.append({
                'label': subcategorie_programs_title,
                'url': common.PLUGIN.get_url(
                    action='channel_entry',
                    next='list_shows_emissions_3',
                    title=subcategorie_programs_title,
                    subcategorie_programs_url=subcategorie_programs_url,
                    window_title=subcategorie_programs_title)
            })
    elif params.next == 'list_shows_emissions_3':
        # "All videos" shortcut for this sub-category.
        show_title = '# Les videos'
        next_value = 'list_videos_emissions_1'
        show_url = params.subcategorie_programs_url
        shows.append({
            'label': show_title,
            'url': common.PLUGIN.get_url(action='channel_entry',
                                         next=next_value,
                                         title=show_title,
                                         page='1',
                                         show_url=show_url,
                                         window_title=show_title)
        })
        # "All programmes" shortcut for this sub-category.
        programs_title = '# Les programmes'
        next_value = 'list_shows_emissions_4'
        programs_url = params.subcategorie_programs_url.replace(
            '/cat-', '/prgcat-')
        shows.append({
            'label': programs_title,
            'url': common.PLUGIN.get_url(action='channel_entry',
                                         next=next_value,
                                         title=programs_title,
                                         page='1',
                                         programs_url=programs_url,
                                         window_title=programs_title)
        })
    elif params.next == 'list_shows_emissions_4':
        # Paginated list of programmes for a (sub-)category.
        replay_programs_html = utils.get_webcontent(params.programs_url +
                                                    '?page=%s' % params.page)
        replay_programs_soup = bs(replay_programs_html, 'html.parser')
        replay_programs = replay_programs_soup.find_all(
            'figure', class_='media-meta-fig')
        for program in replay_programs:
            # NOTE: trailing space in 'title ' matches the site markup.
            program_title = program.find('h2', class_='title ').find(
                'span').find('a').get_text().strip().encode('utf-8')
            program_img = program.find('img').get('src')
            program_url = URL_ROOT + program.find('h2', class_='title ').find(
                'span').find('a').get('href').encode('utf-8')
            shows.append({
                'label': program_title,
                'thumb': program_img,
                'url': common.PLUGIN.get_url(action='channel_entry',
                                             next='list_shows_emissions_5',
                                             program_title=program_title,
                                             program_url=program_url,
                                             window_title=program_title)
            })
        # A pager div signals there are more pages.
        if replay_programs_soup.find('div', class_='pager pager margin_40t') \
                is not None:
            # More programs...
            shows.append({
                'label': '# ' + common.ADDON.get_localized_string(30108),
                'url': common.PLUGIN.get_url(action='channel_entry',
                                             next='list_shows_emissions_4',
                                             programs_url=params.programs_url,
                                             page=str(int(params.page) + 1),
                                             update_listing=True,
                                             previous_listing=str(shows))
            })
    elif params.next == 'list_shows_emissions_5':
        # Seasons of one programme (or a single default season).
        replay_seasons_html = utils.get_webcontent(params.program_url +
                                                   'saisons/')
        replay_seasons_soup = bs(replay_seasons_html, 'html.parser')
        replay_seasons = replay_seasons_soup.find_all(
            'h2', class_='fs18 d_inline_block margin_10r')
        if len(replay_seasons) > 0:
            for season in replay_seasons:
                season_title = season.find('a').find('span').get_text().strip()
                show_season_url = URL_ROOT + season.find(
                    'a', class_='no_underline').get('href').encode('utf-8')
                # Find the last page number by scanning '?page=' links
                # on the season page; '0' means no pagination found.
                last_page = '0'
                info_show_season = utils.get_webcontent(show_season_url)
                info_show_season_pages = re.compile('<a href="(.*?)"').findall(
                    info_show_season)
                for info_show_season_page in info_show_season_pages:
                    if '?page=' in info_show_season_page:
                        last_page = info_show_season_page.split('=')[1]
                shows.append({
                    'label': season_title,
                    'url': common.PLUGIN.get_url(action='channel_entry',
                                                 next='list_videos_emissions_1',
                                                 title=season_title,
                                                 page='1',
                                                 last_page=last_page,
                                                 show_url=show_season_url,
                                                 window_title=season_title)
                })
        else:
            # No season headers: fall back to the single linked season.
            season_title = replay_seasons_soup.find(
                'div', class_='margin_20t margin_40b').find(
                'a').get_text().strip().encode('utf-8')
            show_season_url = URL_ROOT + replay_seasons_soup.find(
                'div', class_='margin_20t margin_40b').find('a').get(
                'href').encode('utf-8')
            # Find the last page number (same scan as above).
            last_page = '0'
            info_show_season = utils.get_webcontent(show_season_url)
            info_show_season_pages = re.compile('<a href="(.*?)"').findall(
                info_show_season)
            for info_show_season_page in info_show_season_pages:
                if '?page=' in info_show_season_page:
                    last_page = info_show_season_page.split('=')[1]
            shows.append({
                'label': season_title,
                'url': common.PLUGIN.get_url(action='channel_entry',
                                             next='list_videos_emissions_1',
                                             title=season_title,
                                             page='1',
                                             last_page=last_page,
                                             show_url=show_season_url,
                                             window_title=season_title)
            })
    elif params.next == 'list_shows_films_series_1':
        # Build the list of film/series types.
        replay_types_films_series_html = utils.get_webcontent(
            params.category_url)
        replay_types_films_series_soup = bs(replay_types_films_series_html,
                                            'html.parser')
        replay_types_films_series = replay_types_films_series_soup.find_all(
            'div', class_='left_col_menu_item')[0]
        # "All videos" shortcut.
        show_title = '# Toutes les videos'
        next_value = 'list_videos_films_series_1'
        show_url = params.category_url
        shows.append({
            'label': show_title,
            'url': common.PLUGIN.get_url(action='channel_entry',
                                         next=next_value,
                                         title=show_title,
                                         page='1',
                                         show_url=show_url,
                                         window_title=show_title)
        })
        for all_types in replay_types_films_series.find_all('a'):
            show_title = all_types.get_text()
            next_value = 'list_shows_films_series_2'
            show_url = URL_ROOT + all_types.get('href')
            shows.append({
                'label': show_title,
                'url': common.PLUGIN.get_url(action='channel_entry',
                                             next=next_value,
                                             title=show_title,
                                             show_url=show_url,
                                             window_title=show_title)
            })
    elif params.next == 'list_shows_films_series_2':
        # Build the list of languages for one film/series type.
        show_title = '# Toutes les videos'
        next_value = 'list_videos_films_series_1'
        show_url = params.show_url
        shows.append({
            'label': show_title,
            'url': common.PLUGIN.get_url(action='channel_entry',
                                         next=next_value,
                                         title=show_title,
                                         show_url=show_url,
                                         page='1',
                                         window_title=show_title)
        })
        for language, language_url in CATEGORIES_LANGUAGE.iteritems():
            show_title = language
            next_value = 'list_videos_films_series_1'
            show_url = params.show_url + language_url
            shows.append({
                'label': show_title,
                'url':
                    common.PLUGIN.get_url(action='channel_entry',
                                          next=next_value,
                                          title=show_title,
                                          show_url=show_url,
                                          page='1',
                                          window_title=show_title)
            })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_LABEL,
                      common.sp.xbmcplugin.SORT_METHOD_UNSORTED),
        update_listing='update_listing' in params,
        category=common.get_window_title())
def get_stream_dailymotion(video_id, isDownloadVideo):
    """Resolve a Dailymotion video id to a playable stream URL.

    Known limitations on Jarvis (xbmc 2.24.0):
    * m3u8 playback is KO but MP4 works,
    * Dailymotion-hosted videos from Allocine fail,
    * Dailymotion-hosted live TV (PublicSenat, LCP, L'Equipe TV,
      Numero 23) fails.

    :param video_id: Dailymotion video identifier
    :param isDownloadVideo: truthy when the caller wants the embed URL
        for downloading instead of a resolved stream
    :return: a stream URL (str), or the embed URL for downloads
    """
    url_dmotion = URL_DAILYMOTION_EMBED % (video_id)
    # For downloads, hand back the embed URL untouched.
    if isDownloadVideo:
        return url_dmotion
    html_video = utils.get_webcontent(url_dmotion)
    # The embed page JSON is backslash-escaped; strip the escapes so the
    # regexes below match.
    html_video = html_video.replace('\\', '')
    # Case Jarvis: pick an MP4 rendition.
    if common.sp.xbmc.__version__ == '2.24.0':
        all_url_video = re.compile(
            r'{"type":"video/mp4","url":"(.*?)"').findall(html_video)
        if len(all_url_video) > 0:
            if DESIRED_QUALITY == "DIALOG":
                # Let the user pick; the quality label is the 'H264-…'
                # fragment of each MP4 URL.
                all_datas_videos_quality = []
                all_datas_videos_path = []
                for datas in all_url_video:
                    datas_quality = re.search('H264-(.+?)/', datas).group(1)
                    all_datas_videos_quality.append('H264-' + datas_quality)
                    all_datas_videos_path.append(datas)
                selected_item = common.sp.xbmcgui.Dialog().select(
                    common.GETTEXT('Choose video quality'),
                    all_datas_videos_quality)
                return all_datas_videos_path[selected_item].encode('utf-8')
            elif DESIRED_QUALITY == 'BEST':
                # Best quality is listed last.
                return all_url_video[-1]
            else:
                # Default/worst quality is listed first.
                return all_url_video[0]
        # In case some M3U8 work in Jarvis
        else:
            url_video_auto = re.compile(
                r'{"type":"application/x-mpegURL","url":"(.*?)"').findall(
                html_video)[0]
            return url_video_auto
    # Case Krypton and newer: use the HLS master playlist.
    else:
        url_video_auto = re.compile(
            r'{"type":"application/x-mpegURL","url":"(.*?)"').findall(
            html_video)[0]
        m3u8_video_auto = utils.get_webcontent(url_video_auto)
        # Variant URIs are relative: fall back to the auto playlist.
        # (TODO: how to build the absolute path? add quality after)
        if 'http' not in m3u8_video_auto:
            return url_video_auto
        # Variant URIs are absolute: pick one by desired quality. Each
        # '#EXT-X-STREAM-INF' line with RESOLUTION= is followed by its URI.
        else:
            url = ''
            lines = m3u8_video_auto.splitlines()
            if DESIRED_QUALITY == "DIALOG":
                all_datas_videos_quality = []
                all_datas_videos_path = []
                for k in range(0, len(lines) - 1):
                    if 'RESOLUTION=' in lines[k]:
                        all_datas_videos_quality.append(
                            re.compile(r'RESOLUTION=(.*?),').findall(
                                lines[k])[0])
                        all_datas_videos_path.append(lines[k + 1])
                selected_item = common.sp.xbmcgui.Dialog().select(
                    common.GETTEXT('Choose video quality'),
                    all_datas_videos_quality)
                return all_datas_videos_path[selected_item].encode('utf-8')
            elif DESIRED_QUALITY == 'BEST':
                # Best quality is listed last; keep the final match.
                for k in range(0, len(lines) - 1):
                    if 'RESOLUTION=' in lines[k]:
                        url = lines[k + 1]
                return url
            else:
                # Default: first variant found.
                for k in range(0, len(lines) - 1):
                    if 'RESOLUTION=' in lines[k]:
                        url = lines[k + 1]
                        break
                return url
def get_video_url(params): """Get video URL and start video player""" if params.next == 'play_r' or params.next == 'download_video': video_html = utils.get_webcontent(params.video_url) video_id = re.compile( r'www.wat.tv/embedframe/(.*?)[\"\?]').findall( video_html)[0] url_wat_embed = URL_WAT_BY_ID % video_id wat_embed_html = utils.get_webcontent(url_wat_embed) stream_id = re.compile('UVID=(.*?)&').findall(wat_embed_html)[0] url_json = URL_VIDEO_STREAM % stream_id htlm_json = utils.get_webcontent(url_json, random_ua=True) json_parser = json.loads(htlm_json) # Check DRM in the m3u8 file manifest = utils.get_webcontent( json_parser["hls"], random_ua=True) if 'drm' in manifest: utils.send_notification(common.ADDON.get_localized_string(30102)) return '' root = os.path.dirname(json_parser["hls"]) manifest = utils.get_webcontent( json_parser["hls"].split('&max_bitrate=')[0]) lines = manifest.splitlines() if DESIRED_QUALITY == "DIALOG": all_datas_videos_quality = [] all_datas_videos_path = [] for k in range(0, len(lines) - 1): if 'RESOLUTION=' in lines[k]: if len(re.compile( r'RESOLUTION=(.*?),').findall( lines[k])) > 0: all_datas_videos_quality.append( re.compile( r'RESOLUTION=(.*?),').findall( lines[k])[0]) else: all_datas_videos_quality.append( lines[k].split('RESOLUTION=')[1]) all_datas_videos_path.append( root + '/' + lines[k + 1]) seleted_item = common.sp.xbmcgui.Dialog().select( _('Choose video quality'), all_datas_videos_quality) return all_datas_videos_path[seleted_item].encode( 'utf-8') elif DESIRED_QUALITY == 'BEST': # Last video in the Best for k in range(0, len(lines) - 1): if 'RESOLUTION=' in lines[k]: url = root + '/' + lines[k + 1] return url else: for k in range(0, len(lines) - 1): if 'RESOLUTION=' in lines[k]: url = root + '/' + lines[k + 1] break return url