def get_partener_key(params):
    """Return the partner key scraped from the live page's JS files.

    Downloads the live root page, then each referenced JS file, and keeps
    the last 'partner_key' value found (empty string when none is found).
    """
    file_path_root_live = utils.download_catalog(
        URL_ROOT_LIVE,
        '%s_root_live.html' % params.channel_name,
    )
    # Close file handles deterministically (they were leaked before)
    with open(file_path_root_live) as f:
        html_root_live = f.read()
    list_js_files = re.compile(
        r'<script type="text\/javascript" src="(.*?)">'
    ).findall(html_root_live)
    partener_key_value = ''
    for i, js_file in enumerate(list_js_files):
        # Any of the JS files may embed the partner key
        file_path_js = utils.download_catalog(
            js_file,
            '%s_partener_key_%s.js' % (params.channel_name, str(i)),
        )
        with open(file_path_js) as f:
            partener_key_js = f.read()
        partener_key = re.compile(
            r"partner_key: '(.+?)'").findall(partener_key_js)
        if len(partener_key) > 0:
            partener_key_value = partener_key[0]
    return partener_key_value
def start_live_tv_stream(params):
    """Resolve the live m3u8 URL and delegate playback to get_video_url."""
    url_live = ''
    file_path = utils.download_catalog(URL_LIVE_TV,
                                       params.channel_name + '_live.html')
    # Use context managers so the cached files are closed (was leaked)
    with open(file_path) as f:
        root_live_html = f.read()
    root_live_soup = bs(root_live_html, 'html.parser')
    live_soup = root_live_soup.find('div', class_='wrapperVideo')
    url_live_embeded = ''
    # Keep the src of the last iframe inside the video wrapper
    for live in live_soup.find_all('iframe'):
        url_live_embeded = live.get('src').encode('utf-8')
    file_path_2 = utils.download_catalog(
        url_live_embeded, params.channel_name + '_live_embeded.html')
    with open(file_path_2) as f:
        root_live_embeded_html = f.read()
    all_url_video = re.compile(r'file: \'(.*?)\'').findall(
        root_live_embeded_html)
    # Keep the last m3u8 URL found in the embedded player
    for url_video in all_url_video:
        if url_video.count('m3u8') > 0:
            url_live = url_video
    params['next'] = 'play_l'
    params['url_live'] = url_live
    return get_video_url(params)
def get_video_url(params):
    """Get video URL and start video player.

    'play_r'/'download_video': decode the base64 stream URL from the VOD
    JSON. 'play_l': read the live XML and pick the world or Japan stream
    depending on the user's country setting.
    """
    if params.next == 'play_r' or params.next == 'download_video':
        url = ''
        file_path = utils.download_catalog(
            URL_VIDEO_VOD % (get_pcode(params), params.video_id),
            '%s_%s_video_vod.json' % (params.channel_name, params.video_id))
        # Close the file handle deterministically (was leaked before)
        with open(file_path) as f:
            video_vod = f.read()
        json_parser = json.loads(video_vod)
        # The stream URL is base64-encoded in the authorization data
        for stream in json_parser["authorization_data"][
                params.video_id]["streams"]:
            url_base64 = stream["url"]["data"]
            url = base64.standard_b64decode(url_base64)
        return url
    elif params.next == 'play_l':
        # Get URL Live
        file_path = utils.download_catalog(
            URL_LIVE_NHK % params.channel_name,
            '%s_live.xml' % params.channel_name,
        )
        with open(file_path) as f:
            live_xml = f.read()
        xmlElements = ET.XML(live_xml)
        url_live = ''
        desired_country = common.PLUGIN.get_setting(params.channel_name +
                                                    '.country')
        # "wstrm" is the world stream, "jstrm" the Japan-only stream
        if desired_country == 'Outside Japan':
            url_live = xmlElements.find("tv_url").findtext("wstrm").encode(
                'utf-8')
        else:
            url_live = xmlElements.find("tv_url").findtext("jstrm").encode(
                'utf-8')
        return url_live
def get_partener_key(params):
    """Return the partner key scraped from the live page's JS files.

    Downloads the live root page, then each referenced JS file, and keeps
    the last 'partner_key' value found (empty string when none is found).
    """
    file_path_root_live = utils.download_catalog(
        URL_ROOT_LIVE,
        '%s_root_live.html' % params.channel_name,
    )
    # Close file handles deterministically (they were leaked before)
    with open(file_path_root_live) as f:
        html_root_live = f.read()
    list_js_files = re.compile(
        r'<script type="text\/javascript" src="(.*?)">').findall(
            html_root_live)
    partener_key_value = ''
    for i, js_file in enumerate(list_js_files):
        # Any of the JS files may embed the partner key
        file_path_js = utils.download_catalog(
            js_file,
            '%s_partener_key_%s.js' % (params.channel_name, str(i)),
        )
        with open(file_path_js) as f:
            partener_key_js = f.read()
        partener_key = re.compile(
            r"partner_key: '(.+?)'").findall(partener_key_js)
        if len(partener_key) > 0:
            partener_key_value = partener_key[0]
    return partener_key_value
def list_live(params):
    """Build live listing"""
    lives = []
    title = ''
    plot = ''
    duration = 0
    img = ''
    url_live = ''
    file_path = utils.download_catalog(URL_LIVE_TV,
                                       params.channel_name + '_live.html')
    # Use context managers so the cached files are closed (was leaked)
    with open(file_path) as f:
        root_live_html = f.read()
    root_live_soup = bs(root_live_html, 'html.parser')
    live_soup = root_live_soup.find('div', class_='wrapperVideo')
    url_live_embeded = ''
    # Keep the src of the last iframe inside the video wrapper
    for live in live_soup.find_all('iframe'):
        url_live_embeded = live.get('src').encode('utf-8')
    file_path_2 = utils.download_catalog(
        url_live_embeded, params.channel_name + '_live_embeded.html')
    with open(file_path_2) as f:
        root_live_embeded_html = f.read()
    all_url_video = re.compile(r'file: \'(.*?)\'').findall(
        root_live_embeded_html)
    # Keep the last m3u8 URL found in the embedded player
    for url_video in all_url_video:
        if url_video.count('m3u8') > 0:
            url_live = url_video
    title = '%s Live' % params.channel_name.upper()
    info = {'video': {'title': title, 'plot': plot, 'duration': duration}}
    lives.append({
        'label': title,
        'fanart': img,
        'thumb': img,
        'url': common.PLUGIN.get_url(
            action='channel_entry',
            next='play_l',
            url_live=url_live,
        ),
        'is_playable': True,
        'info': info
    })
    return common.PLUGIN.create_listing(
        lives,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL),
        category=common.get_window_title())
def get_video_url(params):
    """Get video URL and start video player.

    For live playback, resolve a first JSON that points at a second JSON
    holding the primary stream URL; the API subdomain depends on language.
    """
    if params.next == 'play_l':
        desired_language = common.PLUGIN.get_setting(params.channel_name +
                                                     '.language')
        # EN and AR use dedicated subdomains; others use the language code
        if desired_language == 'EN':
            url_live_json = URL_LIVE_API % 'www'
        elif desired_language == 'AR':
            url_live_json = URL_LIVE_API % 'arabic'
        else:
            url_live_json = URL_LIVE_API % desired_language.lower()
        file_path = utils.download_catalog(
            url_live_json,
            '%s_%s_live.json' % (params.channel_name,
                                 desired_language.lower()))
        # Close file handles deterministically (they were leaked before)
        with open(file_path) as f:
            json_live = f.read()
        json_parser = json.loads(json_live)
        url_2nd_json = json_parser["url"]
        file_path_2 = utils.download_catalog(
            url_2nd_json,
            '%s_%s_live_2.json' % (params.channel_name,
                                   desired_language.lower()))
        with open(file_path_2) as f:
            json_live_2 = f.read()
        json_parser_2 = json.loads(json_live_2)
        return json_parser_2["primary"]
def get_live_item(params):
    """Build the live stream item (HLS URL + access token)."""
    plot = ''
    duration = 0
    img = ''
    url_live = ''
    file_path = utils.download_catalog(URL_LIVE_SITE,
                                       '%s_live.html' % (params.channel_name))
    # Close file handles deterministically (they were leaked before)
    with open(file_path) as f:
        live_html = f.read()
    # The script id carries the three values expected by the JSON endpoints
    id_value = re.compile(r'<script id="(.*?)"').findall(live_html)[0].split(
        '_')
    # json with hls
    file_path_json = utils.download_catalog(
        JSON_LIVE % (id_value[0], id_value[1], id_value[2]),
        '%s_live.json' % (params.channel_name))
    with open(file_path_json) as f:
        live_json = f.read()
    live_jsonparser = json.loads(live_json)
    # json with token
    file_path_json_token = utils.download_catalog(
        JSON_LIVE_TOKEN % (id_value[0], id_value[1], id_value[2]),
        '%s_live_token.json' % (params.channel_name))
    with open(file_path_json_token) as f:
        live_json_token = f.read()
    live_jsonparser_token = json.loads(live_json_token)
    # Final URL is the HLS address with the access token appended
    url_live = 'http:' + live_jsonparser["hls"].encode('utf-8') + \
        live_jsonparser_token["token"].encode('utf-8')
    info = {
        'video': {
            'title': params.channel_label,
            'plot': plot,
            'duration': duration
        }
    }
    return {
        'label': params.channel_label,
        'fanart': img,
        'thumb': img,
        'url': common.PLUGIN.get_url(module_path=params.module_path,
                                     module_name=params.module_name,
                                     action='start_live_tv_stream',
                                     next='play_l',
                                     url_live=url_live),
        'is_playable': True,
        'info': info
    }
def list_shows(channel, param):
    """Build the themes/categories listing, one tree level at a time."""
    shows = []
    if param == 'none':
        # Root level: list the themes
        filePath = utils.download_catalog(url_root + '/themes',
                                          channel + '.html')
        # Close file handles deterministically (they were leaked before)
        with open(filePath) as f:
            root_html = f.read()
        root_soup = bs(root_html, "html.parser")
        themes_soup = root_soup.find('div', attrs={'class': 'fright'})
        themes_soup = themes_soup.find('ul', attrs={'id': 'themes'})
        for theme in themes_soup.findAll('li'):
            name_theme = theme.find('a')['title'].encode('utf-8')
            url_theme = theme.find('a')['href'].encode('utf-8')
            img_theme = theme.find('img')['src'].encode('utf-8')
            # URL depth tracks how deep we are in the category tree
            depth = url_theme.count('/')
            shows.append([
                channel, url_theme + '|' + str(depth), name_theme,
                url_root + img_theme, 'folder'
            ])
    else:
        current_url = param.split('|')[0]
        current_url_depth = int(param.split('|')[1])
        file_path = utils.download_catalog(current_url, current_url + '.html')
        with open(file_path) as f:
            theme_html = f.read()
        theme_soup = bs(theme_html, "html.parser")
        categories = theme_soup.find('ul', attrs={'id': 'racine'})
        categories = categories.find_all('a')
        for category in categories:
            url = category['href'].encode('utf-8')
            url_depth = url.count('/')
            # Only direct children of the current category
            if url_depth == current_url_depth + 1:
                if current_url in url:
                    title = category.find('span').get_text().encode('utf-8')
                    next_type = 'folder'
                    # BUGFIX: bs4 returns a LIST for the 'class' attribute,
                    # so the previous equality test against 'file' never
                    # matched; use a membership test instead
                    if 'file' in category.find('span')['class']:
                        next_type = 'shows'
                    shows.append([
                        channel, url + '|' + str(url_depth), title, '',
                        next_type
                    ])
        # BUGFIX: typo in the user-facing label ('cétégorie' -> 'catégorie')
        shows.append([
            channel, current_url + '|none',
            'Dernières vidéos de cette catégorie', '', 'shows'
        ])
    return shows
def get_live_item(params):
    """Build the live stream item (HLS URL + access token)."""
    plot = ''
    duration = 0
    img = ''
    url_live = ''
    file_path = utils.download_catalog(
        URL_LIVE_SITE, '%s_live.html' % (params.channel_name))
    # Close file handles deterministically (they were leaked before)
    with open(file_path) as f:
        live_html = f.read()
    # The script id carries the three values expected by the JSON endpoints
    id_value = re.compile(
        r'<script id="(.*?)"').findall(live_html)[0].split('_')
    # json with hls
    file_path_json = utils.download_catalog(
        JSON_LIVE % (id_value[0], id_value[1], id_value[2]),
        '%s_live.json' % (params.channel_name))
    with open(file_path_json) as f:
        live_json = f.read()
    live_jsonparser = json.loads(live_json)
    # json with token
    file_path_json_token = utils.download_catalog(
        JSON_LIVE_TOKEN % (id_value[0], id_value[1], id_value[2]),
        '%s_live_token.json' % (params.channel_name))
    with open(file_path_json_token) as f:
        live_json_token = f.read()
    live_jsonparser_token = json.loads(live_json_token)
    # Final URL is the HLS address with the access token appended
    url_live = 'http:' + live_jsonparser["hls"].encode('utf-8') + \
        live_jsonparser_token["token"].encode('utf-8')
    info = {
        'video': {
            'title': params.channel_label,
            'plot': plot,
            'duration': duration
        }
    }
    return {
        'label': params.channel_label,
        'fanart': img,
        'thumb': img,
        'url': common.PLUGIN.get_url(
            module_path=params.module_path,
            module_name=params.module_name,
            action='start_live_tv_stream',
            next='play_l',
            url_live=url_live
        ),
        'is_playable': True,
        'info': info
    }
def get_video_url(params):
    """Get video URL and start video player.

    Live: scrape the language-specific page, parse the embedded
    application/json script, and return the last non-empty media source.
    Replay/download: the URL is already in params. YouTube replays are
    resolved through the resolver helper.
    """
    if params.next == 'play_l':
        desired_language = common.PLUGIN.get_setting(
            params.channel_name + '.language')
        url_live = URL_LIVE_SITE % desired_language.lower()
        file_path = utils.download_catalog(
            url_live,
            '%s_%s_live.html' % (params.channel_name,
                                 desired_language.lower())
        )
        # Close the file handle deterministically (was leaked before)
        with open(file_path) as f:
            html_live = f.read()
        root_soup = bs(html_live, 'html.parser')
        url_stream = ''
        # Stream URLs are embedded in an application/json script tag
        json_parser = json.loads(
            root_soup.select_one("script[type=application/json]").text)
        media_datas_list = json_parser['medias']['media']
        media_datas_list = media_datas_list['media_sources']['media_source']
        # Keep the last non-empty source
        for datas in media_datas_list:
            if datas['source']:
                url_stream = datas['source']
        return url_stream
    elif params.next == 'play_r' or params.next == 'download_video':
        return params.url
    elif params.next == 'play_r_youtube':
        return resolver.get_stream_youtube(params.video_id, False)
def list_shows(channel, param):
    """Build the categories (root) or programs (category page) listing."""
    shows = []
    if param == 'none':
        # Root level: static category map defined at module level
        for url, title in categories.iteritems():
            url = url_root + url
            shows.append([channel, url + '|' + title, title, '', 'folder'])
    else:
        url = param.split('|')[0]
        cat = param.split('|')[1]
        file_path = utils.download_catalog(url, cat + '.html', random_ua=True)
        # Close the file handle deterministically (was leaked before)
        with open(file_path) as f:
            html = f.read()
        page_soup = bs(html, "html.parser")
        articles = page_soup.find('section', class_='js-item-container')
        articles = articles.find_all('article')
        for article in articles:
            title_url = article.find('h3').find('a')
            title = title_url['title'].encode('utf-8')
            url_pgm = title_url['href'].encode('utf-8')
            imgs = article.find('img')['data-srcset']
            imgs = re.compile(r'http://(.*?).jpg', re.DOTALL).findall(imgs)
            if len(imgs) == 0:
                img = ''
            else:
                # Last entry of the srcset is the highest resolution
                img = imgs[len(imgs) - 1]
                img = 'http://' + img + '.jpg'
            shows.append([channel, url_pgm, title, img, 'shows'])
    return shows
def get_video_url(params):
    """Get video URL and start video player.

    Handles three replay flavours: weather (URL already known), news
    (JSON lookup) and lifestyle (two-step id + API-key scraping).
    """
    if params.next == 'play_weather_r':
        return params.video_url
    elif params.next == 'play_news_r':
        url = ''
        file_path = utils.download_catalog(
            URL_STREAM_NEWS % (params.video_date, params.video_id),
            '%s_%s.json' % (params.channel_name, params.video_id))
        # Close the file handle deterministically (was leaked before)
        with open(file_path) as f:
            video_vod = f.read()
        json_parser = json.loads(video_vod)
        return json_parser["mediaResource"]["url"]
    elif params.next == 'play_lifestyle_r':
        video_id_html = utils.get_webcontent(params.video_url)
        # The video id appears in one of two player URL formats;
        # run the first regex once instead of twice
        ids = re.compile(r'player.php\?v=(.*?)&').findall(video_id_html)
        if ids:
            video_id = ids[0]
        else:
            video_id = re.compile(r'movie-s.nhk.or.jp/v/(.*?)\?').findall(
                video_id_html)[0]
        api_key_html = utils.get_webcontent(URL_API_KEY_LIFE_STYLE % video_id)
        api_key = re.compile(r'data-de-api-key="(.*?)"').findall(
            api_key_html)[0]
        url_stream = URL_STREAM_NHK_LIFE_STYLE % (api_key, video_id)
        url_stream_json = utils.get_webcontent(url_stream)
        json_parser_stream = json.loads(url_stream_json)
        return json_parser_stream["response"]["WsProgramResponse"]["program"][
            "asset"]["ipadM3u8Url"]
def get_brightcove_video_json(data_account, data_player, data_video_id):
    """Fetch a video's JSON from edge.api.brightcove.com.

    Returns the last m3u8 source URL found, '' when no HLS source exists,
    or None when the API denies access (a notification is shown).
    """
    file_json = utils.download_catalog(
        URL_BRIGHTCOVE_VIDEO_JSON % (data_account, data_video_id),
        '%s_%s_replay.json' % (data_account, data_video_id),
        force_dl=False,
        request_type='get',
        post_dic={},
        random_ua=False,
        specific_headers={
            'Accept': 'application/json;pk=%s' %
            (get_brightcove_policy_key(data_account, data_player))
        },
        params={})
    # Close the file handle deterministically (was leaked before)
    with open(file_json) as f:
        video_json = f.read()
    json_parser = json.loads(video_json)
    video_url = ''
    if 'sources' in json_parser:
        # Keep the last HLS source listed
        for url in json_parser["sources"]:
            if 'src' in url:
                if 'm3u8' in url["src"]:
                    video_url = url["src"]
    else:
        # NOTE(review): on error the API is assumed to return a list of
        # error objects — confirm other error codes don't need handling
        if json_parser[0]['error_code'] == "ACCESS_DENIED":
            utils.send_notification(common.ADDON.get_localized_string(30713))
        return None
    return video_url
def list_videos(channel, param):
    """Build the videos listing for a category page, with pagination."""
    videos = []
    url = param.split('|')[0]
    file_path = utils.download_catalog(url, url + '.html')
    # Close the file handle deterministically (was leaked before)
    with open(file_path) as f:
        theme_html = f.read()
    theme_soup = bs(theme_html, "html.parser")
    videos_soup = theme_soup.find_all('li', class_='fleft lasts-online-even')
    videos2_soup = theme_soup.find_all('li', class_='fleft lasts-online-odd')
    # Even and odd rows share identical markup: the two previously
    # duplicated loop bodies are merged into one (order preserved)
    for video in videos_soup + videos2_soup:
        url = video.find('a')['href'].encode('utf-8')
        img = video.find('div', class_='visible')
        img = img.find('img')['src'].encode('utf-8')
        img = url_root + img
        title = video.find('h4').get_text().encode('utf-8')
        title = title.replace('\n', '').replace('\r', '')
        duration = 0
        infoLabels = {"Title": title, 'Duration': duration}
        videos.append([channel, url, title, img, infoLabels, 'play'])
    # Pagination: add a "next page" entry when one exists
    page_soup = theme_soup.find('div', class_='pagination')
    if page_soup is not None:
        page_soup = page_soup.find_all('a')
        current_page = 0
        for page in page_soup:
            if page.has_attr('class'):
                # The currently selected page carries the 'selected' class
                if page['class'][0].encode('utf-8') == 'selected':
                    current_page = page.get_text().encode('utf-8')
                    current_page = int(current_page)
        if current_page < len(page_soup):
            next_url = page_soup[current_page]['href'].encode('utf-8')
            videos.append([
                channel, next_url,
                'Page suivante (page ' + str(current_page + 1) + ')', '', {},
                'shows'
            ])
    return videos
def list_live(params):
    """Build live listing"""
    lives = []
    desired_language = common.PLUGIN.get_setting(params.channel_id +
                                                 '.language')
    # Only 'de' and 'fr' players exist; anything else falls back to 'fr'
    if desired_language == 'DE':
        desired_language = 'de'
    else:
        desired_language = 'fr'
    url_live = ''
    file_path = utils.download_catalog(
        URL_LIVE_ARTE % desired_language,
        '%s_%s_live.json' % (params.channel_name, desired_language))
    # Close the file handle deterministically (was leaked before)
    with open(file_path) as f:
        file_live = f.read()
    json_parser = json.loads(file_live)
    title = json_parser["videoJsonPlayer"]["VTI"].encode('utf-8')
    img = json_parser["videoJsonPlayer"]["VTU"]["IUR"].encode('utf-8')
    plot = ''
    # The plot may live under either the V7T or the VDE key
    if 'V7T' in json_parser["videoJsonPlayer"]:
        plot = json_parser["videoJsonPlayer"]["V7T"].encode('utf-8')
    elif 'VDE' in json_parser["videoJsonPlayer"]:
        plot = json_parser["videoJsonPlayer"]["VDE"].encode('utf-8')
    duration = 0
    duration = json_parser["videoJsonPlayer"]["videoDurationSeconds"]
    url_live = json_parser["videoJsonPlayer"]["VSR"]["HLS_SQ_1"]["url"]
    info = {'video': {'title': title, 'plot': plot, 'duration': duration}}
    lives.append({
        'label': title,
        'fanart': img,
        'thumb': img,
        'url': common.PLUGIN.get_url(
            action='channel_entry',
            next='play_l',
            url=url_live,
        ),
        'is_playable': True,
        'info': info
    })
    return common.PLUGIN.create_listing(
        lives,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL),
        category=common.get_window_title())
def list_live(params):
    """Build live listing"""
    lives = []
    title = ''
    plot = ''
    duration = 0
    img = ''
    url_live = ''
    desired_language = common.PLUGIN.get_setting(params.channel_id +
                                                 '.language')
    url_live = URL_LIVE_SITE % desired_language.lower()
    file_path = utils.download_catalog(
        url_live,
        '%s_%s_live.html' % (params.channel_name, desired_language.lower()))
    # Close the file handle deterministically (was leaked before)
    with open(file_path) as f:
        html_live = f.read()
    root_soup = bs(html_live, 'html.parser')
    # Stream URLs are embedded in an application/json script tag
    json_parser = json.loads(
        root_soup.select_one("script[type=application/json]").text)
    media_datas_list = json_parser['medias']['media']
    media_datas_list = media_datas_list['media_sources']['media_source']
    # Keep the last non-empty source
    for datas in media_datas_list:
        if datas['source']:
            url_live = datas['source']
    title = 'Live ' + params.channel_name + ' ' + desired_language.lower()
    info = {'video': {'title': title, 'plot': plot, 'duration': duration}}
    lives.append({
        'label': title,
        'fanart': img,
        'thumb': img,
        'url': common.PLUGIN.get_url(
            action='channel_entry',
            next='play_l',
            url=url_live,
        ),
        'is_playable': True,
        'info': info
    })
    return common.PLUGIN.create_listing(
        lives,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL),
        category=common.get_window_title())
def list_shows(params):
    """Build categories listing"""
    shows = []
    if params.channel_name == 'rmcdecouverte':
        # This channel has no categories: only an "all videos" entry
        all_video = common.ADDON.get_localized_string(30701)
        shows.append({
            'label': common.GETTEXT('All videos'),
            'url': common.PLUGIN.get_url(module_path=params.module_path,
                                         module_name=params.module_name,
                                         action='replay_entry',
                                         next='list_videos_1',
                                         all_video=all_video,
                                         window_title=all_video)
        })
    else:
        if params.next == 'list_shows_1':
            file_path = utils.download_catalog(
                URL_REPLAY % (params.channel_name,
                              get_token(params.channel_name)),
                '%s.json' % (params.channel_name))
            # Close the file handle deterministically (was leaked before)
            with open(file_path) as f:
                file_categories = f.read()
            json_categories = json.loads(file_categories)
            # Categories sit under the first content element's first items
            json_categories = json_categories['page']['contents'][0]
            json_categories = json_categories['elements'][0]['items']
            for categories in json_categories:
                title = categories['title'].encode('utf-8')
                image_url = categories['image_url'].encode('utf-8')
                category = categories['categories'].encode('utf-8')
                shows.append({
                    'label': title,
                    'thumb': image_url,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        category=category,
                        next='list_videos_1',
                        title=title,
                        page='1',
                        window_title=title)
                })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL),
        category=common.get_window_title(params))
def get_api_key(params):
    """Extract the API key from the channel's common JS file.

    The key appears in the JS as: nw_api_key||"<key>".
    """
    file_path_js = utils.download_catalog(
        URL_COMMONJS_NHK % params.channel_name,
        '%s_info.js' % params.channel_name,
    )
    # Close the file handle deterministically (was leaked before)
    with open(file_path_js) as f:
        info_js = f.read()
    apikey = re.compile(r'nw_api_key\|\|"(.+?)"').findall(info_js)
    return apikey[0]
def get_api_key(params):
    """Extract the API key from the channel's common JS file.

    The key appears in the JS as: nw_api_key||"<key>".
    """
    file_path_js = utils.download_catalog(
        URL_COMMONJS_NHK % params.channel_name,
        '%s_info.js' % params.channel_name,
    )
    # Close the file handle deterministically (was leaked before)
    with open(file_path_js) as f:
        info_js = f.read()
    apikey = re.compile(r'nw_api_key\|\|"(.+?)"').findall(info_js)
    return apikey[0]
def list_shows(params):
    """Build categories listing"""
    shows = []
    if params.next == 'list_shows_1':
        file_path = utils.download_catalog(
            URL_PROGRAMS, '%s_programs.html' % params.channel_name)
        # Close the file handle deterministically (was leaked before)
        with open(file_path) as f:
            programs_html = f.read()
        programs_soup = bs(programs_html, 'html.parser')
        list_js = programs_soup.find_all("script")
        # The 7th script tag holds the category list as (almost) JSON;
        # strip the script wrapper, the JS assignment and the trailing
        # comma so json.loads accepts it
        json_categories = list_js[6].prettify().replace(
            '</script>', ''
        ).replace(
            '<script>', ''
        ).replace(
            'var programList = ', ''
        ).replace(
            '\n', ''
        ).replace(
            '\r', ''
        ).replace(
            ',]', ']')
        json_categories_jsonparser = json.loads(json_categories)
        for category in json_categories_jsonparser["programmings"]:
            category_name = category["title"]
            category_img = URL_ROOT + category["image"]
            category_url = URL_ROOT + '/programma/' + category["description"]
            shows.append({
                'label': category_name,
                'thumb': category_img,
                'fanart': category_img,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next='list_videos_cat',
                    category_url=category_url,
                    window_title=category_name,
                    category_name=category_name,
                )
            })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
            common.sp.xbmcplugin.SORT_METHOD_LABEL
        ),
        category=common.get_window_title()
    )
def list_shows(params):
    """Build categories listing"""
    shows = []
    if params.next == 'list_shows_1':
        # Root level: static category map
        for category_title, category_url in CATEGORIES.iteritems():
            shows.append({
                'label': category_title,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    category_url=category_url % get_api_key(),
                    next='list_shows_cat',
                    title=category_title,
                    window_title=category_title)
            })
    elif params.next == 'list_shows_cat':
        file_path = utils.download_catalog(params.category_url,
                                           '%s_%s.json' % (
                                               params.channel_name,
                                               params.title),
                                           random_ua=True)
        # Close the file handle deterministically (was leaked before);
        # also renamed the variable, which shadowed the 'file' builtin
        with open(file_path) as f:
            category_data = f.read()
        json_category = json.loads(category_data)
        for show in json_category['res']:
            # BUGFIX: .encode('utf-8') was applied to the KEY string
            # ('program_title'.encode(...)) instead of the value
            program_title = show['program_title'].encode('utf-8')
            program_id = show['program_id'].encode('utf-8')
            fanart = show['program_image'].encode('utf-8')
            shows.append({
                'label': program_title,
                'thumb': fanart,
                'fanart': fanart,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    program_id=program_id,
                    next='list_videos',
                    title=program_title,
                    window_title=program_title)
            })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL),
        category=common.get_window_title())
def get_pcode(params):
    """Extract the player pcode from the site's last referenced JS bundle."""
    # Get js file
    file_path = utils.download_catalog(
        URL_GET_JS_PCODE % params.channel_name,
        '%s_js.html' % params.channel_name,
    )
    # Close file handles deterministically (they were leaked before)
    with open(file_path) as f:
        file_js = f.read()
    js_file = re.compile(r'<script src="\/(.+?)"').findall(file_js)
    # The pcode lives in the last JS script referenced by the page
    url_get_pcode = URL_ROOT + js_file[-1]
    file_path_js = utils.download_catalog(
        url_get_pcode,
        '%s_pcode.js' % params.channel_name,
    )
    with open(file_path_js) as f:
        pcode_js = f.read()
    pcode = re.compile(r'pcode: "(.+?)"').findall(pcode_js)
    return pcode[0]
def list_shows(params):
    """Build shows listing"""
    shows = []
    if params.next == 'list_shows_1':
        # First entry lists every video regardless of category
        all_video = common.ADDON.get_localized_string(30701)
        shows.append({
            'label': common.GETTEXT('All videos'),
            'url': common.PLUGIN.get_url(
                module_path=params.module_path,
                module_name=params.module_name,
                action='replay_entry',
                next='list_videos_cat',
                category_id=0,
                all_video=all_video,
                window_title=all_video
            )
        })
        file_path = utils.download_catalog(
            URL_CATEGORIES_NHK % (params.channel_name, get_api_key(params)),
            '%s_categories.json' % (params.channel_name)
        )
        # Close the file handle deterministically (was leaked before)
        with open(file_path) as f:
            file_categories = f.read()
        json_parser = json.loads(file_categories)
        for category in json_parser["vod_categories"]:
            name_category = category["name"].encode('utf-8')
            category_id = category["category_id"]
            shows.append({
                'label': name_category,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next='list_videos_cat',
                    category_id=category_id,
                    name_category=name_category,
                    window_title=name_category
                )
            })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
            common.sp.xbmcplugin.SORT_METHOD_LABEL
        ),
        category=common.get_window_title()
    )
def get_pcode(params):
    """Extract the player pcode from the site's last referenced JS bundle."""
    # Get js file
    file_path = utils.download_catalog(
        URL_GET_JS_PCODE % params.channel_name,
        '%s_js.html' % params.channel_name,
    )
    # Close file handles deterministically (they were leaked before)
    with open(file_path) as f:
        file_js = f.read()
    js_file = re.compile(r'<script src="\/(.+?)"').findall(file_js)
    # The pcode lives in the last JS script referenced by the page
    url_get_pcode = URL_ROOT + js_file[-1]
    file_path_js = utils.download_catalog(
        url_get_pcode,
        '%s_pcode.js' % params.channel_name,
    )
    with open(file_path_js) as f:
        pcode_js = f.read()
    pcode = re.compile(r'pcode: "(.+?)"').findall(pcode_js)
    return pcode[0]
def get_live_item(params):
    """Build the live item for the language-specific stream page."""
    plot = ''
    duration = 0
    img = ''
    url_live = ''
    desired_language = common.PLUGIN.get_setting(params.channel_name +
                                                 '.language')
    url_live = URL_LIVE_SITE % desired_language.lower()
    file_path = utils.download_catalog(
        url_live,
        '%s_%s_live.html' % (params.channel_name, desired_language.lower()))
    # Close the file handle deterministically (was leaked before)
    with open(file_path) as f:
        html_live = f.read()
    root_soup = bs(html_live, 'html.parser')
    # Stream URLs are embedded in an application/json script tag
    json_parser = json.loads(
        root_soup.select_one("script[type=application/json]").text)
    media_datas_list = json_parser['medias']['media']
    media_datas_list = media_datas_list['media_sources']['media_source']
    # Keep the last non-empty source
    for datas in media_datas_list:
        if datas['source']:
            url_live = datas['source']
    info = {
        'video': {
            'title': params.channel_label,
            'plot': plot,
            'duration': duration
        }
    }
    return {
        'label': params.channel_label,
        'fanart': img,
        'thumb': img,
        'url': common.PLUGIN.get_url(
            module_path=params.module_path,
            module_name=params.module_name,
            action='start_live_tv_stream',
            next='play_l',
            url=url_live,
        ),
        'is_playable': True,
        'info': info
    }
def get_live_item(params):
    """Build the Arte live item; returns None for unsupported languages."""
    if DESIRED_LANGUAGE == 'FR' or \
            DESIRED_LANGUAGE == 'DE':
        url_live = ''
        file_path = utils.download_catalog(
            URL_LIVE_ARTE % DESIRED_LANGUAGE.lower(),
            '%s_%s_live.json' % (params.channel_name, DESIRED_LANGUAGE))
        # Close the file handle deterministically (was leaked before)
        with open(file_path) as f:
            file_live = f.read()
        json_parser = json.loads(file_live)
        title = json_parser["videoJsonPlayer"]["VTI"].encode('utf-8')
        img = json_parser["videoJsonPlayer"]["VTU"]["IUR"].encode('utf-8')
        plot = ''
        # The plot may live under either the V7T or the VDE key
        if 'V7T' in json_parser["videoJsonPlayer"]:
            plot = json_parser["videoJsonPlayer"]["V7T"].encode('utf-8')
        elif 'VDE' in json_parser["videoJsonPlayer"]:
            plot = json_parser["videoJsonPlayer"]["VDE"].encode('utf-8')
        duration = 0
        duration = json_parser["videoJsonPlayer"]["videoDurationSeconds"]
        url_live = json_parser["videoJsonPlayer"]["VSR"]["HLS_SQ_1"]["url"]
        info = {
            'video': {
                'title': params.channel_label + " - [I]" + title + "[/I]",
                'plot': plot,
                'duration': duration
            }
        }
        return {
            'label': params.channel_label + " - [I]" + title + "[/I]",
            'fanart': img,
            'thumb': img,
            'url': common.PLUGIN.get_url(
                action='start_live_tv_stream',
                next='play_l',
                module_name=params.module_name,
                module_path=params.module_path,
                url=url_live,
            ),
            'is_playable': True,
            'info': info
        }
    else:
        return None
def list_shows(params):
    """Build categories listing"""
    shows = []
    if params.next == 'list_shows_1':
        file_path = utils.download_catalog(
            URL_PROGRAMS, '%s_programs.html' % params.channel_name)
        # Close the file handle deterministically (was leaked before)
        with open(file_path) as f:
            programs_html = f.read()
        programs_soup = bs(programs_html, 'html.parser')
        list_js = programs_soup.find_all("script")
        for js in list_js:
            js_value = js.prettify()
            # The category list sits in the script that assigns programList;
            # strip the wrapper and the trailing comma so json.loads works
            if 'programList' in js_value:
                json_categories = js_value.replace('</script>', '').replace(
                    '<script>', '').replace('var programList = ',
                                            '').replace('\n', '').replace(
                                                '\r', '').replace(',]', ']')
                json_categories_jsonparser = json.loads(json_categories)
                for category in json_categories_jsonparser["programmings"]:
                    category_name = category["title"]
                    category_img = URL_ROOT + category["image"]
                    category_url = URL_ROOT + '/programma/' + category[
                        "description"]
                    shows.append({
                        'label': category_name,
                        'thumb': category_img,
                        'fanart': category_img,
                        'url': common.PLUGIN.get_url(
                            module_path=params.module_path,
                            module_name=params.module_name,
                            action='replay_entry',
                            next='list_videos_cat',
                            category_url=category_url,
                            window_title=category_name,
                            category_name=category_name,
                        )
                    })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL),
        category=common.get_window_title(params))
def get_video_url(params):
    """Get video URL and start video player.

    'play_l': assemble the live HLS URL from a dacast id + token pair.
    'play_r'/'download_video': fetch a token, resolve the episode's JSONP
    wrapper and return the HLS URL ('' when it cannot be resolved).
    """
    if params.next == 'play_l':
        file_path = utils.download_catalog(
            URL_LIVE_DATAS, '%s_live.html' % (params.channel_name))
        # Close file handles deterministically (they were leaked before)
        with open(file_path) as f:
            live_html = f.read()
        # The dacast id carries the three values used by the JSON endpoints
        id_value = re.compile(
            r'dacast\(\'(.*?)\'\,').findall(live_html)[0].split('_')
        # json with hls
        file_path_json = utils.download_catalog(
            JSON_LIVE % (id_value[0], id_value[1], id_value[2]),
            '%s_live.json' % (params.channel_name))
        with open(file_path_json) as f:
            live_json = f.read()
        live_jsonparser = json.loads(live_json)
        # json with token
        file_path_json_token = utils.download_catalog(
            JSON_LIVE_TOKEN % (id_value[0], id_value[1], id_value[2]),
            '%s_live_token.json' % (params.channel_name))
        with open(file_path_json_token) as f:
            live_json_token = f.read()
        live_jsonparser_token = json.loads(live_json_token)
        return 'http:' + live_jsonparser["hls"].encode('utf-8') + \
            live_jsonparser_token["token"].encode('utf-8')
    elif params.next == 'play_r' or params.next == 'download_video':
        # get token
        file_path_json_token = utils.download_catalog(
            URL_TOKEN, '%s_replay_token.json' % (params.channel_name))
        with open(file_path_json_token) as f:
            replay_json_token = f.read()
        replay_jsonparser_token = json.loads(replay_json_token)
        token = replay_jsonparser_token["token"]
        # Get HLS link
        file_path_video_replay = utils.download_catalog(
            URL_VIDEO_REPLAY % (params.id_episode, token),
            '%s_%s_video_replay.js' % (params.channel_name,
                                       params.id_episode))
        with open(file_path_video_replay) as f:
            video_replay_json = f.read()
        video_replay_jsonparser = json.loads(video_replay_json)
        url_hls = ''
        # BUGFIX: url_json_url_hls was referenced even when 'items' was
        # missing from the response, raising UnboundLocalError; bail out
        # with an empty URL instead
        url_json_url_hls = ''
        if 'items' in video_replay_jsonparser:
            for video in video_replay_jsonparser["items"][0]:
                url_json_url_hls = video["url"].encode('utf-8')
                break
        if not url_json_url_hls:
            return url_hls
        file_path_hls_replay = utils.download_catalog(
            url_json_url_hls +
            'jsonpCallback%s5910' % (str(time.time()).replace('.', '')),
            '%s_%s_hls_replay.js' % (params.channel_name, params.id_episode))
        with open(file_path_hls_replay) as f:
            hls_replay_js = f.read()
        # Strip the JSONP wrapper to recover the raw JSON payload
        hls_replay_json = re.compile(r'\((.*?)\)').findall(hls_replay_js)[0]
        hls_replay_jsonparser = json.loads(hls_replay_json)
        if 'url' in hls_replay_jsonparser:
            url_hls = hls_replay_jsonparser["url"].encode('utf-8')
        return url_hls
def start_live_tv_stream(params):
    """Look up the live video id and delegate playback to get_video_url."""
    file_path = utils.download_catalog(
        URL_INFO_LIVE_JSON, '%s_info_live.json' % (params.channel_name))
    # Close the file handle deterministically (was leaked before)
    with open(file_path) as f:
        file_info_live = f.read()
    json_parser = json.loads(file_info_live)
    video_id = json_parser["video"].encode('utf-8')
    params['next'] = 'play_l'
    params['video_id'] = video_id
    return get_video_url(params)
def get_live_item(params):
    """Build one live item per stream found in the lives JSONP payload."""
    lives = []
    title = ''
    plot = ''
    duration = 0
    img = ''
    url_live = ''
    file_path = utils.download_catalog(URL_JSON_LIVES,
                                       '%s_live.json' % (params.channel_name))
    # Close the file handle deterministically (was leaked before)
    with open(file_path) as f:
        lives_json = f.read()
    # Strip the parseLiveJson(...) JSONP wrapper before parsing
    lives_json = lives_json.replace(')', '').replace('parseLiveJson(', '')
    lives_jsonparser = json.loads(lives_json)
    for lives_value in lives_jsonparser.iteritems():
        # Skip backup and geo-restricted stream variants
        if 'backup' not in lives_value[0] and \
                'geo' not in lives_value[0]:
            title = str(lives_value[0]).replace('vualto_', '').replace(
                '_', ' ')
            url_live = lives_jsonparser[lives_value[0]]["hls"]
            info = {
                'video': {
                    'title': params.channel_label + " - [I]" + title + "[/I]",
                    'plot': plot,
                    'duration': duration
                }
            }
            lives.append({
                'label': params.channel_label + " - [I]" + title + "[/I]",
                'fanart': img,
                'thumb': img,
                'url': common.PLUGIN.get_url(
                    action='start_live_tv_stream',
                    next='play_l',
                    module_name=params.module_name,
                    module_path=params.module_path,
                    url_live=url_live,
                ),
                'is_playable': True,
                'info': info
            })
    return lives
def get_video_url(params):
    """Get video URL and start video player.

    Replay/download: pick the last video variant from the master playlist.
    Live: scrape the embedded player and keep the last m3u8 URL found.
    """
    if params.next == 'play_r' or params.next == 'download_video':
        url_root = params.url_streaming.replace('playlist.m3u8', '')
        m3u8_content = utils.get_webcontent(params.url_streaming)
        last_url = ''
        # Keep the last variant line that references a video m3u8
        for line in m3u8_content.splitlines():
            if 'm3u8' in line and 'video' in line:
                last_url = line
        return url_root + last_url
    elif params.next == 'play_l':
        url_live = ''
        file_path = utils.download_catalog(URL_LIVE_TV,
                                           params.channel_name + '_live.html')
        # Close file handles deterministically (they were leaked before)
        with open(file_path) as f:
            root_live_html = f.read()
        root_live_soup = bs(root_live_html, 'html.parser')
        live_soup = root_live_soup.find('div', class_='wrapperVideo')
        url_live_embeded = ''
        # Keep the src of the last iframe inside the video wrapper
        for live in live_soup.find_all('iframe'):
            url_live_embeded = live.get('src').encode('utf-8')
        file_path_2 = utils.download_catalog(
            url_live_embeded, params.channel_name + '_live_embeded.html')
        with open(file_path_2) as f:
            root_live_embeded_html = f.read()
        all_url_video = re.compile(r'file: \'(.*?)\'').findall(
            root_live_embeded_html)
        # Keep the last m3u8 URL found in the embedded player
        for url_video in all_url_video:
            if url_video.count('m3u8') > 0:
                url_live = url_video
        return url_live
def list_shows(params):
    """Build the show-categories listing.

    Prepends an "All videos" entry, then adds one entry per VOD
    category returned by the NHK categories API.
    """
    listing = []
    if params.next == 'list_shows_1':
        all_video = common.ADDON.get_localized_string(30701)
        listing.append({
            'label': common.GETTEXT('All videos'),
            'url': common.PLUGIN.get_url(
                module_path=params.module_path,
                module_name=params.module_name,
                action='replay_entry',
                next='list_videos_cat',
                category_id=0,
                all_video=all_video,
                window_title=all_video)
        })
        catalog_path = utils.download_catalog(
            URL_CATEGORIES_NHK % (params.channel_name, get_api_key(params)),
            '%s_categories.json' % (params.channel_name))
        categories = json.loads(open(catalog_path).read())
        for entry in categories["vod_categories"]:
            label = entry["name"].encode('utf-8')
            cat_id = entry["category_id"]
            listing.append({
                'label': label,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next='list_videos_cat',
                    category_id=cat_id,
                    name_category=label,
                    window_title=label)
            })
    return common.PLUGIN.create_listing(
        listing,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL),
        category=common.get_window_title(params))
def start_live_tv_stream(params):
    """Resolve the CNEWS live stream URL and hand off to the player."""
    # The live page embeds its content id in a "content: '<id>'" snippet.
    html_path = utils.download_catalog(
        URL_LIVE_CNEWS,
        '%s_live.html' % (params.channel_name))
    live_page = open(html_path).read()
    content_ids = re.compile(r'content: \'(.*?)\'').findall(live_page)
    # Query the info API for that id; the "IPAD" variant is the stream
    # URL used for playback.
    info = json.loads(
        utils.get_webcontent(URL_INFO_CONTENT % (content_ids[0])))
    stream_url = info[0]["MEDIA"]["VIDEOS"]["IPAD"].encode('utf-8')
    params['next'] = 'play_l'
    params['url'] = stream_url
    return get_video_url(params)
def list_live(params):
    """Build live listing"""
    plot = ''
    duration = 0
    # Sky News publishes its live stream as a YouTube embed; extract
    # the video id from the embed URL.
    live_path = utils.download_catalog(
        URL_LIVE_SKYNEWS,
        '%s_live.html' % params.channel_name,
    )
    page = open(live_path).read()
    video_id = re.compile(
        r'www.youtube.com/embed/(.*?)\?').findall(page)[0]
    title = 'Watch Sky News Live'
    img = URL_IMG_YOUTUBE % video_id
    info = {'video': {'title': title, 'plot': plot, 'duration': duration}}
    item = {
        'label': title,
        'fanart': img,
        'thumb': img,
        'url': common.PLUGIN.get_url(
            action='channel_entry',
            next='play_l',
            video_id=video_id,
        ),
        'is_playable': True,
        'info': info
    }
    return common.PLUGIN.create_listing(
        [item],
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL),
        category=common.get_window_title())
def list_live(params):
    """Build live listing"""
    lives = []
    title = ''
    plot = ''
    duration = 0
    img = ''
    url_live = ''
    # The live page embeds the content id in a "content: '<id>'"
    # javascript snippet.
    file_path_html = utils.download_catalog(
        URL_LIVE_CNEWS,
        '%s_live.html' % (params.channel_name))
    html_live = open(file_path_html).read()
    video_id_re = re.compile(r'content: \'(.*?)\'').findall(html_live)
    # Ask the info API for metadata and stream URL of that content id.
    file_live_json = utils.get_webcontent(
        URL_INFO_CONTENT % (video_id_re[0]))
    json_parser = json.loads(file_live_json)
    title = json_parser[0]["INFOS"]["TITRAGE"]["TITRE"].encode('utf-8')
    plot = json_parser[0]["INFOS"]["DESCRIPTION"].encode('utf-8')
    img = json_parser[0]["MEDIA"]["IMAGES"]["GRAND"].encode('utf-8')
    # The "IPAD" variant is the one used for playback.
    url_live = json_parser[0]["MEDIA"]["VIDEOS"]["IPAD"].encode('utf-8')
    info = {'video': {'title': title, 'plot': plot, 'duration': duration}}
    lives.append({
        'label': title,
        'fanart': img,
        'thumb': img,
        'url': common.PLUGIN.get_url(
            action='channel_entry',
            next='play_l',
            url=url_live,
        ),
        'is_playable': True,
        'info': info
    })
    return common.PLUGIN.create_listing(
        lives,
        sort_methods=(common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
                      common.sp.xbmcplugin.SORT_METHOD_LABEL),
        category=common.get_window_title())
def start_live_tv_stream(params):
    """Look up the current live Dailymotion video id, then play it."""
    info_path = utils.download_catalog(
        URL_INFO_LIVE_JSON,
        '%s_info_live.json' % (params.channel_name)
    )
    live_info = json.loads(open(info_path).read())
    # The "video" field carries the Dailymotion id of the current live.
    dailymotion_id = live_info["video"].encode('utf-8')
    params['next'] = 'play_l'
    params['video_id'] = dailymotion_id
    return get_video_url(params)
def get_live_item(params):
    """Build the single live item, honouring the configured language."""
    plot = ''
    duration = 0
    img = ''
    url_live = ''
    # The site is language-specific; the setting picks which variant of
    # the live page to scrape.
    desired_language = common.PLUGIN.get_setting(
        params.channel_name + '.language')
    url_live = URL_LIVE_SITE % desired_language.lower()
    file_path = utils.download_catalog(
        url_live,
        '%s_%s_live.html' % (params.channel_name, desired_language.lower())
    )
    html_live = open(file_path).read()
    root_soup = bs(html_live, 'html.parser')
    # Player configuration is embedded as a <script type=application/json>.
    json_parser = json.loads(
        root_soup.select_one("script[type=application/json]").text)
    media_datas_list = json_parser['medias']['media']
    media_datas_list = media_datas_list['media_sources']['media_source']
    # Keep the last media_source entry with a non-empty 'source'.
    for datas in media_datas_list:
        if datas['source']:
            url_live = datas['source']
    info = {
        'video': {
            'title': params.channel_label,
            'plot': plot,
            'duration': duration
        }
    }
    return {
        'label': params.channel_label,
        'fanart': img,
        'thumb': img,
        'url': common.PLUGIN.get_url(
            module_path=params.module_path,
            module_name=params.module_name,
            action='start_live_tv_stream',
            next='play_l',
            url=url_live,
        ),
        'is_playable': True,
        'info': info
    }
def get_live_item(params):
    """Build the Arte live item; only FR and DE feeds are supported."""
    if DESIRED_LANGUAGE == 'FR' or \
            DESIRED_LANGUAGE == 'DE':
        url_live = ''
        file_path = utils.download_catalog(
            URL_LIVE_ARTE % DESIRED_LANGUAGE.lower(),
            '%s_%s_live.json' % (params.channel_name, DESIRED_LANGUAGE)
        )
        file_live = open(file_path).read()
        json_parser = json.loads(file_live)
        # VTI = title, VTU/IUR = thumbnail URL in Arte's player JSON.
        title = json_parser["videoJsonPlayer"]["VTI"].encode('utf-8')
        img = json_parser["videoJsonPlayer"]["VTU"]["IUR"].encode('utf-8')
        # Description key differs between feeds: prefer V7T, else VDE.
        plot = ''
        if 'V7T' in json_parser["videoJsonPlayer"]:
            plot = json_parser["videoJsonPlayer"]["V7T"].encode('utf-8')
        elif 'VDE' in json_parser["videoJsonPlayer"]:
            plot = json_parser["videoJsonPlayer"]["VDE"].encode('utf-8')
        duration = 0
        duration = json_parser["videoJsonPlayer"]["videoDurationSeconds"]
        # HLS_SQ_1 is the stream variant this add-on plays.
        url_live = json_parser["videoJsonPlayer"]["VSR"]["HLS_SQ_1"]["url"]
        info = {
            'video': {
                'title': params.channel_label + " - [I]" + title + "[/I]",
                'plot': plot,
                'duration': duration
            }
        }
        return {
            'label': params.channel_label + " - [I]" + title + "[/I]",
            'fanart': img,
            'thumb': img,
            'url': common.PLUGIN.get_url(
                action='start_live_tv_stream',
                next='play_l',
                module_name=params.module_name,
                module_path=params.module_path,
                url=url_live,
            ),
            'is_playable': True,
            'info': info
        }
    else:
        # Unsupported language: no live item.
        return None
def get_brightcove_video_json(data_account, data_player, data_video_id):
    """Fetch and parse the Brightcove playback JSON for one video.

    Queries edge.api.brightcove.com, passing the policy key of the
    given account/player pair in the Accept header.
    """
    policy_key = get_brightcove_policy_key(data_account, data_player)
    json_path = utils.download_catalog(
        URL_BRIGHTCOVE_VIDEO_JSON % (data_account, data_video_id),
        '%s_%s_replay.json' % (data_account, data_video_id),
        force_dl=False,
        request_type='get',
        post_dic={},
        random_ua=False,
        specific_headers={
            'Accept': 'application/json;pk=%s' % policy_key},
        params={})
    raw_json = open(json_path).read()
    return json.loads(raw_json)
def get_video_url(params):
    """Get video URL and start video player"""
    next_action = params.next
    if next_action == 'play_r':
        # Replay: stream (not download) the requested Dailymotion video.
        return resolver.get_stream_dailymotion(params.video_id, False)
    if next_action == 'play_l':
        # Live: the current live video id is published in a JSON file.
        info_path = utils.download_catalog(
            URL_INFO_LIVE_JSON,
            '%s_info_live.json' % (params.channel_name)
        )
        live_info = json.loads(open(info_path).read())
        live_id = live_info["video"].encode('utf-8')
        return resolver.get_stream_dailymotion(live_id, False)
    if next_action == 'download_video':
        # Download mode uses the same resolver with download=True.
        return resolver.get_stream_dailymotion(params.video_id, True)
def list_shows(channel, param):
    """Build the shows listing (legacy list-based API).

    With param == 'none', list the static category map; otherwise param
    is '<url>|<category>' and the category page is scraped for programs.
    """
    shows = []
    if param == 'none':
        for url, title in categories.iteritems():
            url = url_root + url
            # Encode url and title together so the next call can split
            # them back out of a single param string.
            shows.append([
                channel,
                url + '|' + title,
                title,
                '',
                'folder'])
    else:
        url = param.split('|')[0]
        cat = param.split('|')[1]
        file_path = utils.download_catalog(
            url, cat + '.html', random_ua=True)
        html = open(file_path).read()
        page_soup = bs(html, "html.parser")
        articles = page_soup.find('section', class_='js-item-container')
        articles = articles.find_all('article')
        for article in articles:
            title_url = article.find(
                'h3').find('a')
            title = title_url['title'].encode('utf-8')
            url_pgm = title_url['href'].encode('utf-8')
            # data-srcset lists several image sizes; keep the largest
            # (last) match.
            imgs = article.find('img')['data-srcset']
            imgs = re.compile(
                r'http://(.*?).jpg', re.DOTALL).findall(imgs)
            if len(imgs) == 0:
                img = ''
            else:
                img = imgs[len(imgs) - 1]
                img = 'http://' + img + '.jpg'
            shows.append([
                channel,
                url_pgm,
                title,
                img,
                'shows'])
    return shows
def start_live_tv_stream(params):
    """Fetch the CNEWS live page, resolve the HLS stream and play it."""
    live_page_path = utils.download_catalog(
        URL_LIVE_CNEWS,
        '%s_live.html' % (params.channel_name)
    )
    live_page = open(live_page_path).read()
    # The page declares the live content id as: content: '<id>'
    matches = re.compile(r'content: \'(.*?)\'').findall(live_page)
    metadata = json.loads(
        utils.get_webcontent(URL_INFO_CONTENT % (matches[0])))
    params['next'] = 'play_l'
    params['url'] = metadata[0]["MEDIA"]["VIDEOS"]["IPAD"].encode('utf-8')
    return get_video_url(params)
def list_shows(params):
    """Build shows listing"""
    shows = []
    if params.next == 'list_shows_1':
        page_path = utils.download_catalog(
            URL_REPLAY, params.channel_name + '.html')
        page_soup = bs(open(page_path).read(), 'html.parser')
        nav = page_soup.find(
            'div',
            class_='nav-programs'
        )
        for anchor in nav.find_all('a'):
            # Clean CR/LF noise out of the category label.
            label = anchor.find('span').get_text().encode('utf-8')
            label = label.replace('\n', ' ').replace('\r', ' ')
            label = label.rstrip('\r\n')
            # Per-category identifier used by the next listing step.
            label_hash = common.sp.md5(label).hexdigest()
            href = anchor.get('href').encode('utf-8')
            shows.append({
                'label': label,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    category_hash=label_hash,
                    next='list_videos_cat',
                    url=href,
                    window_title=label,
                    category_name=label,
                )
            })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
            common.sp.xbmcplugin.SORT_METHOD_LABEL
        ),
        category=common.get_window_title()
    )
def list_shows(params):
    """Build categories listing"""
    shows = []
    # Categories come from the entries of the site's off-canvas menu.
    menu_path = utils.download_catalog(
        URL_ROOT_BRF, '%s_categories.html' % (
            params.channel_name))
    menu_soup = bs(open(menu_path).read(), 'html.parser')
    nav_list = menu_soup.find('ul', class_="off-canvas-list")
    for anchor in nav_list.find_all('a'):
        label = anchor.get_text().encode('utf-8')
        href = anchor.get('href')
        # Skip anchors without an absolute URL (in-page/relative links).
        if 'http' not in href:
            continue
        shows.append({
            'label': label,
            'url': common.PLUGIN.get_url(
                module_path=params.module_path,
                module_name=params.module_name,
                action='replay_entry',
                category_url=href,
                page='1',
                category_name=label,
                next='list_videos',
                window_title=label
            )
        })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
            common.sp.xbmcplugin.SORT_METHOD_LABEL
        ),
        category=common.get_window_title()
    )
def get_video_url(params):
    """Get video URL and start video player"""
    if params.next == 'play_l':
        # Live URL was already resolved by the listing step.
        return params.url_live
    if params.next in ('play_r', 'download_video'):
        vod_path = utils.download_catalog(
            URL_VIDEO_VOD % (get_pcode(params), params.video_id),
            '%s_%s_video_vod.json' % (params.channel_name, params.video_id)
        )
        vod_json = json.loads(open(vod_path).read())
        # The stream URL arrives base64-encoded; keep the last stream's
        # decoded URL.
        decoded_url = ''
        streams = vod_json["authorization_data"][params.video_id]["streams"]
        for stream in streams:
            decoded_url = base64.standard_b64decode(stream["url"]["data"])
        return decoded_url
def get_live_item(params):
    """Build the live item for the Sky News YouTube stream."""
    plot = ''
    duration = 0
    # The live stream is a YouTube embed; pull its video id out of the
    # embed URL.
    page_path = utils.download_catalog(
        URL_LIVE_SKYNEWS,
        '%s_live.html' % params.channel_name,
    )
    page = open(page_path).read()
    youtube_id = re.compile(
        r'www.youtube.com/embed/(.*?)\?').findall(page)[0]
    thumb = URL_IMG_YOUTUBE % youtube_id
    return {
        'label': params.channel_label,
        'fanart': thumb,
        'thumb': thumb,
        'url': common.PLUGIN.get_url(
            module_path=params.module_path,
            module_name=params.module_name,
            action='start_live_tv_stream',
            next='play_l',
            video_id=youtube_id
        ),
        'is_playable': True,
        'info': {
            'video': {
                'title': params.channel_label,
                'plot': plot,
                'duration': duration
            }
        }
    }
def get_video_url(params):
    """Get video URL and start video player"""
    if params.next == 'play_r' or params.next == 'download_video':
        # Query-string parameters expected by the NBC stream endpoint.
        value_to_encode = {
            'policy': '43674',
            'player': 'NBC.com Instance of: rational-player-production',
            'formats': 'm3u,mpeg4',
            'format': 'SMIL',
            'embedded': 'true',
            'tracking': 'true'
        }
        # NOTE(review): no '?' is inserted before the encoded query, so
        # URL_STREAM presumably already ends with '?' or '&' — confirm.
        url_to_get_stream = (URL_STREAM % params.video_id) + \
            urllib.urlencode(value_to_encode)
        file_path = utils.download_catalog(
            url_to_get_stream,
            '%s_episode_%s.html' % (params.channel_name, params.video_id)
        )
        stream_html = open(file_path).read()
        # First <video src="..."> of the SMIL answer is the stream URL.
        return re.compile('<video src="(.*?)"').findall(stream_html)[0]
    return ''
def get_video_url(params):
    """Get video URL and start video player.

    Handles three replay flavours:
    * play_weather_r   -- URL already resolved upstream.
    * play_news_r      -- the news VOD JSON carries the media URL.
    * play_lifestyle_r -- resolve the player video id, fetch the API
                          key, then ask the lifestyle API for the
                          iPad m3u8 URL.
    """
    if params.next == 'play_weather_r':
        return params.video_url
    elif params.next == 'play_news_r':
        file_path = utils.download_catalog(
            URL_STREAM_NEWS % (params.video_date, params.video_id),
            '%s_%s.json' % (params.channel_name, params.video_id)
        )
        video_vod = open(file_path).read()
        json_parser = json.loads(video_vod)
        return json_parser["mediaResource"]["url"]
    elif params.next == 'play_lifestyle_r':
        video_id_html = utils.get_webcontent(params.video_url)
        # Raw strings with escaped '.' so the patterns match literally
        # (the old plain-string '\?' relied on Python keeping invalid
        # escapes, and 'player.php' matched any character before 'php').
        video_id = re.compile(
            r'player\.php\?v=(.*?)&').findall(video_id_html)[0]
        api_key_html = utils.get_webcontent(
            URL_API_KEY_LIFE_STYLE % video_id)
        api_key = re.compile(
            r'data-de-api-key="(.*?)"').findall(api_key_html)[0]
        url_stream = URL_STREAM_NHK_LIFE_STYLE % (api_key, video_id)
        url_stream_json = utils.get_webcontent(url_stream)
        json_parser_stream = json.loads(url_stream_json)
        program = json_parser_stream[
            "response"]["WsProgramResponse"]["program"]
        return program["asset"]["ipadM3u8Url"]
def list_shows(params):
    """Build categories listing"""
    shows = []
    if params.next == 'list_shows_1':
        # Step 1: list all programs; multi-episode shows go through a
        # season-selection step (list_shows_2), the rest go straight to
        # the video listing.
        file_path = utils.download_catalog(
            URL_SHOWS % (params.channel_name),
            '%s_show.html' % (params.channel_name)
        )
        replay_shows_html = open(file_path).read()
        replay_shows_soup = bs(replay_shows_html, 'html.parser')
        replay_shows = replay_shows_soup.find_all('div', class_='span2')
        for show in replay_shows:
            show_title = show.find('a').find('img').get('alt').encode('utf-8')
            show_img = show.find('a').find('img').get('src')
            show_url = URL_ROOT + show.find('a').get('href')
            # "episodes" in the caption marks a multi-episode series.
            if 'episodes' in show.find('p', class_='series-ep').get_text():
                shows.append({
                    'label': show_title,
                    'thumb': show_img,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        next='list_shows_2',
                        title=show_title,
                        show_url=show_url,
                        window_title=show_title
                    )
                })
            else:
                shows.append({
                    'label': show_title,
                    'thumb': show_img,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        next='list_videos_1',
                        title=show_title,
                        show_url=show_url,
                        window_title=show_title
                    )
                })
    elif params.next == 'list_shows_2':
        # Step 2: list the seasons of one show from its tag navigation.
        file_path = utils.download_catalog(
            params.show_url,
            '%s_show_%s.html' % (params.channel_name, params.title)
        )
        replay_show_html = open(file_path).read()
        replay_show_seasons_soup = bs(replay_show_html, 'html.parser')
        replay_show_seasons = replay_show_seasons_soup.find(
            'ul', class_='clearfix tag-nav')
        get_show_seasons = replay_show_seasons.find_all('li')
        for season in get_show_seasons:
            # <li id="nav-series-N"> -> "Series N"
            season_title = 'Series %s' % season.get(
                'id').encode('utf-8').split('nav-series-')[1]
            shows.append({
                'label': season_title,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next='list_videos_1',
                    title=params.title + '_' + season_title,
                    show_url=params.show_url,
                    window_title=season_title
                )
            })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
            common.sp.xbmcplugin.SORT_METHOD_LABEL
        ),
        category=common.get_window_title()
    )
def list_videos(params):
    """Build videos listing"""
    videos = []
    file_path = utils.download_catalog(
        params.show_url,
        '%s_show_%s.html' % (params.channel_name, params.title)
    )
    replay_show_html = open(file_path).read()
    episodes_soup = bs(replay_show_html, 'html.parser')
    root_episodes = episodes_soup.find_all('div', class_='carousel-inner')[0]
    episodes = root_episodes.find_all(
        'div', class_='col-md-4 wrapper-item season')
    for episode in episodes:
        # Caption format looks like "Series X | Episode Y ..."; split on
        # ' | ' then on spaces to pull the numbers out.
        value_episode = episode.find(
            'span', class_='caption-description'
        ).get_text().split(' | ')[1].split(' ')[1]
        value_season = episode.find(
            'span', class_='caption-description'
        ).get_text().split(' | ')[0].split(' ')[1]
        video_title = episode.find(
            'span', class_='caption-title'
        ).get_text() + ' S%sE%s' % (value_season, value_episode)
        video_duration = 0
        # Plot = title + description captions.
        video_plot = episode.find(
            'span', class_='caption-title').get_text().encode('utf-8') + ' '
        video_plot = video_plot + episode.find(
            'span', class_='caption-description').get_text().encode('utf-8')
        video_img = episode.find('a').find('img').get('src')
        video_url = URL_ROOT + episode.find('a').get('href').encode('utf-8')
        info = {
            'video': {
                'title': video_title,
                # 'aired': aired,
                # 'date': date,
                'duration': video_duration,
                'plot': video_plot,
                # 'year': year,
                'mediatype': 'tvshow'
            }
        }
        # Context-menu entry to download the episode.
        download_video = (
            common.GETTEXT('Download'),
            'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                action='download_video',
                module_path=params.module_path,
                module_name=params.module_name,
                video_url=video_url) + ')'
        )
        context_menu = []
        context_menu.append(download_video)
        videos.append({
            'label': video_title,
            'thumb': video_img,
            'fanart': video_img,
            'url': common.PLUGIN.get_url(
                module_path=params.module_path,
                module_name=params.module_name,
                action='replay_entry',
                next='play_r',
                video_url=video_url
            ),
            'is_playable': True,
            'info': info,
            'context_menu': context_menu
        })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
            common.sp.xbmcplugin.SORT_METHOD_DURATION,
            common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE,
            common.sp.xbmcplugin.SORT_METHOD_GENRE,
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED
        ),
        content='tvshows',
        update_listing='update_listing' in params,
        category=common.get_window_title(params)
    )
def list_videos(channel, param):
    """Build videos listing (legacy list-based API).

    ``param`` starts with the theme page URL ('<url>|...'). Scrapes the
    theme's video tiles, then appends a "next page" entry when a
    pagination block is present.
    """
    videos = []
    url = param.split('|')[0]
    file_path = utils.download_catalog(
        url, url + '.html')
    theme_html = open(file_path).read()
    theme_soup = bs(theme_html, "html.parser")
    # The site alternates two CSS classes for its video tiles; both
    # carry identical markup, so scrape them with ONE loop instead of
    # the two duplicated loops the original had.
    videos_soup = theme_soup.find_all(
        'li', class_='fleft lasts-online-even')
    videos2_soup = theme_soup.find_all(
        'li', class_='fleft lasts-online-odd')
    for video in videos_soup + videos2_soup:
        url = video.find('a')['href'].encode('utf-8')
        img = video.find('div', class_='visible')
        img = img.find('img')['src'].encode('utf-8')
        img = url_root + img
        title = video.find('h4').get_text().encode('utf-8')
        title = title.replace('\n', '').replace('\r', '')
        duration = 0
        infoLabels = {
            "Title": title,
            'Duration': duration}
        videos.append([
            channel,
            url,
            title,
            img,
            infoLabels,
            'play'])
    # Pagination: find the selected page and link the following one.
    page_soup = theme_soup.find(
        'div', class_='pagination')
    if page_soup is not None:
        page_soup = page_soup.find_all('a')
        current_page = 0
        for page in page_soup:
            if page.has_attr('class'):
                if page['class'][0].encode('utf-8') == 'selected':
                    current_page = page.get_text().encode('utf-8')
        current_page = int(current_page)
        if current_page < len(page_soup):
            next_url = page_soup[current_page]['href'].encode('utf-8')
            videos.append([
                channel,
                next_url,
                'Page suivante (page ' + str(current_page + 1) + ')',
                '',
                {},
                'shows'])
    return videos
def list_shows(params):
    """Build categories listing"""
    shows = []
    # Paginated listing: carry forward items already built by previous
    # invocations (serialized in previous_listing).
    if 'previous_listing' in params:
        shows = ast.literal_eval(params['previous_listing'])
    if params.next == 'list_shows_1':
        file_path = utils.download_catalog(
            URL_SHOWS % params.page,
            '%s_shows_%s.html' % (params.channel_name, params.page)
        )
        replay_shows_html = open(file_path).read()
        replay_shows_soup = bs(replay_shows_html, 'html.parser')
        replay_shows = replay_shows_soup.find_all('div', class_='item')
        for show in replay_shows:
            show_title = show.find('a').find('img').get('alt')
            show_img = show.find('a').find('img').get('src').encode('utf-8')
            show_url = URL_ROOT + show.find('a').get('href').encode('utf-8')
            shows.append({
                'label': show_title,
                'thumb': show_img,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next='list_shows_2',
                    title=show_title,
                    show_url=show_url,
                    window_title=show_title
                )
            })
        # More programs...  (entry that re-enters this step on the next
        # page, passing the accumulated listing along)
        shows.append({
            'label': common.ADDON.get_localized_string(30708),
            'url': common.PLUGIN.get_url(
                module_path=params.module_path,
                module_name=params.module_name,
                action='replay_entry',
                next='list_shows_1',
                page=str(int(params.page) + 1),
                update_listing=True,
                previous_listing=str(shows)
            )
        })
    elif params.next == 'list_shows_2':
        # Season list of one show, taken from its pagination block.
        file_path = utils.download_catalog(
            params.show_url,
            '%s_show_%s.html' % (params.channel_name, params.title)
        )
        replay_show_html = open(file_path).read()
        replay_show_seasons_soup = bs(replay_show_html, 'html.parser')
        replay_show_seasons = replay_show_seasons_soup.find(
            'div', class_='pagination')
        get_show_seasons = replay_show_seasons.find_all('a')
        for season in get_show_seasons:
            season_title = 'Series %s' % season.get_text().strip()
            show_season_url = URL_ROOT + season.get('href').encode('utf-8')
            shows.append({
                'label': season_title,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next='list_videos_1',
                    title=params.title + '_' + season_title,
                    show_url=show_season_url,
                    window_title=season_title
                )
            })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
            common.sp.xbmcplugin.SORT_METHOD_LABEL
        ),
        update_listing='update_listing' in params,
        category=common.get_window_title()
    )
def list_shows(params):
    """Create categories list"""
    shows = []
    if params.next == 'list_shows_1':
        # Step 1: top-level tabs of the videos page (minus "Les tops").
        file_path = utils.download_catalog(
            URL_VIDEOS_CNEWS, '%s_categories.html' % (
                params.channel_name))
        root_html = open(file_path).read()
        root_soup = bs(root_html, 'html.parser')
        menu_soup = root_soup.find('div', class_="nav-tabs-inner")
        categories_soup = menu_soup.find_all('a')
        for category in categories_soup:
            category_name = category.get_text().encode('utf-8')
            category_url = URL_ROOT_SITE + category.get('href')
            if category_name != 'Les tops':
                shows.append({
                    'label': category_name,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        category_url=category_url,
                        category_name=category_name,
                        next='list_shows_2',
                        window_title=category_name
                    )
                })
    elif params.next == 'list_shows_2':
        if params.category_name == 'Les sujets':
            # Step 2a: topic checkboxes inside the chosen category page.
            file_path = utils.download_catalog(
                params.category_url, '%s_%s.html' % (
                    params.channel_name, params.category_name))
            root_html = open(file_path).read()
            root_soup = bs(root_html, 'html.parser')
            categories_soup = root_soup.find_all('a', class_="checkbox")
            for category in categories_soup:
                category_name = category.get_text().encode('utf-8')
                category_url = URL_ROOT_SITE + category.get('href')
                shows.append({
                    'label': category_name,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        category_url=category_url,
                        page="1",
                        category_name=category_name,
                        next='list_videos',
                        window_title=category_name
                    )
                })
        else:
            # Find all emissions
            file_path = utils.download_catalog(
                URL_EMISSIONS_CNEWS, '%s_ALL_EMISSION.html' % (
                    params.channel_name))
            root_html = open(file_path).read()
            root_soup = bs(root_html, 'html.parser')
            categories_soup = root_soup.find_all('article', class_="item")
            for category in categories_soup:
                category_name = category.find('h3').get_text().encode('utf-8')
                # Rebase the emission link onto the videos site.
                category_url = URL_VIDEOS_CNEWS + \
                    '/emissions' + \
                    category.find('a').get('href').split('.fr')[1]
                category_img = category.find('img').get('src').encode('utf-8')
                shows.append({
                    'label': category_name,
                    'thumb': category_img,
                    'fanart': category_img,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        category_url=category_url,
                        page="1",
                        category_name=category_name,
                        next='list_videos',
                        window_title=category_name
                    )
                })
    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
            common.sp.xbmcplugin.SORT_METHOD_LABEL
        ),
        category=common.get_window_title()
    )
def list_videos(params):
    """Build videos listing"""
    videos = []
    if params.state_video == 'Toutes les videos (sans les categories)':
        # Branch 1: parse the sitemap XML of ALL videos and keep only
        # entries under this channel's /replay/ path.
        file_path = utils.download_catalog(
            URL_ALL_VIDEO,
            '%s_all_video.xml' % params.channel_name,
        )
        replay_xml = open(file_path).read()
        xml_elements = ET.XML(replay_xml)
        programs = xml_elements.findall(
            "{http://www.sitemaps.org/schemas/sitemap/0.9}url")
        for program in programs:
            url_site = program.findtext(
                "{http://www.sitemaps.org/schemas/sitemap/0.9}loc"
            ).encode('utf-8')
            check_string = '%s/replay/' % params.channel_name
            if url_site.count(check_string) > 0:
                # Title (derived from the URL slug)
                title = url_site.rsplit('/', 1)[1].replace("-", " ").upper()
                video_node = program.findall(
                    "{http://www.google.com/schemas/sitemap-video/1.1}video")[0]
                # Duration
                duration = 0
                # Image
                img = ''
                img_node = video_node.find(
                    "{http://www.google.com/schemas/sitemap-video/1.1}thumbnail_loc")
                img = img_node.text.encode('utf-8')
                # Url Video
                url = ''
                url_node = video_node.find(
                    "{http://www.google.com/schemas/sitemap-video/1.1}content_loc")
                url = url_node.text.encode('utf-8')
                # Plot
                plot = ''
                plot_node = video_node.find(
                    "{http://www.google.com/schemas/sitemap-video/1.1}description")
                if plot_node.text:
                    plot = plot_node.text.encode('utf-8')
                # Date (publication_date is ISO 'YYYY-MM-DDT...')
                value_date = ''
                value_date_node = video_node.find(
                    "{http://www.google.com/schemas/sitemap-video/1.1}publication_date")
                value_date = value_date_node.text.encode('utf-8')
                date = value_date.split('T')[0].split('-')
                day = date[2]
                mounth = date[1]
                year = date[0]
                date = '.'.join((day, mounth, year))
                aired = '-'.join((year, mounth, day))
                info = {
                    'video': {
                        'title': title,
                        'plot': plot,
                        'duration': duration,
                        'aired': aired,
                        'date': date,
                        'year': year,
                        'mediatype': 'tvshow'
                    }
                }
                download_video = (
                    common.GETTEXT('Download'),
                    'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                        action='download_video',
                        module_path=params.module_path,
                        module_name=params.module_name,
                        url_video=url_site) + ')'
                )
                context_menu = []
                context_menu.append(download_video)
                videos.append({
                    'label': title,
                    'fanart': img,
                    'thumb': img,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        next='play_r',
                        url_video=url
                    ),
                    'is_playable': True,
                    'info': info,
                    'context_menu': context_menu
                })
    else:
        # Branch 2: replay API XML; either list every program, or only
        # the ones matching the selected series id (IDSERIE).
        file_path = utils.download_catalog(
            URL_REPLAY_API % params.channel_name,
            '%s_replay.xml' % params.channel_name,
        )
        replay_xml = open(file_path).read()
        xml_elements = ET.XML(replay_xml)
        programs = xml_elements.findall("program")
        for program in programs:
            if params.state_video == 'Toutes les videos':
                # Title
                title = program.findtext("title").encode('utf-8') + " - " + \
                    program.findtext("subtitle").encode('utf-8')
                # Duration (minutes -> seconds; ignore non-numeric values)
                duration = 0
                if program.findtext("duration"):
                    try:
                        duration = int(program.findtext("duration")) * 60
                    except ValueError:
                        pass  # or whatever
                # Image
                img = program.find("photos").findtext("photo")
                # Url Video (keep the last offre/video found)
                url = ''
                # program.find("offres").find("offre").find("videos").findtext("video)
                for i in program.find("offres").findall("offre"):
                    date_value = i.get("startdate")
                    date_value_list = date_value.split(' ')[0].split('-')
                    day = date_value_list[2]
                    mounth = date_value_list[1]
                    year = date_value_list[0]
                    date = '.'.join((day, mounth, year))
                    aired = '-'.join((year, mounth, day))
                    for j in i.find("videos").findall("video"):
                        url = j.text.encode('utf-8')
                # Plot (story with maxlength 680 is the long summary)
                plot = ''
                for i in program.find("stories").findall("story"):
                    if int(i.get("maxlength")) == 680:
                        plot = i.text.encode('utf-8')
                info = {
                    'video': {
                        'title': title,
                        'plot': plot,
                        'duration': duration,
                        'aired': aired,
                        'date': date,
                        'year': year,
                        'mediatype': 'tvshow'
                    }
                }
                download_video = (
                    common.GETTEXT('Download'),
                    'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                        action='download_video',
                        module_path=params.module_path,
                        module_name=params.module_name,
                        url_video=url) + ')'
                )
                context_menu = []
                context_menu.append(download_video)
                videos.append({
                    'label': title,
                    'fanart': img,
                    'thumb': img,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        next='play_r',
                        url_video=url
                    ),
                    'is_playable': True,
                    'info': info,
                    'context_menu': context_menu
                })
            elif params.id_program == program.get("IDSERIE"):
                # Same extraction as above, restricted to one series.
                # Title
                title = program.findtext("title").encode('utf-8') + " - " + \
                    program.findtext("subtitle").encode('utf-8')
                # Duration
                duration = 0
                if program.findtext("duration"):
                    try:
                        duration = int(program.findtext("duration")) * 60
                    except ValueError:
                        pass  # or whatever
                # Image
                img = program.find("photos").findtext("photo")
                # Url Video
                url = ''
                # program.find("offres").find("offre").find("videos").findtext("video)
                for i in program.find("offres").findall("offre"):
                    date_value = i.get("startdate")
                    date_value_list = date_value.split(' ')[0].split('-')
                    day = date_value_list[2]
                    mounth = date_value_list[1]
                    year = date_value_list[0]
                    date = '.'.join((day, mounth, year))
                    aired = '-'.join((year, mounth, day))
                    for j in i.find("videos").findall("video"):
                        url = j.text.encode('utf-8')
                # Plot
                plot = ''
                for i in program.find("stories").findall("story"):
                    if int(i.get("maxlength")) == 680:
                        plot = i.text.encode('utf-8')
                info = {
                    'video': {
                        'title': title,
                        'plot': plot,
                        'duration': duration,
                        'aired': aired,
                        'date': date,
                        'year': year,
                        'mediatype': 'tvshow'
                    }
                }
                download_video = (
                    common.GETTEXT('Download'),
                    'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                        action='download_video',
                        module_path=params.module_path,
                        module_name=params.module_name,
                        url_video=url) + ')'
                )
                context_menu = []
                context_menu.append(download_video)
                videos.append({
                    'label': title,
                    'fanart': img,
                    'thumb': img,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        next='play_r',
                        url_video=url
                    ),
                    'is_playable': True,
                    'info': info,
                    'context_menu': context_menu
                })
    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_DATE,
            common.sp.xbmcplugin.SORT_METHOD_DURATION,
            common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE,
            common.sp.xbmcplugin.SORT_METHOD_GENRE,
            common.sp.xbmcplugin.SORT_METHOD_PLAYCOUNT,
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED
        ),
        content='tvshows',
        category=common.get_window_title()
    )
def list_videos(params):
    """Build the video listing for a replay category page (cnews).

    Supports paging: items collected on previous pages are passed back
    through params['previous_listing'] (a str()-serialized list) and the
    current page's videos are appended to them.
    """
    videos = []
    if 'previous_listing' in params:
        # Restore the items gathered on previous pages (serialized by the
        # "More videos" entry appended below).
        videos = ast.literal_eval(params['previous_listing'])
    if params.channel_name == 'cnews':
        url_page = params.category_url + '/page/%s' % params.page
        file_path = utils.download_catalog(
            url_page,
            '%s_%s_%s.html' % (
                params.channel_name, params.category_name, params.page))
        # Close the cached page after reading (was a file-handle leak).
        with open(file_path) as page_file:
            root_html = page_file.read()
        root_soup = bs(root_html, 'html.parser')
        programs = root_soup.find_all('article', class_='item')

        for program in programs:
            title = program.find('h3').get_text().encode('utf-8')
            thumb = program.find('img').get('src').encode('utf-8')

            # Fetch the video page to extract its id (local renamed from
            # `id` to avoid shadowing the builtin; the `id=` URL key is
            # unchanged so routing still works).
            video_html = utils.get_webcontent(
                program.find('a').get('href').encode('utf-8'))
            video_id = re.compile(r'videoId=(.*?)"').findall(video_html)[0]

            # Description comes from the article body of the video page.
            datas_video = bs(video_html, 'html.parser')
            description = datas_video.find(
                'article', class_='entry-body').get_text().encode('utf-8')

            duration = 0

            # Release date "YYYY-MM-DD..." -> ['YYYY', 'MM', 'DD']
            date = re.compile(
                r'property="video:release_date" content="(.*?)"'
            ).findall(video_html)[0].split('T')[0].split('-')
            day = date[2]
            mounth = date[1]
            year = date[0]
            date = '.'.join((day, mounth, year))
            aired = '-'.join((year, mounth, day))

            info = {
                'video': {
                    'title': title,
                    'plot': description,
                    'aired': aired,
                    'date': date,
                    'duration': duration,
                    'year': year,
                    # 'genre': category,
                    'mediatype': 'tvshow'
                }
            }

            download_video = (
                common.GETTEXT('Download'),
                'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                    action='download_video',
                    module_path=params.module_path,
                    module_name=params.module_name,
                    id=video_id) + ')'
            )
            context_menu = []
            context_menu.append(download_video)

            videos.append({
                'label': title,
                'thumb': thumb,
                'fanart': thumb,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next='play_r',
                    id=video_id
                ),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })

    # More videos...
    videos.append({
        'label': common.ADDON.get_localized_string(30700),
        'url': common.PLUGIN.get_url(
            module_path=params.module_path,
            module_name=params.module_name,
            action='replay_entry',
            category_url=params.category_url,
            category_name=params.category_name,
            next='list_videos',
            page=str(int(params.page) + 1),
            update_listing=True,
            previous_listing=str(videos)
        )
    })

    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_DATE,
            common.sp.xbmcplugin.SORT_METHOD_DURATION,
            common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE,
            common.sp.xbmcplugin.SORT_METHOD_GENRE,
            common.sp.xbmcplugin.SORT_METHOD_PLAYCOUNT,
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED
        ),
        content='tvshows',
        update_listing='update_listing' in params,
        category=common.get_window_title()
    )
def list_shows(params):
    """Build shows listing.

    Three modes, selected by params.next:
    - 'list_shows_without_categories': single "all videos" entry;
    - 'list_shows_1': one entry per distinct category from the
      collection XML (plus an "all videos" entry);
    - 'list_shows_programs': one entry per program of the selected
      category.
    """
    shows = []
    if 'list_shows_without_categories' in params.next:
        # Pour avoir toutes les videos
        state_video = 'Toutes les videos (sans les categories)'
        shows.append({
            'label': state_video,
            'url': common.PLUGIN.get_url(
                module_path=params.module_path,
                module_name=params.module_name,
                action='replay_entry',
                state_video=state_video,
                next='list_videos_1',
                # title_category=category_name,
                window_title=state_video
            )
        })
    else:
        unique_item = dict()
        file_path = utils.download_catalog(
            URL_COLLECTION_API % params.channel_name,
            '%s_collection.xml' % params.channel_name,
        )
        # Close the cached catalog after reading (was a file-handle leak).
        with open(file_path) as catalog_file:
            collection_xml = catalog_file.read()
        xml_elements = ET.XML(collection_xml)

        if 'list_shows_1' in params.next:
            # Build categories list (Tous les programmes, Séries, ...)
            collections = xml_elements.findall("collection")

            # Pour avoir toutes les videos, certaines videos ont des
            # categories non presentes dans cette URL 'url_collection_api'
            state_video = 'Toutes les videos'
            shows.append({
                'label': state_video,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    state_video=state_video,
                    next='list_videos_1',
                    # title_category=category_name,
                    window_title=state_video
                )
            })

            for collection in collections:
                category_name = collection.findtext("category").encode('utf-8')
                # Map the empty category BEFORE the dedupe check: doing it
                # after (as before) let every empty category produce a
                # duplicate 'NO_CATEGORY' entry.
                if category_name == '':
                    category_name = 'NO_CATEGORY'
                if category_name not in unique_item:
                    unique_item[category_name] = category_name
                    shows.append({
                        'label': category_name,
                        'url': common.PLUGIN.get_url(
                            module_path=params.module_path,
                            module_name=params.module_name,
                            action='replay_entry',
                            category_name=category_name,
                            next='list_shows_programs',
                            # title_category=category_name,
                            window_title=category_name
                        )
                    })

        elif 'list_shows_programs' in params.next:
            # Build programm list (Tous les programmes, Séries, ...)
            collections = xml_elements.findall("collection")
            state_video = 'VIDEOS_BY_CATEGORY'
            for collection in collections:
                # 'NO_CATEGORY' matches collections whose category is empty.
                if params.category_name == collection.findtext(
                        "category").encode('utf-8') \
                        or (params.category_name == 'NO_CATEGORY' and
                            collection.findtext(
                                "category").encode('utf-8') == ''):
                    name_program = collection.findtext("name").encode('utf-8')
                    img_program = collection.findtext("picture")
                    id_program = collection.get("id")
                    shows.append({
                        'label': name_program,
                        'thumb': img_program,
                        'url': common.PLUGIN.get_url(
                            module_path=params.module_path,
                            module_name=params.module_name,
                            action='replay_entry',
                            next='list_videos_1',
                            state_video=state_video,
                            id_program=id_program,
                            # title_program=name_program,
                            window_title=name_program
                        )
                    })

    return common.PLUGIN.create_listing(
        shows,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
            common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE
        ),
        category=common.get_window_title()
    )
def list_videos(params):
    """Build videos listing for a show page.

    If params.title contains 'Series', lists every episode of the
    selected season (plus the currently-playing one); otherwise builds a
    single entry from the standalone video page.
    """
    videos = []
    file_path = utils.download_catalog(
        params.show_url,
        '%s_show_%s.html' % (
            params.channel_name, params.title)
    )
    # Close the cached page after reading (was a file-handle leak).
    with open(file_path) as page_file:
        replay_show_season_html = page_file.read()
    seasons_episodes_soup = bs(replay_show_season_html, 'html.parser')

    # Get data-account / data-player (needed by the player/downloader)
    data_account = re.compile(
        r'data-account="(.*?)"').findall(replay_show_season_html)[0]
    data_player = re.compile(
        r'data-player="(.*?)"').findall(replay_show_season_html)[0]

    if "Series" in params.title:
        # GET VideoId for each episode of season selected
        seasons_episodes = seasons_episodes_soup.find_all(
            'div', class_='spanOneThird vod-episode clearfix ')
        for episode in seasons_episodes:
            if episode.get('data-series') == \
                    params.title.split('Series')[1].strip():
                data_vidid = episode.get('data-vidid')
                video_title = episode.get('data-title')
                video_title = video_title + ' S%sE%s' % (
                    episode.get('data-series'),
                    episode.get('data-episode'))
                video_duration = 0
                video_plot = 'Expire '
                video_plot = video_plot + episode.get(
                    'data-publishend').split('T')[0]
                video_plot = video_plot + '\n' + episode.get(
                    'data-teaser').encode('utf-8')
                video_img = episode.find('img').get('src')

                # Publish start "YYYY-MM-DDT..." -> date parts
                date_value = episode.get("data-publishstart")
                date_value_list = date_value.split('T')[0].split('-')
                day = date_value_list[2]
                mounth = date_value_list[1]
                year = date_value_list[0]
                date = '.'.join((day, mounth, year))
                aired = '-'.join((year, mounth, day))

                info = {
                    'video': {
                        'title': video_title,
                        'aired': aired,
                        'date': date,
                        'duration': video_duration,
                        'plot': video_plot,
                        'year': year,
                        'mediatype': 'tvshow'
                    }
                }

                download_video = (
                    common.GETTEXT('Download'),
                    'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                        action='download_video',
                        module_path=params.module_path,
                        module_name=params.module_name,
                        data_vidid=data_vidid,
                        data_account=data_account,
                        data_player=data_player) + ')'
                )
                context_menu = []
                context_menu.append(download_video)

                videos.append({
                    'label': video_title,
                    'thumb': video_img,
                    'fanart': video_img,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        next='play_r',
                        data_vidid=data_vidid,
                        data_account=data_account,
                        data_player=data_player
                    ),
                    'is_playable': True,
                    'info': info,
                    'context_menu': context_menu
                })

        # The episode currently highlighted on the page is marked with an
        # extra 'playing in' class and is not part of the list above.
        play_episode = seasons_episodes_soup.find(
            'div', class_='spanOneThird vod-episode clearfix playing in')
        # Guard against pages without a highlighted episode (find() may
        # return None, which used to crash on .get()).
        if play_episode is not None and play_episode.get('data-series') == \
                params.title.split('Series')[1].strip():
            data_vidid = play_episode.get('data-vidid')
            video_title = play_episode.get('data-title')
            video_title = video_title + ' S%sE%s' % (
                play_episode.get('data-series'),
                play_episode.get('data-episode')
            )
            video_duration = 0
            video_plot = 'Expire '
            video_plot = video_plot + play_episode.get(
                'data-publishend').split('T')[0] + '\n '
            video_plot = video_plot + play_episode.get(
                'data-teaser').encode('utf-8')
            video_img = play_episode.find('img').get('src')

            date_value = play_episode.get("data-publishstart")
            date_value_list = date_value.split('T')[0].split('-')
            day = date_value_list[2]
            mounth = date_value_list[1]
            year = date_value_list[0]
            date = '.'.join((day, mounth, year))
            aired = '-'.join((year, mounth, day))

            info = {
                'video': {
                    'title': video_title,
                    'aired': aired,
                    'date': date,
                    'duration': video_duration,
                    'plot': video_plot,
                    'year': year,
                    'mediatype': 'tvshow'
                }
            }

            download_video = (
                common.GETTEXT('Download'),
                'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                    action='download_video',
                    module_path=params.module_path,
                    module_name=params.module_name,
                    data_vidid=data_vidid,
                    data_account=data_account,
                    data_player=data_player) + ')'
            )
            context_menu = []
            context_menu.append(download_video)

            videos.append({
                'label': video_title,
                'thumb': video_img,
                'fanart': video_img,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next='play_r',
                    data_vidid=data_vidid,
                    data_account=data_account,
                    data_player=data_player
                ),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })
    else:
        # Standalone video page: one entry built from page metadata.
        play_episode = seasons_episodes_soup.find(
            'div', class_='vod-video-container')
        data_vidid = play_episode.find('a').get('data-vidid')
        video_title = play_episode.find('img').get('alt')
        video_duration = 0
        video_plot = seasons_episodes_soup.find(
            'p', class_='teaser').get_text().encode('utf-8')
        video_img = re.compile(
            'itemprop="image" content="(.*?)"'
        ).findall(replay_show_season_html)[0]

        # Upload date looks like "D Month YYYY, ..." -> zero-pad the day
        # and map the month name through CORRECT_MOUNTH.
        date_value = re.compile(
            'itemprop="uploadDate" content="(.*?)"'
        ).findall(replay_show_season_html)[0]
        date_value_list = date_value.split(',')[0].split(' ')
        if len(date_value_list[0]) == 1:
            day = '0' + date_value_list[0]
        else:
            day = date_value_list[0]
        try:
            mounth = CORRECT_MOUNTH[date_value_list[1]]
        except (KeyError, IndexError):
            # Unknown or missing month name: fall back to a placeholder.
            mounth = '00'
        year = date_value_list[2]
        date = '.'.join((day, mounth, year))
        aired = '-'.join((year, mounth, day))

        info = {
            'video': {
                'title': video_title,
                'aired': aired,
                'date': date,
                'duration': video_duration,
                'plot': video_plot,
                'year': year,
                'mediatype': 'tvshow'
            }
        }

        download_video = (
            common.GETTEXT('Download'),
            'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                action='download_video',
                module_path=params.module_path,
                module_name=params.module_name,
                data_vidid=data_vidid,
                data_account=data_account,
                data_player=data_player) + ')'
        )
        context_menu = []
        context_menu.append(download_video)

        videos.append({
            'label': video_title,
            'thumb': video_img,
            'fanart': video_img,
            'url': common.PLUGIN.get_url(
                module_path=params.module_path,
                module_name=params.module_name,
                action='replay_entry',
                next='play_r',
                data_vidid=data_vidid,
                data_account=data_account,
                data_player=data_player
            ),
            'is_playable': True,
            'info': info,
            'context_menu': context_menu
        })

    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
            common.sp.xbmcplugin.SORT_METHOD_DURATION,
            common.sp.xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE,
            common.sp.xbmcplugin.SORT_METHOD_GENRE,
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED
        ),
        content='tvshows',
        category=common.get_window_title()
    )
def get_live_item(params): lives = [] title = '' # subtitle = ' - ' plot = '' duration = 0 img = '' url_live = '' file_path = utils.download_catalog( URL_JSON_LIVE % (get_partener_key(params)), '%s_live.json' % (params.channel_name)) live_json = open(file_path).read() live_jsonparser = json.loads(live_json) # channel_live_in_process = False for live in live_jsonparser: if type(live["channel"]) is dict: live_channel = live["channel"]["label"] else: live_channel = 'Exclu Auvio' start_date_value = format_hours(live["start_date"]) end_date_value = format_hours(live["end_date"]) day_value = format_day(live["start_date"]) title = live_channel + " - [I]" + live["title"] + \ ' - ' + day_value + ' - ' + start_date_value + \ '-' + end_date_value + "[/I]" url_live = '' if live["url_streaming"]: url_live = live["url_streaming"]["url_hls"] plot = live["description"].encode('utf-8') img = live["images"]["illustration"]["16x9"]["1248x702"] info = { 'video': { 'title': title, 'plot': plot, 'duration': duration } } lives.append({ 'label': title, 'fanart': img, 'thumb': img, 'url': common.PLUGIN.get_url( action='start_live_tv_stream', next='play_l', module_name=params.module_name, module_path=params.module_path, url_live=url_live, ), 'is_playable': True, 'info': info }) return lives
def list_videos(params):
    """Build videos listing (RTBF Auvio).

    Two modes, selected by params.next:
    - 'list_videos_emission': videos of one emission, from the JSON API;
    - 'list_videos_categorie': videos of a category, scraped from HTML.
    """
    videos = []
    if params.next == 'list_videos_emission':
        file_path = utils.download_catalog(
            URL_JSON_EMISSION_BY_ID % params.emission_id,
            'url_videos_emission_%s.html' % params.emission_id)
        # Close the cached feed after reading (was a file-handle leak).
        with open(file_path) as feed_file:
            videos_json = feed_file.read()
        videos_jsonparser = json.loads(videos_json)

        for video in videos_jsonparser['data']:
            if video["subtitle"]:
                title = video["title"].encode('utf-8') + \
                    ' - ' + video["subtitle"].encode('utf-8')
            else:
                title = video["title"].encode('utf-8')
            img = URL_ROOT_IMAGE_RTBF + video["thumbnail"]["full_medium"]
            url_video = video["urlHls"]
            plot = ''
            if video["description"]:
                plot = video["description"].encode('utf-8')
            duration = video["durations"]

            # "liveFrom" is an epoch timestamp -> 'DD MM YYYY' parts
            value_date = time.strftime(
                '%d %m %Y', time.localtime(video["liveFrom"]))
            date = str(value_date).split(' ')
            day = date[0]
            mounth = date[1]
            year = date[2]
            date = '.'.join((day, mounth, year))
            aired = '-'.join((year, mounth, day))

            info = {
                'video': {
                    'title': title,
                    'plot': plot,
                    # 'episode': episode_number,
                    # 'season': season_number,
                    # 'rating': note,
                    'aired': aired,
                    'date': date,
                    'duration': duration,
                    'year': year,
                    'mediatype': 'tvshow'
                }
            }

            download_video = (
                common.GETTEXT('Download'),
                'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                    action='download_video',
                    module_path=params.module_path,
                    module_name=params.module_name,
                    url_video=url_video) + ')'
            )
            context_menu = []
            context_menu.append(download_video)

            videos.append({
                'label': title,
                'thumb': img,
                'fanart': img,
                'url': common.PLUGIN.get_url(
                    module_path=params.module_path,
                    module_name=params.module_name,
                    action='replay_entry',
                    next='play_r',
                    url_video=url_video
                ),
                'is_playable': True,
                'info': info,
                'context_menu': context_menu
            })

    elif params.next == 'list_videos_categorie':
        file_path = utils.get_webcontent(params.category_url)
        episodes_soup = bs(file_path, 'html.parser')
        list_episodes = episodes_soup.find_all('article')
        for episode in list_episodes:
            if episode.get('data-type') == 'media':
                if episode.find('h4'):
                    title = episode.find('h3').find(
                        'a').get('title') + ' - ' + \
                        episode.find('h4').get_text()
                else:
                    title = episode.find('h3').find('a').get('title')
                duration = 0
                video_id = episode.get('data-id')

                # srcset lists several resolutions; keep the last (largest)
                # URL, as the original last-wins loop did.
                all_images = episode.find('img').get(
                    'data-srcset').split(',')
                img = all_images[-1].split(' ')[0]

                info = {
                    'video': {
                        'title': title,
                        # 'plot': plot,
                        # 'episode': episode_number,
                        # 'season': season_number,
                        # 'rating': note,
                        # 'aired': aired,
                        # 'date': date,
                        'duration': duration,
                        # 'year': year,
                        'mediatype': 'tvshow'
                    }
                }

                download_video = (
                    common.GETTEXT('Download'),
                    'XBMC.RunPlugin(' + common.PLUGIN.get_url(
                        action='download_video',
                        module_path=params.module_path,
                        module_name=params.module_name,
                        video_id=video_id) + ')'
                )
                context_menu = []
                context_menu.append(download_video)

                videos.append({
                    'label': title,
                    'thumb': img,
                    'fanart': img,
                    'url': common.PLUGIN.get_url(
                        module_path=params.module_path,
                        module_name=params.module_name,
                        action='replay_entry',
                        next='play_r_categorie',
                        video_id=video_id
                    ),
                    'is_playable': True,
                    'info': info,
                    'context_menu': context_menu
                })

    return common.PLUGIN.create_listing(
        videos,
        sort_methods=(
            common.sp.xbmcplugin.SORT_METHOD_UNSORTED,
            common.sp.xbmcplugin.SORT_METHOD_DATE
        ),
        content='tvshows',
        category=common.get_window_title()
    )
def list_shows(params): """Build categories listing""" shows = [] if params.next == 'list_shows_1': emission_title = 'Émissions' shows.append({ 'label': emission_title, 'url': common.PLUGIN.get_url( module_path=params.module_path, module_name=params.module_name, emission_title=emission_title, action='replay_entry', next='list_shows_2', window_title=emission_title ) }) file_path = utils.get_webcontent(URL_CATEGORIES) categories_json = json.loads(file_path) for category in categories_json["item"]: if category["@attributes"]["id"] == 'category': for category_sub in category["item"]: if 'category-' in category_sub["@attributes"]["id"]: category_name = category_sub["@attributes"]["name"] category_url = category_sub["@attributes"]["url"] shows.append({ 'label': category_name, 'url': common.PLUGIN.get_url( module_path=params.module_path, module_name=params.module_name, action='replay_entry', category_url=category_url, category_name=category_name, next='list_videos_categorie', window_title=category_name ) }) elif params.next == 'list_shows_2': file_path = utils.download_catalog( URL_EMISSIONS_AUVIO, 'url_emissions_auvio.html') emissions_html = open(file_path).read() emissions_soup = bs(emissions_html, 'html.parser') list_emissions = emissions_soup.find_all( 'article', class_="rtbf-media-item col-xxs-12 col-xs-6 col-md-4 col-lg-3 ") for emission in list_emissions: emission_id = emission.get('data-id') emission_title = emission.find('h4').get_text().encode('utf-8') shows.append({ 'label': emission_title, 'url': common.PLUGIN.get_url( module_path=params.module_path, module_name=params.module_name, emission_title=emission_title, action='replay_entry', emission_id=emission_id, next='list_videos_emission', window_title=emission_title ) }) return common.PLUGIN.create_listing( shows, sort_methods=( common.sp.xbmcplugin.SORT_METHOD_UNSORTED, common.sp.xbmcplugin.SORT_METHOD_LABEL ), category=common.get_window_title() )