def replaytv_item(ids=None, label=None, start=0, **kwargs):
    """Build a folder of replay-TV items for one alphabetical bucket.

    Args:
        ids: ids filter passed through to process_replaytv_list_content.
        label: folder title; its first character selects the cache file
            ("<letter>_replay.json" when alphabetic, "other_replay.json" else).
        start: pagination offset (stringly-typed by the router, hence int()).

    Returns:
        plugin.Folder with the items and, when more rows remain, a
        NEXT_PAGE entry.
    """
    start = int(start)
    folder = plugin.Folder(title=label)

    # Guard: label may be None/empty (its default) — fall back to the
    # "other" bucket instead of crashing on label[0].
    first = label[0] if label else ''

    if first.isalpha():
        data = load_file(file=first + "_replay.json", isJSON=True)
    else:
        data = load_file(file='other_replay.json', isJSON=True)

    if not data:
        return folder

    processed = process_replaytv_list_content(data=data, ids=ids, start=start)

    if check_key(processed, 'items'):
        folder.add_items(processed['items'])

    # Paginate only when this window did not exhaust the total rows.
    if check_key(processed, 'totalrows') and check_key(processed, 'count') and processed['totalrows'] > processed['count']:
        folder.add_item(
            label=_(_.NEXT_PAGE, _bold=True),
            path=plugin.url_for(func_or_url=replaytv_item, ids=ids, label=label, start=processed['count']),
        )

    return folder
def update_prefs(self):
    """Synchronise channel_prefs.json with the channel test results.

    For every channel and each of the keys live/replay/epg:
      - missing pref entries are created, defaulting to the tested result
        (or 'true' when the channel was never tested) with choice 'auto';
      - existing 'auto' prefs are refreshed from the latest test results.
    Manually chosen (non-'auto') prefs are left untouched.
    """
    if self._debug_mode:
        log.debug('Executing: api.update_prefs')

    # load_file returns a falsy value when a file is missing; normalise
    # each to an empty dict so the loop below is always safe.
    prefs = load_file(file="channel_prefs.json", isJSON=True) or {}
    results = load_file(file="channel_test.json", isJSON=True) or {}
    channels = load_file(file="channels.json", isJSON=True) or {}

    for row in channels:
        channeldata = self.get_channel_data(row=row, channelno=1)
        channel_id = unicode(channeldata['channel_id'])

        # Skip rows without a usable channel id (the original re-wrapped
        # the already-unicode id in unicode() before len(); redundant).
        if len(channel_id) == 0:
            continue

        for key in ('live', 'replay', 'epg'):
            choice_key = key + '_choice'

            if not check_key(prefs, channel_id) or not check_key(prefs[channel_id], key):
                # No stored pref yet: seed from the test result when one
                # exists, otherwise optimistically default to 'true'.
                if check_key(results, channel_id):
                    value = results[channel_id][key]
                else:
                    value = 'true'

                if not check_key(prefs, channel_id):
                    prefs[channel_id] = {key: value, choice_key: 'auto'}
                else:
                    prefs[channel_id][key] = value
                    prefs[channel_id][choice_key] = 'auto'
            elif prefs[channel_id][choice_key] == 'auto' and check_key(results, channel_id):
                # 'auto' prefs track the newest test result.
                prefs[channel_id][key] = results[channel_id][key]

    write_file(file="channel_prefs.json", data=prefs, isJSON=True)

    if self._debug_mode:
        log.debug('Execution Done: api.update_prefs')
def search(query=None, **kwargs):
    """Search replay TV and (optionally) the VOD catalogue.

    Prompts for a query when none is given, rotates the stored search
    history (_search1.._search9), fuzzy-matches across replay and VOD
    content and returns the 25 best matches, best first.
    """
    items = []

    if not query:
        query = gui.input(message=_.SEARCH, default='').strip()

        if not query:
            return

    # Shift the search history down one slot, newest query on top.
    for x in reversed(list(range(2, 10))):
        settings.set(key='_search' + unicode(x), value=settings.get(key='_search' + unicode(x - 1)))

    settings.set(key='_search1', value=query)

    folder = plugin.Folder(title=_(_.SEARCH_FOR, query=query))

    data = load_file(file='list_replay.json', isJSON=True)
    processed = process_replaytv_search(data=data, start=0, search=query)
    items += processed['items']

    if settings.getBool('showMoviesSeries') == True:
        # Load the VOD catalogue once and reuse it for every section
        # (the original re-read vod.json from disk four times).
        vod_data = load_file(file='vod.json', isJSON=True)

        for section, section_type in (('series', _.SERIES),
                                      ('movies', _.MOVIES),
                                      ('kidsseries', _.KIDS_SERIES),
                                      ('kidsmovies', _.KIDS_MOVIES)):
            processed = process_vod_content(data=vod_data[section], start=0, search=query, type=section_type)
            items += processed['items']

    items[:] = sorted(items, key=_sort_replay_items, reverse=True)
    items = items[:25]

    folder.add_items(items)

    return folder
def create_playlist(self):
    """Generate the tv.m3u8 / tv_all.m3u8 IPTV playlists.

    tv_all.m3u8 lists every channel; tv.m3u8 only channels whose 'epg'
    preference is enabled (or all, when no prefs exist yet). Also
    refreshes the _channels_age timestamp and calls combine_playlist().
    """
    if self._debug_mode:
        log.debug('Executing: api.create_playlist')

    prefs = load_file(file="channel_prefs.json", isJSON=True)
    # load_file returns a falsy value when channels.json is missing;
    # iterate an empty list instead of crashing.
    channels = load_file(file="channels.json", isJSON=True) or []

    playlist_all = u'#EXTM3U\n'
    playlist = u'#EXTM3U\n'

    for row in channels:
        channeldata = self.get_channel_data(row=row)
        id = unicode(channeldata['channel_id'])

        if len(id) > 0:
            path = 'plugin://{addonid}/?_=play_video&channel={channel}&id={asset}&type=channel&_l=.pvr'.format(
                addonid=ADDON_ID,
                channel=channeldata['channel_id'],
                asset=channeldata['asset_id'])

            # Identical EXTINF entry for both playlists (was duplicated).
            entry = u'#EXTINF:-1 tvg-id="{id}" tvg-chno="{channel}" tvg-name="{name}" tvg-logo="{logo}" group-title="TV" radio="false",{name}\n{path}\n'.format(
                id=channeldata['channel_id'],
                channel=channeldata['channel_number'],
                name=channeldata['label'],
                logo=channeldata['station_image_large'],
                path=path)

            playlist_all += entry

            # Respect the per-channel preference; channels without a
            # stored preference default to included.
            if not prefs or not check_key(prefs, id) or prefs[id]['epg'] == 'true':
                playlist += entry

    self._channels_age = time.time()
    settings.setInt(key='_channels_age', value=self._channels_age)

    if self._debug_mode:
        log.debug('Setting _channels_age to: {channels_age}'.format(channels_age=self._channels_age))
        log.debug('Writing tv.m3u8: {playlist}'.format(playlist=playlist))

    write_file(file="tv.m3u8", data=playlist, isJSON=False)
    write_file(file="tv_all.m3u8", data=playlist_all, isJSON=False)
    combine_playlist()

    if self._debug_mode:
        log.debug('Execution Done: api.create_playlist')
def search(query=None, **kwargs):
    """Search replay TV and return the 25 best fuzzy matches."""
    items = []

    if not query:
        query = gui.input(message=_.SEARCH, default='').strip()
        if not query:
            return

    # Rotate the saved search history: slot N receives slot N-1,
    # then the fresh query lands in slot 1.
    for slot in range(9, 1, -1):
        previous = settings.get(key='_search' + unicode(slot - 1))
        settings.set(key='_search' + unicode(slot), value=previous)

    settings.set(key='_search1', value=query)

    folder = plugin.Folder(title=_(_.SEARCH_FOR, query=query))

    replay_data = load_file(file='list_replay.json', isJSON=True)
    processed = process_replaytv_search(data=replay_data, start=0, search=query)
    items += processed['items']

    # Best matches first, capped at 25 results.
    items[:] = sorted(items, key=_sort_replay_items, reverse=True)
    items = items[:25]

    folder.add_items(items)

    return folder
def update_settings():
    """Cache the service endpoint URLs from settings.json into addon settings.

    Falls back silently when settings.json is missing or malformed, and
    uses CONST_DEFAULT_CLIENTID when no client_id is provided.
    """
    settingsJSON = load_file(file='settings.json', isJSON=True)

    try:
        license_url = '{base_url}/{country_code}/{language_code}'.format(
            base_url=settingsJSON['settings']['urls']['base'],
            country_code=settingsJSON['settings']['countryCode'],
            language_code=settingsJSON['settings']['languageCode'])

        settings.set(key='_search_url', value=settingsJSON['settings']['routes']['search'])
        settings.set(key='_session_url', value=settingsJSON['settings']['routes']['session'])
        settings.set(key='_channels_url', value=settingsJSON['settings']['routes']['channels'])

        # NOTE: a dead write of routes['refreshToken'] to _token_url was
        # removed — it was immediately overwritten by the license URL below.
        settings.set(key='_token_url', value='{license_url}/web/license/token'.format(license_url=license_url))
        settings.set(key='_widevine_url', value='{license_url}/web/license/eme'.format(license_url=license_url))
        settings.set(key='_listings_url', value=settingsJSON['settings']['routes']['listings'])
        settings.set(key='_mediaitems_url', value=settingsJSON['settings']['routes']['mediaitems'])
        settings.set(key='_mediagroupsfeeds_url', value=settingsJSON['settings']['routes']['mediagroupsfeeds'])
        settings.set(key='_watchlist_url', value=settingsJSON['settings']['routes']['watchlist'])
    except Exception:
        # Best effort: keep whatever endpoints were cached previously.
        pass

    try:
        client_id = settingsJSON['client_id']
    except Exception:
        client_id = CONST_DEFAULT_CLIENTID

    settings.set(key='_client_id', value=client_id)
def get_replay_channels():
    """Return the sorted list of replay-capable channel entries."""
    channels = []
    rows = load_file(file='channels.json', isJSON=True)

    if rows:
        for row in rows:
            channeldata = api.get_channel_data(rows=rows, row=row)

            # Each entry opens the per-day replay browser for its station.
            replay_path = plugin.url_for(
                func_or_url=replaytv_by_day,
                image=channeldata['station_image_large'],
                description=channeldata['description'],
                label=channeldata['label'],
                station=channeldata['channel_id'])

            channels.append({
                'label': channeldata['label'],
                'channel': channeldata['channel_id'],
                'chno': channeldata['channel_number'],
                'description': channeldata['description'],
                'image': channeldata['station_image_large'],
                'path': replay_path,
                'playable': False,
            })

    channels[:] = sorted(channels, key=_sort_live)

    return channels
def replaytv_list(character, label='', start=0, **kwargs):
    """Folder of replay items for one starting character, paginated."""
    start = int(start)
    folder = plugin.Folder(title=label)

    data = load_file(file='list_replay.json', isJSON=True)

    # No cached replay list at all: tell the user and bail out.
    if not data:
        gui.ok(message=_.NO_REPLAY_TV_INFO, heading=_.NO_REPLAY_TV_INFO)
        return folder

    # Nothing filed under this character.
    if not check_key(data, character):
        return folder

    page = process_replaytv_list(data=data[character], start=start)

    if check_key(page, 'items'):
        folder.add_items(page['items'])

    # Offer a next page while rows remain beyond the processed window.
    has_more = check_key(page, 'count') and len(data[character]) > page['count']
    if has_more:
        folder.add_item(
            label=_(_.NEXT_PAGE, _bold=True),
            path=plugin.url_for(func_or_url=replaytv_list, character=character, label=label, start=page['count']),
        )

    return folder
def replaytv_content(label, day, station='', start=0, **kwargs):
    """Folder of one station's replay programmes for a given day."""
    day = int(day)
    start = int(start)
    folder = plugin.Folder(title=label)

    data = load_file(file=station + "_replay.json", isJSON=True)

    # Station has no replay cache: explain and return the empty folder.
    if not data:
        gui.ok(_.DISABLE_ONLY_STANDARD, _.NO_REPLAY_TV_INFO)
        return folder

    page = process_replaytv_content(data=data, day=day, start=start)

    if check_key(page, 'items'):
        folder.add_items(page['items'])

    # Add a pagination entry while unprocessed rows remain.
    has_more = check_key(page, 'count') and len(data) > page['count']
    if has_more:
        folder.add_item(
            label=_(_.NEXT_PAGE, _bold=True),
            path=plugin.url_for(func_or_url=replaytv_content, label=label, day=day, station=station, start=page['count']),
        )

    return folder
def update_api_url():
    """Store the API base URL from settings.json into addon settings.

    Consistent with the other settings updaters: an absent, malformed or
    empty 'api_url' entry falls back to CONST_DEFAULT_API.
    """
    settingsJSON = load_file(file='settings.json', isJSON=True)

    try:
        api_url = settingsJSON['api_url']

        # An empty string is as unusable as a missing key.
        if len(api_url) == 0:
            api_url = CONST_DEFAULT_API
    except Exception:
        api_url = CONST_DEFAULT_API

    settings.set(key='_api_url', value=api_url)
def update_img_size():
    """Store the image size from settings.json into addon settings.

    Consistent with the other settings updaters: an absent, malformed or
    empty 'img_size' entry falls back to CONST_DEFAULT_IMG_SIZE.
    """
    settingsJSON = load_file(file='settings.json', isJSON=True)

    try:
        img_size = settingsJSON['img_size']

        # An empty string is as unusable as a missing key.
        if len(img_size) == 0:
            img_size = CONST_DEFAULT_IMG_SIZE
    except Exception:
        img_size = CONST_DEFAULT_IMG_SIZE

    settings.set(key='_img_size', value=img_size)
def update_settings():
    """Cache service endpoints from settings.json, honouring the v3 base URL.

    When the '_base_v3' toggle is set, every route URL is rebased from the
    default base onto the alternative AJAX base. Falls back silently when
    settings.json is missing or malformed.
    """
    settingsJSON = load_file(file='settings.json', isJSON=True)

    try:
        base = settingsJSON['settings']['urls']['base']

        # Optionally rebase every route onto the alternative (v3) host.
        if settings.getBool(key='_base_v3'):
            basethree = settingsJSON['settings']['urls']['alternativeAjaxBase']
        else:
            basethree = base

        complete_base_url = '{base_url}/{country_code}/{language_code}'.format(
            base_url=basethree,
            country_code=settingsJSON['settings']['countryCode'],
            language_code=settingsJSON['settings']['languageCode'])

        settings.set(key='_base_url', value=complete_base_url + '/web')

        routes = settingsJSON['settings']['routes']

        # Route passthroughs, rebased when needed (was repeated inline).
        for setting_key, route_key in (('_search_url', 'search'),
                                       ('_session_url', 'session'),
                                       ('_channels_url', 'channels')):
            settings.set(key=setting_key, value=routes[route_key].replace(base, basethree))

        # Token and widevine endpoints are derived, not taken from routes.
        settings.set(key='_token_url', value='{complete_base_url}/web/license/token'.format(complete_base_url=complete_base_url))
        settings.set(key='_widevine_url', value='{complete_base_url}/web/license/eme'.format(complete_base_url=complete_base_url))

        for setting_key, route_key in (('_listings_url', 'listings'),
                                       ('_mediaitems_url', 'mediaitems'),
                                       ('_mediagroupsfeeds_url', 'mediagroupsfeeds'),
                                       ('_watchlist_url', 'watchlist')):
            settings.set(key=setting_key, value=routes[route_key].replace(base, basethree))
    except Exception:
        # Best effort: keep previously cached endpoints.
        pass

    try:
        client_id = settingsJSON['client_id']
    except Exception:
        client_id = CONST_DEFAULT_CLIENTID

    settings.set(key='_client_id', value=client_id)
def vod_seasons(self, id):
    """Return the list of seasons for a VOD series, or None on failure.

    Results are served from a 10-minute on-disk cache when enabled.
    """
    if self._debug_mode:
        log.debug('Executing: api.vod_seasons')
        log.debug('Vars: id={id}'.format(id=id))

    program_url = '{api_url}/CONTENT/DETAIL/GROUP_OF_BUNDLES/{id}'.format(api_url=self._api_url, id=id)
    file = "cache" + os.sep + "vod_seasons_" + unicode(id) + ".json"

    cache_fresh = self._enable_cache and not is_file_older_than_x_minutes(file=ADDON_PROFILE + file, minutes=10)

    if cache_fresh:
        data = load_file(file=file, isJSON=True)
    else:
        data = self.download(url=program_url, type='get', code=[200], data=None, json_data=False, data_return=True, return_json=True, retry=True, check_data=True)

        if data and check_key(data['resultObj'], 'containers') and self._enable_cache:
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data['resultObj'], 'containers'):
        if self._debug_mode:
            log.debug('Failure to retrieve expected data')
            log.debug('Execution Done: api.vod_seasons')

        return None

    seasons = []

    for container in data['resultObj']['containers']:
        for entry in container['containers']:
            # Only SEASON sub-containers carrying season metadata count.
            if check_key(entry, 'metadata') and check_key(entry['metadata'], 'season') and entry['metadata']['contentSubtype'] == 'SEASON':
                seasons.append({
                    'id': entry['metadata']['contentId'],
                    'seriesNumber': entry['metadata']['season'],
                    'desc': entry['metadata']['shortDescription'],
                    'image': entry['metadata']['pictureUrl'],
                })

    if self._debug_mode:
        log.debug('Execution Done: api.vod_seasons')

    return seasons
def vod_seasons(self, id):
    """Return the seasons of a VOD series, or None when unavailable."""
    profile_settings = load_profile(profile_id=1)

    program_url = '{api_url}/CONTENT/DETAIL/GROUP_OF_BUNDLES/{id}'.format(api_url=profile_settings['api_url'], id=id)
    file = "cache" + os.sep + "vod_seasons_" + unicode(id) + ".json"

    if settings.getBool(key='enable_cache') and not is_file_older_than_x_minutes(file=ADDON_PROFILE + file, minutes=10):
        # Serve from disk while the cached copy is younger than 10 minutes.
        data = load_file(file=file, isJSON=True)
    else:
        if not self.get_session():
            return None

        download = self.download(url=program_url, type='get', headers=None, data=None, json_data=True, return_json=True)
        data = download['data']
        resp = download['resp']

        response_ok = (resp and resp.status_code == 200 and data
                       and check_key(data, 'resultCode') and data['resultCode'] == 'OK'
                       and check_key(data, 'resultObj') and check_key(data['resultObj'], 'containers'))

        if response_ok and settings.getBool(key='enable_cache'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data['resultObj'], 'containers'):
        return None

    seasons = []

    for container in data['resultObj']['containers']:
        for entry in container['containers']:
            # Only SEASON sub-containers carrying season metadata count.
            if not check_key(entry, 'metadata') or not check_key(entry['metadata'], 'season'):
                continue

            if entry['metadata']['contentSubtype'] != 'SEASON':
                continue

            seasons.append({
                'id': entry['metadata']['contentId'],
                'seriesNumber': entry['metadata']['season'],
                'desc': entry['metadata']['shortDescription'],
                'image': entry['metadata']['pictureUrl'],
            })

    return seasons
def get_live_channels(addon=False):
    """Return the sorted live-channel list.

    When addon=True, channels already known to the PVR client are routed
    through switchChannel instead of being played directly.
    """
    global backend, query_channel
    channels = []
    rows = load_file(file='channels.json', isJSON=True)

    if rows:
        if addon == True:
            # Discover the PVR client and its channel list once up front.
            query_addons = json.loads(
                xbmc.executeJSONRPC(
                    '{"jsonrpc": "2.0", "id": 1, "method": "Addons.GetAddons", "params": {"type": "xbmc.pvrclient"}}'
                ))
            addons = query_addons['result']['addons']
            backend = addons[0]['addonid']

            query_channel = json.loads(
                xbmc.executeJSONRPC(
                    '{"jsonrpc": "2.0", "method": "PVR.GetChannels", "params": {"channelgroupid": "alltv", "properties" :["uniqueid"]},"id": 1}'
                ))

        for row in rows:
            channeldata = api.get_channel_data(rows=rows, row=row)
            path = plugin.url_for(func_or_url=play_video, type='channel', channel=channeldata['channel_id'], id=None, _is_live=True)
            playable = True

            if addon == True and 'result' in query_channel and 'channels' in query_channel['result']:
                # Prefer switching to the matching PVR channel when found.
                for pvr_channel in query_channel['result']['channels']:
                    if pvr_channel['label'] == channeldata['label']:
                        path = plugin.url_for(func_or_url=switchChannel, channel_uid=pvr_channel['uniqueid'])
                        playable = False
                        break

            channels.append({
                'label': channeldata['label'],
                'channel': channeldata['channel_id'],
                'chno': channeldata['channel_number'],
                'description': channeldata['description'],
                'image': channeldata['station_image_large'],
                'path': path,
                'playable': playable,
            })

    channels[:] = sorted(channels, key=_sort_live)

    return channels
def vod(file, label, kids=0, start=0, **kwargs):
    """Folder of VOD entries for one section of vod.json, paginated."""
    kids = int(kids)
    start = int(start)
    folder = plugin.Folder(title=label)

    data = load_file(file='vod.json', isJSON=True)[file]
    if not data:
        return folder

    # NOTE(review): the kids flag is forwarded under the 'series=' keyword;
    # confirm process_vod_content actually accepts it under that name.
    page = process_vod_content(data=data, start=start, series=kids, type=label)

    if check_key(page, 'items'):
        folder.add_items(page['items'])

    # Paginate while rows remain beyond the processed window.
    if check_key(page, 'count') and len(data) > page['count']:
        folder.add_item(
            label=_(_.NEXT_PAGE, _bold=True),
            path=plugin.url_for(func_or_url=vod, file=file, label=label, kids=kids, start=page['count']),
        )

    return folder
def update_settings():
    """Refresh API url, image size and user-agent details in the vars table.

    Missing or empty settings fall back to the compiled-in defaults.
    """
    profile_settings = load_profile(profile_id=1)
    settingsJSON = load_file(file='settings.json', isJSON=True)

    try:
        api_url = settingsJSON['api_url']

        if len(api_url) == 0:
            api_url = CONST_DEFAULT_API
    except Exception:
        api_url = CONST_DEFAULT_API

    try:
        img_size = settingsJSON['img_size']

        if len(img_size) == 0:
            img_size = CONST_DEFAULT_IMG_SIZE
    except Exception:
        img_size = CONST_DEFAULT_IMG_SIZE

    user_agent = profile_settings['user_agent']

    # Parse the user agent once (was parsed four separate times).
    ua_info = uaparser.detect(user_agent)
    browser_name = ua_info['browser']['name']
    browser_version = ua_info['browser']['version']
    os_name = ua_info['os']['name']
    os_version = ua_info['os']['version']

    # NOTE(review): values are interpolated straight into SQL. They come
    # from local config files, but parameterized queries would be safer
    # if query_settings supports them.
    query = "UPDATE `vars` SET `api_url`='{api_url}', `img_size`='{img_size}', `browser_name`='{browser_name}', `browser_version`='{browser_version}', `os_name`='{os_name}', `os_version`='{os_version}' WHERE profile_id={profile_id}".format(
        api_url=api_url,
        img_size=img_size,
        browser_name=browser_name,
        browser_version=browser_version,
        os_name=os_name,
        os_version=os_version,
        profile_id=1)

    query_settings(query=query, return_result=False, return_insert=False, commit=True)
def test_channels(self, tested=False, channel=None):
    """Probe channels for live/replay playability and record the results.

    Tests up to 5 channels per invocation (1 when *tested* is set, resuming
    after results['last_tested']), writing bandwidth and capability flags to
    channel_test.json and then syncing preferences via update_prefs().

    Returns:
        The number of channels tested; 5 also doubles as the "stop/busy"
        sentinel (login failure, playback in progress, abort, error).
    """
    if self._debug_mode:
        log.debug('Executing: api.test_channels')
        log.debug('Vars: tested={tested}, channel={channel}'.format(
            tested=tested, channel=channel))

    if channel:
        channel = unicode(channel)

    try:
        # Bail out when login failed or testing is disabled by the user.
        if not self._last_login_success or not settings.getBool(
                key='run_tests'):
            return 5

        settings.setBool(key='_test_running', value=True)
        channels = load_file(file="channels.json", isJSON=True)
        results = load_file(file="channel_test.json", isJSON=True)
        count = 0
        first = True
        last_tested_found = False
        test_run = False
        user_agent = settings.get(key='_user_agent')

        if not results:
            results = {}

        for row in channels:
            # Stop after 5 channels (or 1 in single-shot mode).
            if count == 5 or (count == 1 and tested):
                if test_run:
                    self.update_prefs()

                settings.setBool(key='_test_running', value=False)
                return count

            channeldata = self.get_channel_data(row=row)
            id = unicode(channeldata['channel_id'])

            if len(id) > 0:
                if channel:
                    # Explicit channel requested: skip all others.
                    if not id == channel:
                        continue
                elif tested and check_key(results, 'last_tested'):
                    # Resume right after the last tested channel.
                    if unicode(results['last_tested']) == id:
                        last_tested_found = True
                        continue
                    elif last_tested_found:
                        pass
                    else:
                        continue

                if check_key(results, id) and not tested and not first:
                    continue

                livebandwidth = 0
                replaybandwidth = 0
                live = 'false'
                replay = 'false'
                epg = 'false'
                guide = 'false'

                # Don't interfere if the user played something in the
                # last 5 minutes.
                if settings.getInt(key='_last_playing') > int(time.time() -
                                                              300):
                    if test_run:
                        self.update_prefs()

                    settings.setBool(key='_test_running', value=False)
                    return 5

                playdata = self.play_url(type='channel',
                                         channel=id,
                                         id=channeldata['asset_id'],
                                         test=True)

                if first and not self._last_login_success:
                    if test_run:
                        self.update_prefs()

                    settings.setBool(key='_test_running', value=False)
                    return 5

                if len(playdata['path']) > 0:
                    # Fetch the live manifest to measure peak bandwidth.
                    CDMHEADERS = CONST_BASE_HEADERS
                    CDMHEADERS['User-Agent'] = user_agent
                    playdata['path'] = playdata['path'].split("&", 1)[0]
                    self._session2 = Session(headers=CDMHEADERS)
                    resp = self._session2.get(playdata['path'])

                    if resp.status_code == 200:
                        livebandwidth = find_highest_bandwidth(
                            xml=resp.text)
                        live = 'true'

                if check_key(results, id) and first and not tested:
                    first = False

                    if live == 'true':
                        continue
                    else:
                        if test_run:
                            self.update_prefs()

                        settings.setBool(key='_test_running', value=False)
                        return 5

                first = False
                counter = 0

                # Abort-aware pause between the live and replay probes.
                while not self._abortRequested and not xbmc.Monitor(
                ).abortRequested() and counter < 5:
                    if self._abortRequested or xbmc.Monitor().waitForAbort(
                            1):
                        self._abortRequested = True
                        break

                    counter += 1

                if settings.getInt(
                        key='_last_playing') > int(time.time() - 300):
                    if test_run:
                        self.update_prefs()

                    settings.setBool(key='_test_running', value=False)
                    return 5

                if self._abortRequested or xbmc.Monitor().abortRequested():
                    return 5

                # Use yesterday's trending programme to test replay.
                program_url = '{api_url}/TRAY/AVA/TRENDING/YESTERDAY?maxResults=1&filter_channelIds={channel}'.format(
                    api_url=self._api_url, channel=channeldata['channel_id'])
                data = self.download(url=program_url,
                                     type='get',
                                     code=[200],
                                     data=None,
                                     json_data=False,
                                     data_return=True,
                                     return_json=True,
                                     retry=False,
                                     check_data=True)

                if data and check_key(
                        data['resultObj'], 'containers') and check_key(
                            data['resultObj']['containers'][0], 'id'):
                    if settings.getInt(
                            key='_last_playing') > int(time.time() - 300):
                        if test_run:
                            self.update_prefs()

                        settings.setBool(key='_test_running', value=False)
                        return 5

                    playdata = self.play_url(
                        type='program',
                        channel=id,
                        id=data['resultObj']['containers'][0]['id'],
                        test=True)

                    if len(playdata['path']) > 0:
                        CDMHEADERS = CONST_BASE_HEADERS
                        CDMHEADERS['User-Agent'] = user_agent
                        playdata['path'] = playdata['path'].split(
                            "&min_bitrate", 1)[0]
                        self._session2 = Session(headers=CDMHEADERS)
                        resp = self._session2.get(playdata['path'])

                        if resp.status_code == 200:
                            replaybandwidth = find_highest_bandwidth(
                                xml=resp.text)
                            replay = 'true'

                # A local replay cache implies guide data; epg needs both
                # the guide file and a working live stream.
                if os.path.isfile(ADDON_PROFILE + id + '_replay.json'):
                    guide = 'true'

                    if live == 'true':
                        epg = 'true'

                results[id] = {
                    'id': id,
                    'live': live,
                    'replay': replay,
                    'livebandwidth': livebandwidth,
                    'replaybandwidth': replaybandwidth,
                    'epg': epg,
                    'guide': guide,
                }

                results['last_tested'] = id

                if not self._abortRequested:
                    write_file(file="channel_test.json",
                               data=results,
                               isJSON=True)

                test_run = True
                counter = 0

                # Longer abort-aware pause before the next channel.
                while not self._abortRequested and not xbmc.Monitor(
                ).abortRequested() and counter < 15:
                    if self._abortRequested or xbmc.Monitor().waitForAbort(
                            1):
                        self._abortRequested = True
                        break

                    counter += 1

                if settings.getInt(
                        key='_last_playing') > int(time.time() - 300):
                    if test_run:
                        self.update_prefs()

                    settings.setBool(key='_test_running', value=False)
                    return 5

                if self._abortRequested or xbmc.Monitor().abortRequested():
                    return 5

                count += 1
    except:
        # Any unexpected failure still syncs prefs and reports "busy".
        if test_run:
            self.update_prefs()

        count = 5

    settings.setBool(key='_test_running', value=False)

    if self._debug_mode:
        log.debug('Execution Done: api.test_channels')

    return count
def vod_season(self, id):
    """Return the episode list of a VOD season, or None on failure.

    Results are served from a 10-minute on-disk cache when enabled. Only
    the first 'SD_DASH_PR'/'MASTER' asset of each episode is kept, and
    episode numbers are de-duplicated.
    """
    if self._debug_mode:
        log.debug('Executing: api.vod_season')
        log.debug('Vars: id={id}'.format(id=id))

    season = []
    episodes = []  # episodeNumbers already emitted, to skip duplicates

    program_url = '{api_url}/CONTENT/DETAIL/BUNDLE/{id}'.format(
        api_url=self._api_url, id=id)
    file = "cache" + os.sep + "vod_season_" + unicode(id) + ".json"

    # Serve from disk while the cached copy is younger than 10 minutes.
    if self._enable_cache and not is_file_older_than_x_minutes(
            file=ADDON_PROFILE + file, minutes=10):
        data = load_file(file=file, isJSON=True)
    else:
        data = self.download(url=program_url,
                             type='get',
                             code=[200],
                             data=None,
                             json_data=False,
                             data_return=True,
                             return_json=True,
                             retry=True,
                             check_data=True)

        if data and check_key(data['resultObj'],
                              'containers') and self._enable_cache:
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data['resultObj'], 'containers'):
        if self._debug_mode:
            log.debug('Failure to retrieve expected data')
            log.debug('Execution Done: api.vod_season')

        return None

    for row in data['resultObj']['containers']:
        for currow in row['containers']:
            # Only unseen EPISODE entries that carry season metadata.
            if check_key(currow, 'metadata') and check_key(
                    currow['metadata'], 'season') and currow['metadata'][
                        'contentSubtype'] == 'EPISODE' and not currow[
                            'metadata']['episodeNumber'] in episodes:
                asset_id = ''

                # Pick the playable master asset for this episode.
                for asset in currow['assets']:
                    if check_key(
                            asset, 'videoType'
                    ) and asset['videoType'] == 'SD_DASH_PR' and check_key(
                            asset, 'assetType'
                    ) and asset['assetType'] == 'MASTER':
                        asset_id = asset['assetId']
                        break

                episodes.append(currow['metadata']['episodeNumber'])

                season.append({
                    'id':
                    currow['metadata']['contentId'],
                    'assetid':
                    asset_id,
                    'duration':
                    currow['metadata']['duration'],
                    'title':
                    currow['metadata']['episodeTitle'],
                    'episodeNumber':
                    '{season}.{episode}'.format(
                        season=currow['metadata']['season'],
                        episode=currow['metadata']['episodeNumber']),
                    'desc':
                    currow['metadata']['shortDescription'],
                    'image':
                    currow['metadata']['pictureUrl']
                })

    if self._debug_mode:
        log.debug('Execution Done: api.vod_season')

    return season
def process_vod_content(data, start=0, search=None, type=None):
    """Convert raw VOD rows into plugin items.

    Args:
        data: iterable of VOD rows (dicts with at least 'id' and 'title').
        start: number of rows to skip (pagination offset).
        search: optional fuzzy query; rows scoring <= 160 total are dropped.
        type: section label appended to matching titles during search.

    Returns:
        dict with 'items' (max 50 plugin.Item objects) and 'count' (rows
        consumed so far, for pagination).
    """
    subscription = load_file(file='vod_subscription.json', isJSON=True)
    start = int(start)
    items = []
    count = 0
    item_count = 0

    if sys.version_info >= (3, 0):
        subscription = list(subscription)

    for currow in data:
        if item_count == 50:
            break

        # Skip rows before the pagination offset.
        if count < start:
            count += 1
            continue

        count += 1

        if not check_key(currow, 'id') or not check_key(currow, 'title'):
            continue

        id = currow['id']
        label = currow['title']

        # Only content included in the user's subscription is listed.
        if not int(id) in subscription:
            continue

        properties = []

        if search:
            fuzz_set = fuzz.token_set_ratio(label, search)
            fuzz_partial = fuzz.partial_ratio(label, search)
            fuzz_sort = fuzz.token_sort_ratio(label, search)

            if (fuzz_set + fuzz_partial + fuzz_sort) > 160:
                # BUGFIX: these ranking scores used to be clobbered by a
                # later unconditional 'properties = []'; they must survive
                # so search results can be ordered by relevance.
                properties = {
                    "fuzz_set": fuzz_set,
                    "fuzz_sort": fuzz_sort,
                    "fuzz_partial": fuzz_partial,
                    "fuzz_total": fuzz_set + fuzz_partial + fuzz_sort
                }
                label = label + " (" + type + ")"
            else:
                continue

        description = ''
        program_image_large = ''
        duration = 0

        if check_key(currow, 'desc'):
            description = currow['desc']

        if check_key(currow, 'duration'):
            duration = int(currow['duration'])

        if check_key(currow, 'image'):
            program_image_large = currow['image']

        if not check_key(currow, 'type'):
            continue

        if currow['type'] == "show":
            path = plugin.url_for(func_or_url=vod_series,
                                  label=label,
                                  description=description,
                                  image=program_image_large,
                                  id=id)
            info = {'plot': description}
            playable = False
        else:
            path = plugin.url_for(func_or_url=play_video,
                                  type='vod',
                                  id=id,
                                  duration=duration,
                                  _is_live=False)
            info = {
                'plot': description,
                'duration': duration,
                'mediatype': 'video'
            }
            playable = True

        items.append(
            plugin.Item(
                label=label,
                properties=properties,
                info=info,
                art={'thumb': program_image_large},
                path=path,
                playable=playable,
            ))

        item_count += 1

    return {'items': items, 'count': count}
def process_vod_content(data, start=0, search=None, type=None):
    """Convert VOD rows from the EPG database into plugin items.

    Args:
        data: name of the EPG table to read (rows ordered by title).
        start: pagination offset passed to the SQL query.
        search: optional fuzzy query; rows scoring <= 160 total are dropped.
        type: section label appended to matching titles during search.

    Returns:
        dict with 'items' (max 50), 'count' (items emitted), 'count2'
        (rows consumed) and 'total' (estimated total, for pagination).
    """
    profile_settings = load_profile(profile_id=1)
    subscription = load_file(file='vod_subscription.json', isJSON=True)

    start = int(start)
    items = []
    count = start
    item_count = 0

    if subscription and sys.version_info >= (3, 0):
        subscription = list(subscription)

    query = "SELECT * FROM `{table}` ORDER BY title ASC LIMIT 999999 OFFSET {start}".format(table=data, start=start)
    data = query_epg(query=query, return_result=True, return_insert=False, commit=False)

    if not data:
        return {'items': items, 'count': item_count, 'count2': count, 'total': 0}

    for row in data:
        if item_count == 50:
            break

        count += 1
        id = row['id']
        label = row['title']

        # Only content included in the user's subscription is listed.
        if subscription and not int(id) in subscription:
            continue

        properties = []

        if search:
            fuzz_set = fuzz.token_set_ratio(label, search)
            fuzz_partial = fuzz.partial_ratio(label, search)
            fuzz_sort = fuzz.token_sort_ratio(label, search)

            if (fuzz_set + fuzz_partial + fuzz_sort) > 160:
                # BUGFIX: these ranking scores used to be clobbered by a
                # later unconditional 'properties = []'; they must survive
                # so search results can be ordered by relevance.
                properties = {
                    "fuzz_set": fuzz_set,
                    "fuzz_sort": fuzz_sort,
                    "fuzz_partial": fuzz_partial,
                    "fuzz_total": fuzz_set + fuzz_partial + fuzz_sort
                }
                label = label + " (" + type + ")"
            else:
                continue

        item_count += 1

        description = row['description']
        duration = 0

        if row['duration'] and len(unicode(row['duration'])) > 0:
            duration = int(row['duration'])

        program_image = row['icon']
        program_image_large = row['icon']

        if row['type'] == "show":
            path = plugin.url_for(func_or_url=vod_series, label=label, description=description, image=program_image_large, id=id)
            info = {'plot': description}
            playable = False
        else:
            path = plugin.url_for(func_or_url=play_video, type='vod', channel=None, id=id)
            info = {'plot': description, 'duration': duration, 'mediatype': 'video'}
            playable = True

        items.append(plugin.Item(
            label=label,
            properties=properties,
            info=info,
            art={'thumb': program_image, 'fanart': program_image_large},
            path=path,
            playable=playable,
        ))

    if item_count == 50:
        # A full page: estimate that more rows remain beyond this window.
        total = int(len(data) + count)
    else:
        total = count

    return {'items': items, 'count': item_count, 'count2': count, 'total': total}
def process_watchlist_listing(data, id=None):
    """Convert watchlist listing rows into playable plugin items.

    Entries that ended more than 7 days ago are skipped; labels are
    prefixed with a locale-aware start time and suffixed with the station
    name when known. When *id* is given, an add-to-watchlist context menu
    entry is attached to each item.
    """
    items = []

    # Map station id -> station title for the label suffixes below.
    channeldata = {}
    stations = load_file(file='channels.json', isJSON=True)

    if stations:
        for row in stations:
            channeldata[row['stationSchedules'][0]['station']['id']] = row['stationSchedules'][0]['station']['title']

    for row in data['listings']:
        context = []

        if not check_key(row, 'program'):
            continue

        currow = row['program']

        if not check_key(currow, 'title') or not check_key(row, 'id'):
            continue

        duration = 0

        if check_key(row, 'endTime') and check_key(row, 'startTime'):
            # Schedule times arrive in milliseconds since the epoch.
            startsplit = int(row['startTime']) // 1000
            endsplit = int(row['endTime']) // 1000
            duration = endsplit - startsplit

            startT = datetime.datetime.fromtimestamp(startsplit)
            startT = convert_datetime_timezone(startT, "UTC", "UTC")
            endT = datetime.datetime.fromtimestamp(endsplit)
            endT = convert_datetime_timezone(endT, "UTC", "UTC")

            # Replay window: drop anything that ended over 7 days ago.
            if endT < (datetime.datetime.now(pytz.timezone("UTC")) - datetime.timedelta(days=7)):
                continue

            # Dutch UI gets hand-formatted day/month names; everything
            # else uses the locale's strftime output.
            if xbmc.getLanguage(xbmc.ISO_639_1) == 'nl':
                label = '{weekday} {day} {month} {yearhourminute} '.format(weekday=date_to_nl_dag(startT), day=startT.strftime("%d"), month=date_to_nl_maand(startT), yearhourminute=startT.strftime("%Y %H:%M"))
            else:
                label = startT.strftime("%A %d %B %Y %H:%M ").capitalize()

            label += currow['title']
        else:
            label = currow['title']

        if check_key(channeldata, row['stationId']):
            label += ' ({station})'.format(station=channeldata[row['stationId']])

        if id:
            context.append((_.ADD_TO_WATCHLIST, 'RunPlugin({context_url})'.format(context_url=plugin.url_for(func_or_url=add_to_watchlist, id=id, type="group")), ))

        description = ''
        image = ''

        if check_key(currow, 'description'):
            description = currow['description']

        if check_key(currow, 'duration'):
            # Program metadata duration overrides the schedule-derived one.
            duration = int(currow['duration'])

        if check_key(currow, 'images'):
            image = get_image("boxart", currow['images'])

        items.append(plugin.Item(
            label = label,
            info = {
                'plot': description,
                'duration': duration,
                'mediatype': 'video',
            },
            art = {'thumb': image},
            path = plugin.url_for(func_or_url=play_video, type="program", id=row['id'], duration=duration, _is_live=False),
            playable = True,
            context = context
        ))

    return items
def vod_season(self, id):
    """Return the episodes of a VOD season, or None when unavailable."""
    profile_settings = load_profile(profile_id=1)

    program_url = '{api_url}/CONTENT/DETAIL/BUNDLE/{id}'.format(api_url=profile_settings['api_url'], id=id)
    file = "cache" + os.sep + "vod_season_" + unicode(id) + ".json"

    if settings.getBool(key='enable_cache') and not is_file_older_than_x_minutes(file=ADDON_PROFILE + file, minutes=10):
        # Cached copy is younger than 10 minutes: use it.
        data = load_file(file=file, isJSON=True)
    else:
        if not self.get_session():
            return None

        download = self.download(url=program_url, type='get', headers=None, data=None, json_data=True, return_json=True)
        data = download['data']
        resp = download['resp']

        response_ok = (resp and resp.status_code == 200 and data
                       and check_key(data, 'resultCode') and data['resultCode'] == 'OK'
                       and check_key(data, 'resultObj') and check_key(data['resultObj'], 'containers'))

        if response_ok and settings.getBool(key='enable_cache'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data['resultObj'], 'containers'):
        return None

    season = []
    seen_episodes = []

    for container in data['resultObj']['containers']:
        for entry in container['containers']:
            if not check_key(entry, 'metadata') or not check_key(entry['metadata'], 'season'):
                continue

            metadata = entry['metadata']

            # Only unseen EPISODE entries are emitted.
            if metadata['contentSubtype'] != 'EPISODE' or metadata['episodeNumber'] in seen_episodes:
                continue

            # Pick the playable master asset for this episode.
            asset_id = ''
            for asset in entry['assets']:
                if check_key(asset, 'videoType') and asset['videoType'] == 'SD_DASH_PR' and check_key(asset, 'assetType') and asset['assetType'] == 'MASTER':
                    asset_id = asset['assetId']
                    break

            seen_episodes.append(metadata['episodeNumber'])

            season.append({
                'id': metadata['contentId'],
                'assetid': asset_id,
                'duration': metadata['duration'],
                'title': metadata['episodeTitle'],
                'episodeNumber': '{season}.{episode}'.format(season=metadata['season'], episode=metadata['episodeNumber']),
                'desc': metadata['shortDescription'],
                'image': metadata['pictureUrl']
            })

    return season
def test_channels(self, tested=False, channel=None):
    """Probe channels for live/replay playability and record the results.

    Walks the channel list, attempts a live stream and a replay stream for
    each channel, and writes the outcome (plus measured bandwidths) to
    channel_test.json. At most 5 channels are tested per run (1 when
    ``tested`` is set, which resumes after the last tested channel).
    ``channel`` restricts the run to a single channel id.

    Returns the number of channels tested; 5 doubles as the "stop/abort"
    sentinel used by the callers of this background task.
    """
    if self._debug_mode:
        log.debug('Executing: api.test_channels')
        log.debug('Vars: tested={tested}, channel={channel}'.format(tested=tested, channel=channel))

    if channel:
        channel = unicode(channel)

    try:
        # Tests are skipped entirely when login failed or the user disabled them.
        if not self._last_login_success or not settings.getBool(key='run_tests'):
            return 5

        settings.setBool(key='_test_running', value=True)
        channels = load_file(file="channels.json", isJSON=True)
        results = load_file(file="channel_test.json", isJSON=True)
        count = 0
        first = True
        last_tested_found = False
        test_run = False
        user_agent = settings.get(key='_user_agent')

        if not results:
            results = {}

        for row in channels:
            # Per-run budget: 5 channels normally, 1 in resume ('tested') mode.
            if count == 5 or (count == 1 and tested):
                if test_run:
                    self.update_prefs()

                settings.setBool(key='_test_running', value=False)
                return count

            channeldata = self.get_channel_data(row=row, channelno=1)
            id = unicode(channeldata['channel_id'])

            if len(id) > 0:
                if channel:
                    # Single-channel mode: skip everything else.
                    if not id == channel:
                        continue
                elif tested and check_key(results, 'last_tested'):
                    # Resume mode: skip forward until the previously tested id,
                    # then continue with the channel after it.
                    if unicode(results['last_tested']) == id:
                        last_tested_found = True
                        continue
                    elif last_tested_found:
                        pass
                    else:
                        continue

                # Already-tested channels are only retried as the very first
                # candidate of a run (see the 'first' re-check below).
                if check_key(results, id) and not tested and not first:
                    continue

                livebandwidth = 0
                replaybandwidth = 0
                live = 'false'
                replay = 'false'
                epg = 'false'
                guide = 'false'

                # Never test while the user was playing something in the last
                # 5 minutes -- the probe would compete for bandwidth.
                if settings.getInt(key='_last_playing') > int(time.time() - 300):
                    if test_run:
                        self.update_prefs()

                    settings.setBool(key='_test_running', value=False)
                    return 5

                playdata = self.play_url(type='channel', channel=id, id=id, test=True)

                if first and not self._last_login_success:
                    if test_run:
                        self.update_prefs()

                    settings.setBool(key='_test_running', value=False)
                    return 5

                if len(playdata['path']) > 0:
                    # NOTE(review): this assigns the shared CONST_BASE_HEADERS
                    # dict and mutates it in place -- presumably intentional
                    # here, but it changes the constant for later users.
                    CDMHEADERS = CONST_BASE_HEADERS
                    CDMHEADERS['User-Agent'] = user_agent
                    self._session2 = Session(headers=CDMHEADERS)
                    resp = self._session2.get(playdata['path'])

                    if resp.status_code == 200:
                        livebandwidth = find_highest_bandwidth(xml=resp.text)
                        live = 'true'

                # First candidate that already has a result: if it still plays
                # live, move on to the next channel; if it broke, abort the run.
                if check_key(results, id) and first and not tested:
                    first = False

                    if live == 'true':
                        continue
                    else:
                        if test_run:
                            self.update_prefs()

                        settings.setBool(key='_test_running', value=False)
                        return 5

                first = False
                counter = 0

                # Cooldown between the live and replay probes, abort-aware.
                while not self._abortRequested and not xbmc.Monitor().abortRequested() and counter < 5:
                    if self._abortRequested or xbmc.Monitor().waitForAbort(1):
                        self._abortRequested = True
                        break

                    counter += 1

                if settings.getInt(key='_last_playing') > int(time.time() - 300):
                    if test_run:
                        self.update_prefs()

                    settings.setBool(key='_test_running', value=False)
                    return 5

                if self._abortRequested or xbmc.Monitor().abortRequested():
                    return 5

                self._session.headers = CONST_BASE_HEADERS
                self._session.headers.update({'Authorization': 'Bearer ' + self._session_token})

                # Fetch yesterday's EPG for this channel to find a replayable program.
                yesterday = datetime.datetime.now() - datetime.timedelta(1)
                fromtime = datetime.datetime.strftime(yesterday, '%Y-%m-%dT%H:%M:%S.000Z')
                tilltime = datetime.datetime.strftime(yesterday, '%Y-%m-%dT%H:%M:59.999Z')

                program_url = "{api_url}/schedule?channels={id}&from={fromtime}&until={tilltime}".format(api_url=CONST_DEFAULT_API, id=id, fromtime=fromtime, tilltime=tilltime);
                data = self.download(url=program_url, type="get", code=[200], data=None, json_data=False, data_return=True, return_json=True, retry=True, check_data=False, allow_redirects=False)

                # NOTE(review): data['epg'][0] assumes a non-empty list -- an
                # empty 'epg' would raise IndexError (caught by the broad
                # except below and reported as count = 5).
                if data and check_key(data, 'epg') and check_key(data['epg'][0], 'id'):
                    if settings.getInt(key='_last_playing') > int(time.time() - 300):
                        if test_run:
                            self.update_prefs()

                        settings.setBool(key='_test_running', value=False)
                        return 5

                    playdata = self.play_url(type='program', channel=id, id=data['epg'][0]['id'], test=True)

                    if len(playdata['path']) > 0:
                        CDMHEADERS = CONST_BASE_HEADERS
                        CDMHEADERS['User-Agent'] = user_agent
                        self._session2 = Session(headers=CDMHEADERS)
                        resp = self._session2.get(playdata['path'])

                        if resp.status_code == 200:
                            replaybandwidth = find_highest_bandwidth(xml=resp.text)
                            replay = 'true'

                # A per-channel replay guide file implies guide (and, when live
                # works, EPG) support.
                if os.path.isfile(ADDON_PROFILE + id + '_replay.json'):
                    guide = 'true'

                    if live == 'true':
                        epg = 'true'

                results[id] = {
                    'id': id,
                    'live': live,
                    'replay': replay,
                    'livebandwidth': livebandwidth,
                    'replaybandwidth': replaybandwidth,
                    'epg': epg,
                    'guide': guide,
                }

                results['last_tested'] = id

                if not self._abortRequested:
                    write_file(file="channel_test.json", data=results, isJSON=True)

                test_run = True
                counter = 0

                # Longer abort-aware pause before moving to the next channel.
                while not self._abortRequested and not xbmc.Monitor().abortRequested() and counter < 15:
                    if self._abortRequested or xbmc.Monitor().waitForAbort(1):
                        self._abortRequested = True
                        break

                    counter += 1

                if settings.getInt(key='_last_playing') > int(time.time() - 300):
                    if test_run:
                        self.update_prefs()

                    settings.setBool(key='_test_running', value=False)
                    return 5

                if self._abortRequested or xbmc.Monitor().abortRequested():
                    return 5

                count += 1
    except:
        # NOTE(review): bare except deliberately swallows any probe failure
        # and reports the abort sentinel; consider narrowing and logging.
        if test_run:
            self.update_prefs()

        count = 5

    settings.setBool(key='_test_running', value=False)

    if self._debug_mode:
        log.debug('Execution Done: api.test_channels')

    return count
def vod_season(self, id):
    """Return the episode list for a VOD title via QueryEpisodeList, or None.

    When caching is enabled and the cache file is younger than 10 minutes
    the on-disk copy is used; otherwise the episode list is downloaded and,
    on a fully valid response, written back to the cache.
    """
    season = []
    file = "cache" + os.sep + "vod_season_" + unicode(id) + ".json"

    if settings.getBool(key='enable_cache') and not is_file_older_than_x_minutes(file=ADDON_PROFILE + file, minutes=10):
        data = load_file(file=file, isJSON=True)
    else:
        profile_settings = load_profile(profile_id=1)

        # FIX: work on a copy -- the original assigned CONST_BASE_HEADERS and
        # mutated it in place, leaking Content-Type / X_CSRFToken into every
        # later request built from the same shared constant.
        headers = dict(CONST_BASE_HEADERS)
        headers.update({'Content-Type': 'application/json'})
        headers.update({'X_CSRFToken': profile_settings['csrf_token']})

        session_post_data = {
            'VODID': unicode(id),
            'offset': '0',
            'count': '35',
        }

        seasons_url = '{base_url}/VSP/V3/QueryEpisodeList?from=throughMSAAccess'.format(base_url=CONST_BASE_URL)
        download = self.download(url=seasons_url, type='post', headers=headers, data=session_post_data, json_data=True, return_json=True)
        data = download['data']
        resp = download['resp']

        if resp and resp.status_code == 200 and data and check_key(data, 'result') and check_key(data['result'], 'retCode') and data['result']['retCode'] == '000000000' and check_key(data, 'episodes') and settings.getBool(key='enable_cache'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data, 'episodes'):
        return None

    for row in data['episodes']:
        if not check_key(row, 'VOD') or not check_key(row['VOD'], 'ID') or not check_key(row['VOD'], 'name') or not check_key(row, 'sitcomNO'):
            continue

        vod = row['VOD']
        image = ''
        duration = 0

        # FIX: guard against an empty mediaFiles list -- indexing [0]
        # unconditionally used to raise IndexError.
        if not check_key(vod, 'mediaFiles') or len(vod['mediaFiles']) == 0 or not check_key(vod['mediaFiles'][0], 'ID'):
            continue

        if check_key(vod['mediaFiles'][0], 'elapseTime'):
            duration = vod['mediaFiles'][0]['elapseTime']

        # FIX: 'posters' may exist but be empty; only index when non-empty.
        if check_key(vod, 'picture') and check_key(vod['picture'], 'posters') and len(vod['picture']['posters']) > 0:
            image = vod['picture']['posters'][0]

        season.append({
            'id': vod['ID'],
            'media_id': vod['mediaFiles'][0]['ID'],
            'duration': duration,
            'title': vod['name'],
            'episodeNumber': row['sitcomNO'],
            'desc': '',
            'image': image
        })

    return season
def process_online_search(data):
    """Turn an online-search response into an interleaved list of plugin items.

    VOD ("moviesAndSeries") and ReplayTV ("program") matches are built
    separately, then zipped together alternately so both kinds appear near
    the top of the result list; the longer list's remainder is appended.

    Returns a list of plugin.Item objects.
    """
    items_vod = []
    items_program = []
    vod_links = {}  # id -> {'seasons': ..., 'duration': ..., 'desc': ...} from the local VOD catalog

    if settings.getBool('showMoviesSeries') == True:
        vod_data = load_file(file='vod.json', isJSON=True)

        # Index the local VOD catalog so search hits can be enriched with
        # season lists, durations and descriptions.
        for vod_type in list(vod_data):
            for row in vod_data[vod_type]:
                if not check_key(row, 'id'):
                    continue

                vod_links[row['id']] = {}

                if check_key(row, 'seasons'):
                    vod_links[row['id']]['seasons'] = row['seasons']

                if check_key(row, 'duration'):
                    vod_links[row['id']]['duration'] = row['duration']

                if check_key(row, 'desc'):
                    vod_links[row['id']]['desc'] = row['desc']

    for currow in list(data):
        if currow == "moviesAndSeries":
            if settings.getBool('showMoviesSeries') != True:
                continue

            entry_type = 'vod'
        else:
            entry_type = 'program'

        for row in data[currow]['entries']:
            context = []

            if not check_key(row, 'id') or not check_key(row, 'title'):
                continue

            item_id = row['id']
            label = row['title']
            mediatype = ''
            description = ''
            duration = 0
            program_image_large = ''

            # FIX: the return value of get_image() was discarded, so
            # program_image_large stayed '' and search results never showed
            # artwork.
            if check_key(row, 'images'):
                program_image_large = get_image("boxart", row['images'])

            playable = False
            path = ''

            if check_key(vod_links, row['id']) and check_key(vod_links[row['id']], 'desc'):
                description = vod_links[row['id']]['desc']

            if entry_type == 'vod':
                label += " (Movies and Series)"
            else:
                label += " (ReplayTV)"

            if check_key(row, 'groupType') and row['groupType'] == 'show':
                if check_key(row, 'episodeMatch') and check_key(row['episodeMatch'], 'seriesEpisodeNumber') and check_key(row['episodeMatch'], 'secondaryTitle'):
                    if len(description) == 0:
                        description += label

                    season = ''

                    if check_key(row, 'seriesNumber'):
                        season = "S" + row['seriesNumber']

                    description += " Episode Match: {season}E{episode} - {secondary}".format(season=season, episode=row['episodeMatch']['seriesEpisodeNumber'], secondary=row['episodeMatch']['secondaryTitle'])

                if entry_type == 'vod':
                    # Shows without a known season list cannot be browsed.
                    if not check_key(vod_links, row['id']) or not check_key(vod_links[row['id']], 'seasons'):
                        continue

                # Identical watchlist context entry on every surviving path;
                # hoisted here from the three duplicated branches.
                context.append((_.ADD_TO_WATCHLIST, 'RunPlugin({context_url})'.format(context_url=plugin.url_for(func_or_url=add_to_watchlist, id=item_id, type='group')), ))

                if entry_type == 'vod':
                    path = plugin.url_for(func_or_url=vod_series, label=label, description=description, image=program_image_large, seasons=json.dumps(vod_links[row['id']]['seasons']), mediagroupid=item_id)
                else:
                    path = plugin.url_for(func_or_url=watchlist_listing, label=label, description=description, image=program_image_large, id=item_id, search=True)
            else:
                context.append((_.ADD_TO_WATCHLIST, 'RunPlugin({context_url})'.format(context_url=plugin.url_for(func_or_url=add_to_watchlist, id=item_id, type='group')), ))

                if check_key(row, 'duration'):
                    duration = int(row['duration'])
                elif check_key(row, 'episodeMatch') and check_key(row['episodeMatch'], 'startTime') and check_key(row['episodeMatch'], 'endTime'):
                    duration = int(int(row['episodeMatch']['endTime']) - int(row['episodeMatch']['startTime'])) // 1000
                    item_id = row['episodeMatch']['id']
                elif check_key(vod_links, row['id']) and check_key(vod_links[row['id']], 'duration'):
                    duration = vod_links[row['id']]['duration']

                path = plugin.url_for(func_or_url=play_video, type=entry_type, id=item_id, duration=duration, _is_live=False)
                playable = True
                mediatype = 'video'

            item = plugin.Item(
                label = label,
                info = {
                    'plot': description,
                    'duration': duration,
                    'mediatype': mediatype,
                },
                art = {'thumb': program_image_large},
                path = path,
                playable = playable,
                context = context
            )

            if entry_type == "vod":
                items_vod.append(item)
            else:
                items_program.append(item)

    # Interleave program/vod results: program, vod, program, vod, ... then
    # append whatever remains of the longer list.
    num = min(len(items_program), len(items_vod))
    items = [None]*(num*2)
    items[::2] = items_program[:num]
    items[1::2] = items_vod[:num]
    items.extend(items_program[num:])
    items.extend(items_vod[num:])

    return items
def install_widevine(reinstall=False):
    """Ensure the Widevine CDM for inputstream.adaptive is present.

    Android ships its own CDM, UWP and 64-bit ARM are unsupported, and a
    download is only attempted when the version slug changed, a reinstall
    was requested, or the last check is more than a day old.

    Returns True on success (or when nothing needed doing), False when the
    download failed; raises InputStreamError for unsupported platforms.
    """
    addon = get_ia_addon(required=True)
    system, arch = _get_system_arch()
    kodi_ver = get_kodi_version()

    # Platform gates, expressed as guard clauses.
    if kodi_ver < 18:
        raise InputStreamError(_(_.IA_KODI18_REQUIRED, system=system))
    if system == 'Android':
        return True
    if system == 'UWP':
        raise InputStreamError(_.IA_UWP_ERROR)
    if 'aarch64' in arch or 'arm64' in arch:
        raise InputStreamError(_.IA_AARCH64_ERROR)

    checked_at = int(addon.getSetting('_last_check') or 0)
    slug = system + arch + str(kodi_ver) + addon.getAddonInfo('version')

    # A changed slug (OS/arch/Kodi/addon version) forces a reinstall.
    if slug != addon.getSetting(IA_VERSION_KEY):
        reinstall = True

    # Nothing to do: same slug, no reinstall, checked within the last day.
    if not reinstall and time.time() - checked_at < 86400:
        return True

    addon.setSetting(IA_VERSION_KEY, '')
    addon.setSetting('_last_check', str(int(time.time())))

    cfg = load_file(file="settings.json", isJSON=True)
    widevine = cfg['widevine']['widevine']
    platform_info = widevine['platforms'].get(system + arch, None)

    if not platform_info:
        raise InputStreamError(_(_.IA_NOT_SUPPORTED, system=system, arch=arch, kodi_version=kodi_ver))

    dest_dir = xbmc.translatePath(addon.getSetting('DECRYPTERPATH'))

    if sys.version_info < (3, 0):
        dest_dir = dest_dir.decode("utf-8")

    # ARM builds publish an absolute zip URL; others are relative to base_url.
    url = platform_info['zip'] if 'arm' in arch else widevine['base_url'] + platform_info['zip']
    target = os.path.join(dest_dir, platform_info['dst'])

    for needed_dir in (dest_dir, ADDON_PROFILE + "tmp"):
        if not os.path.isdir(needed_dir):
            os.makedirs(needed_dir)

    if not _download(url, platform_info['dst'], target, arch, platform_info['md5']):
        return False

    addon.setSetting(IA_VERSION_KEY, slug)

    if reinstall:
        gui.ok(_.IA_WV_INSTALL_OK)

    return True