def _restore_network_bandwidth(**kwargs):
    """Restore Kodi's network bandwidth limit and the ffmpegdirect stream
    bandwidth from values previously saved in the addon profile, then remove
    the saved marker files (best-effort cleanup).

    FIX: the original used bare ``except:`` which also swallows SystemExit
    and KeyboardInterrupt; narrowed to the exceptions that can actually occur.
    """
    bandwidth = load_file('bandwidth', isJSON=False)

    if bandwidth:
        method = 'settings.SetSettingValue'
        json_rpc(method, {"setting": "network.bandwidth", "value": "{}".format(bandwidth)})

        try:
            os.remove(os.path.join(ADDON_PROFILE, 'bandwidth'))
        except OSError:
            pass  # already removed / not removable: cleanup is best-effort

    bandwidth2 = load_file('bandwidth2', isJSON=False)

    if bandwidth2:
        try:
            xbmcaddon.Addon('inputstream.ffmpegdirect').setSetting('streamBandwidth', str(bandwidth2))
        except Exception:
            pass  # inputstream.ffmpegdirect may not be installed

        try:
            os.remove(os.path.join(ADDON_PROFILE, 'bandwidth2'))
        except OSError:
            pass
def api_vod_subscription():
    """Refresh the cached list of VOD content ids covered by the subscription.

    The list is written to cache/vod_subscription.json and reused for one day.
    Returns True on success, False on a malformed API response and None when
    no session is available.

    FIX: the fresh-cache path previously discarded the loaded file and fell
    through, implicitly returning None; it now reports success explicitly.
    """
    file = "cache" + os.sep + "vod_subscription.json"

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=1):
        load_file(file=file, isJSON=True)
        return True

    if not api_get_session():
        return None

    subscription = []

    series_url = '{api_url}/TRAY/SEARCH/VOD?from=1&to=9999&filter_contentType=GROUP_OF_BUNDLES,VOD&filter_contentSubtype=SERIES,VOD&filter_contentTypeExtended=VOD&filter_excludedGenres=erotiek&filter_technicalPackages=10078,10081,10258,10255&dfilter_packages=matchSubscription&orderBy=activationDate&sortOrder=desc'.format(api_url=CONST_DEFAULT_API)

    download = api_download(url=series_url, type='get', headers=None, data=None, json_data=False, return_json=True)
    data = download['data']
    code = download['code']

    # Reject anything that is not a well-formed 'OK' result with containers.
    if not code or not code == 200 or not data or not check_key(data, 'resultCode') or not data['resultCode'] == 'OK' or not check_key(data, 'resultObj') or not check_key(data['resultObj'], 'containers'):
        return False

    for row in data['resultObj']['containers']:
        subscription.append(row['metadata']['contentId'])

    write_file(file=file, data=subscription, isJSON=True)

    return True
def change_group(id, type_tv_radio, **kwargs):
    """Let the user pick a group for a TV/radio channel, persist the choice
    and reopen the group picker menu.

    FIX: ``gui.select`` returns -1 when the dialog is cancelled; the original
    then did ``select_list[-1]`` and silently assigned the LAST group. A
    cancelled or out-of-range selection now leaves the preference untouched.
    Also guards against a missing groups file (load_file returning None).
    """
    if not id or len(str(id)) == 0:
        return False

    id = str(id)
    type_tv_radio = str(type_tv_radio)

    select_list = []

    if type_tv_radio == 'radio':
        groups = load_file('radio_groups.json', ext=False, isJSON=True)
        typestr = 'Radio'
    else:
        groups = load_file('tv_groups.json', ext=False, isJSON=True)
        typestr = 'TV'

    if not groups:
        groups = []

    # First entry is the type label itself (acts as the "default" group).
    select_list.append(typestr)

    for group in groups:
        select_list.append(group)

    selected = gui.select(_.SELECT_GROUP, select_list)

    if type_tv_radio == 'radio':
        prefs = load_radio_prefs(profile_id=1)
    else:
        prefs = load_prefs(profile_id=1)

    if selected is not None and 0 <= selected < len(select_list):
        try:
            prefs[id]['group'] = select_list[selected]
        except:
            pass

    if type_tv_radio == 'radio':
        save_radio_prefs(profile_id=1, prefs=prefs)
    else:
        save_prefs(profile_id=1, prefs=prefs)

    method = 'GUI.ActivateWindow'
    json_rpc(method, {"window": "videos", "parameters": ['plugin://' + str(ADDON_ID) + '/?_=group_picker_menu&type_tv_radio=' + type_tv_radio]})
def api_get_series_nfo():
    """Download the series-NFO zip and extract it into the cache directory.

    The extracted json is considered fresh for 0.45 days before re-download.
    NOTE(review): the cached ``data`` is loaded but never returned -- the
    function returns None on every path; callers appear to rely on the
    extracted files only. Behavior kept.

    FIX: ensure the tmp directory exists before writing the zip into it
    (api_get_genre_list already does this).
    """
    type = 'seriesnfo'
    type = encode32(txt=type)

    vod_url = '{dut_epg_url}/{type}.zip'.format(dut_epg_url=CONST_DUT_EPG, type=type)
    file = os.path.join("cache", "{type}.json".format(type=type))

    tmp_dir = os.path.join(ADDON_PROFILE, 'tmp')

    if not os.path.isdir(tmp_dir):
        os.makedirs(tmp_dir)

    tmp = os.path.join(tmp_dir, "{type}.zip".format(type=type))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.45):
        data = load_file(file=file, isJSON=True)
    else:
        resp = Session().get(vod_url, stream=True)

        if resp.status_code != 200:
            resp.close()
            return None

        with open(tmp, 'wb') as f:
            for chunk in resp.iter_content(chunk_size=SESSION_CHUNKSIZE):
                f.write(chunk)

        resp.close()

        extract_zip(file=tmp, dest=os.path.join(ADDON_PROFILE, "cache", ""))
def api_get_genre_list(type, add=1):
    """Return the genre list for *type* from a half-day disk cache, or None."""
    add = int(add)

    tmp_dir = os.path.join(ADDON_PROFILE, 'tmp')

    if not os.path.isdir(tmp_dir):
        os.makedirs(tmp_dir)

    if add == 1:
        type = type + 'genres'

    type = encode32(txt=type)

    genres_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=type)
    file = os.path.join("cache", "{type}.json".format(type=type))

    # Serve the cached copy while it is still fresh.
    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5):
        return load_file(file=file, isJSON=True)

    download = api_download(url=genres_url, type='get', headers=None, data=None, json_data=True, return_json=True)
    data = download['data']
    code = download['code']

    if not code or code != 200 or not data:
        return None

    write_file(file=file, data=data, isJSON=True)

    return data
def api_get_epg_by_date_channel(date, channel):
    """Return EPG data for one date/channel pair, cached on disk.

    Cache lifetime comes from CONST_MOD_CACHE when an entry exists for the
    key, otherwise half a day.
    """
    cache_key = '{date}_{channel}'.format(date=date, channel=channel)

    days = CONST_MOD_CACHE[str(cache_key)] if check_key(CONST_MOD_CACHE, str(cache_key)) else 0.5

    cache_key = encode32(txt=cache_key)

    epg_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=cache_key)
    file = os.path.join("cache", "{type}.json".format(type=cache_key))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=days):
        return load_file(file=file, isJSON=True)

    download = api_download(url=epg_url, type='get', headers=None, data=None, json_data=True, return_json=True)
    data = download['data']
    code = download['code']

    if not code or code != 200 or not data:
        return None

    write_file(file=file, data=data, isJSON=True)

    return data
def api_vod_seasons(type, id):
    """Return {'type': 'seasons', 'seasons': [...]} for a series id, or None.

    Valid API responses are cached for half a day.

    FIX: the post-download check indexed ``data['resultObj']`` without first
    verifying the key exists, so a truthy response missing 'resultObj' raised
    KeyError instead of returning None.
    """
    if not api_get_session():
        return None

    seasons = []

    program_url = '{api_url}/CONTENT/DETAIL/GROUP_OF_BUNDLES/{id}'.format(api_url=CONST_DEFAULT_API, id=id)

    type = "vod_seasons_" + unicode(id)
    encodedBytes = base64.b32encode(type.encode("utf-8"))
    type = unicode(encodedBytes, "utf-8")

    file = "cache" + os.sep + type + ".json"

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=program_url, type='get', headers=None, data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data and check_key(data, 'resultCode') and data['resultCode'] == 'OK' and check_key(data, 'resultObj') and check_key(data['resultObj'], 'containers'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data, 'resultObj') or not check_key(data['resultObj'], 'containers'):
        return None

    for row in data['resultObj']['containers']:
        for currow in row['containers']:
            if check_key(currow, 'metadata') and check_key(currow['metadata'], 'season') and unicode(currow['metadata']['contentSubtype']) == 'SEASON':
                seasons.append({'id': unicode(currow['metadata']['contentId']), 'seriesNumber': unicode(currow['metadata']['season']), 'description': unicode(currow['metadata']['shortDescription']), 'image': "{image_url}/vod/{image}/1920x1080.jpg?blurred=false".format(image_url=CONST_IMAGE_URL, image=unicode(currow['metadata']['pictureUrl']))})

    return {'type': 'seasons', 'seasons': seasons}
def api_vod_season(series, id, use_cache=True):
    """Return {'data': ..., 'cache': 0|1} with the episode list for a season.

    A half-day-old disk cache is used when *use_cache* is set; 'cache' in the
    result tells the caller which path was taken.

    FIX: removed an unused ``load_profile(profile_id=1)`` call that performed
    pointless I/O on every invocation.
    """
    type = "vod_season_{id}".format(id=id)
    type = encode32(type)

    file = os.path.join("cache", "{type}.json".format(type=type))

    cache = 0

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        data = load_file(file=file, isJSON=True)
        cache = 1
    else:
        season_url = '{mediaitems_url}?byMediaType=Episode%7CFeatureFilm&byParentId={id}&includeAdult=true&range=1-1000&sort=seriesEpisodeNumber|ASC'.format(mediaitems_url=CONST_URLS['mediaitems_url'], id=id)

        download = api_download(url=season_url, type='get', headers=None, data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

    return {'data': data, 'cache': cache}
def __init__(self, headers=None, cookies_key=None, save_cookies=True, base_url='{}', timeout=None, attempts=None):
    """Initialize the HTTP session with default headers, timeouts and
    optionally previously saved cookies.

    FIX: the original did ``base_headers = CONST_BASE_HEADERS`` and then
    ``update()``-ed it, mutating the shared module-level constant on every
    Session construction; a copy is taken instead.
    """
    super(Session, self).__init__()

    base_headers = dict(CONST_BASE_HEADERS)
    base_headers.update({'User-Agent': DEFAULT_USER_AGENT})

    if headers:
        base_headers.update(headers)

    self._headers = base_headers or {}
    self._cookies_key = cookies_key
    self._save_cookies = save_cookies
    self._base_url = base_url
    self._timeout = timeout or (5, 10)      # (connect, read) seconds
    self._attempts = attempts or 2

    self.headers.update(self._headers)

    if self._cookies_key:
        cookies = load_file(file='stream_cookies', isJSON=True)

        if not cookies:
            cookies = {}

        self.cookies.update(cookies)
def api_vod_download(type, start=0):
    """Return processed VOD listing data for one of the known feed types, or None."""
    if not api_get_session():
        return None

    # Feed-name -> URL template dispatch instead of an if/elif chain.
    url_templates = {
        "movies": '{base_url}/v6/tabs/GenreFilms?count=52&expand=true&expandlist=true&maxResults=52&offset={start}',
        "watchahead": '{base_url}/v6/tabs/VooruitKijken2?count=52&expand=true&expandlist=true&maxResults=52&offset={start}',
        "seriesbinge": '{base_url}/v6/tabs/SeriesBingewatch?count=52&expand=true&expandlist=true&maxResults=52&offset={start}',
        "mostviewed": '{base_url}/v6/tabs/MostViewed?count=52&expand=true&expandlist=true&maxResults=52&offset={start}',
        "tipfeed": '{base_url}/v6/tabs/Tipfeed?count=52&expand=true&expandlist=true&maxResults=52&offset={start}',
    }

    if type not in url_templates:
        return None

    url = url_templates[type].format(base_url=CONST_API_URL, start=start)

    file = "cache" + os.sep + "vod_" + type + "_" + unicode(start) + ".json"

    if settings.getBool(key='enable_cache') and not is_file_older_than_x_minutes(file=ADDON_PROFILE + file, minutes=10):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=url, type='get', headers=None, data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data and settings.getBool(key='enable_cache'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data, 'Items'):
        return None

    return api_process_vod(data=data)
def api_vod_seasons(type, id, use_cache=True):
    """Fetch the season overview for a series, with half-day disk caching.

    Returns {'data': ..., 'cache': 0|1}.
    """
    cache_name = encode32("vod_seasons_{id}".format(id=id))
    file = os.path.join("cache", "{type}.json".format(type=cache_name))

    # Strip the leading character of the id before calling the API.
    # NOTE(review): presumably a prefix tag -- confirm against callers.
    id = id[1:]

    cache = 0

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        cache = 1
        data = load_file(file=file, isJSON=True)
    else:
        headers = api_get_headers(personal=False)

        seasons_url = '{base_url}/api/v3/series/{series}'.format(base_url=CONST_URLS['base'], series=id)

        result = api_download(url=seasons_url, type='get', headers=headers, data=None, json_data=False, return_json=True)
        data = result['data']
        code = result['code']

        if code and code == 200 and data and check_key(data, 'title'):
            write_file(file=file, data=data, isJSON=True)

    return {'data': data, 'cache': cache}
def api_get_genre_list(type):
    """Return the genre list for *type*, using a 7-day disk cache, or None."""
    tmp_path = ADDON_PROFILE + 'tmp'

    if not os.path.isdir(tmp_path):
        os.makedirs(tmp_path)

    type = type + 'genres'
    encodedBytes = base64.b32encode(type.encode("utf-8"))
    type = str(encodedBytes, "utf-8")

    genres_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=type)
    file = "cache" + os.sep + "{type}.json".format(type=type)

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=7):
        return load_file(file=file, isJSON=True)

    download = api_download(url=genres_url, type='get', headers=None, data=None, json_data=True, return_json=True)
    data = download['data']
    code = download['code']

    if not code or code != 200 or not data:
        return None

    write_file(file=file, data=data, isJSON=True)

    return data
def add_group(type, **kwargs):
    """Prompt for a new group name, persist it sorted, and reopen the groups menu."""
    type = str(type)

    stored = load_file(type + '_groups.json', ext=False, isJSON=True)
    groups = list(stored) if stored else []

    name = gui.input(message=_.ADD_GROUP, default='').strip()

    # Reject empty names and names that collide with the type label itself.
    if name and len(str(name)) > 0 and name != str(type).lower():
        groups.append(name)
        write_file(type + '_groups.json', data=sorted(groups), ext=False, isJSON=True)

    method = 'GUI.ActivateWindow'
    json_rpc(method, {"window": "videos", "parameters": ["plugin://" + ADDON_ID + "/?_=groups_menu&type=" + type]})
def api_get_epg_by_date_channel(date, channel):
    """Return cached or freshly downloaded EPG data for one date+channel, or None."""
    key = '{date}_{channel}'.format(date=date, channel=channel)
    encodedBytes = base64.b32encode(key.encode("utf-8"))
    key = str(encodedBytes, "utf-8")

    epg_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=key)
    file = "cache" + os.sep + "{type}.json".format(type=key)

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        return load_file(file=file, isJSON=True)

    download = api_download(url=epg_url, type='get', headers=None, data=None, json_data=True, return_json=True)
    data = download['data']
    code = download['code']

    if not code or code != 200 or not data:
        return None

    write_file(file=file, data=data, isJSON=True)

    return data
def update_settings():
    # Refresh the stored `vars` row for profile 1: the API url from
    # settings.json and the user agent parsed into browser/os name+version.
    profile_settings = load_profile(profile_id=1)
    settingsJSON = load_file(file='settings.json', isJSON=True)

    try:
        api_url = settingsJSON['api_url']

        if len(api_url) == 0:
            api_url = CONST_DEFAULT_API
    except:
        # settings.json missing/unreadable or key absent: fall back to default.
        api_url = CONST_DEFAULT_API

    user_agent = profile_settings['user_agent']

    if len(user_agent) == 0:
        user_agent = DEFAULT_USER_AGENT

    browser_name = uaparser.detect(user_agent)['browser']['name']
    browser_version = uaparser.detect(user_agent)['browser']['version']
    os_name = uaparser.detect(user_agent)['os']['name']
    os_version = uaparser.detect(user_agent)['os']['version']

    # NOTE(review): the statement is built with str.format, not parameterized;
    # a quote in any value breaks the query. Values come from settings.json /
    # UA parsing -- consider parameterized queries if query_settings supports
    # them.
    query = "UPDATE `vars` SET `api_url`='{api_url}', `browser_name`='{browser_name}', `browser_version`='{browser_version}', `os_name`='{os_name}', `os_version`='{os_version}', `user_agent`='{user_agent}' WHERE profile_id={profile_id}".format(
        api_url=api_url, browser_name=browser_name, browser_version=browser_version, os_name=os_name, os_version=os_version, user_agent=user_agent, profile_id=1)

    query_settings(query=query, return_result=False, return_insert=False, commit=True)
def groups_menu(type, **kwargs):
    """Build the TV/Radio groups folder: an 'add group' entry plus one
    removable entry per stored group."""
    typestr = 'TV ' if type == 'tv' else 'Radio '

    folder = plugin.Folder(title=typestr + _.GROUPS)

    stored = load_file(type + '_groups.json', ext=False, isJSON=True)
    groups = list(stored) if stored else []

    folder.add_item(label=_(_.ADD_GROUP, _bold=True), path=plugin.url_for(func_or_url=add_group, type=type))

    for entry in groups:
        folder.add_item(label=_(entry, _bold=True), path=plugin.url_for(func_or_url=remove_group, type=type, name=entry))

    return folder
def update_settings():
    # Rebuild every service URL stored in the `vars` row for profile 1 from
    # the freshly downloaded settings.json.
    profile_settings = load_profile(profile_id=1)
    settingsJSON = load_file(file='settings.json', isJSON=True)

    base = settingsJSON['settings']['urls']['base']

    # base_v3 profiles use the alternative AJAX base; each route below has
    # the plain base replaced with it.
    if profile_settings['base_v3'] == 1:
        basethree = settingsJSON['settings']['urls']['alternativeAjaxBase']
    else:
        basethree = base

    complete_base_url = '{base_url}/{country_code}/{language_code}'.format(base_url=basethree, country_code=settingsJSON['settings']['countryCode'], language_code=settingsJSON['settings']['languageCode'])

    try:
        client_id = settingsJSON['client_id']
    except:
        # Key absent: fall back to the bundled default client id.
        client_id = CONST_DEFAULT_CLIENTID

    user_agent = profile_settings['user_agent']

    if len(user_agent) == 0:
        user_agent = DEFAULT_USER_AGENT

    # NOTE(review): SQL assembled with str.format -- a quote in any value
    # breaks the statement; consider parameterization if query_settings
    # supports it.
    query = "UPDATE `vars` SET `base_url`='{base_url}', `client_id`='{client_id}', `devices_url`='{devices_url}', `search_url`='{search_url}', `session_url`='{session_url}', `channels_url`='{channels_url}', `token_url`='{token_url}', `widevine_url`='{widevine_url}', `listings_url`='{listings_url}', `mediaitems_url`='{mediaitems_url}', `mediagroupsfeeds_url`='{mediagroupsfeeds_url}', `watchlist_url`='{watchlist_url}', `user_agent`='{user_agent}' WHERE profile_id={profile_id}".format(
        base_url=complete_base_url + '/web',
        client_id=client_id,
        devices_url=settingsJSON['settings']['routes']['devices'].replace(base, basethree),
        search_url=settingsJSON['settings']['routes']['search'].replace(base, basethree),
        session_url=settingsJSON['settings']['routes']['session'].replace(base, basethree),
        channels_url=settingsJSON['settings']['routes']['channels'].replace(base, basethree),
        token_url='{complete_base_url}/web/license/token'.format(complete_base_url=complete_base_url),
        widevine_url='{complete_base_url}/web/license/eme'.format(complete_base_url=complete_base_url),
        listings_url=settingsJSON['settings']['routes']['listings'].replace(base, basethree),
        mediaitems_url=settingsJSON['settings']['routes']['mediaitems'].replace(base, basethree),
        mediagroupsfeeds_url=settingsJSON['settings']['routes']['mediagroupsfeeds'].replace(base, basethree),
        watchlist_url=settingsJSON['settings']['routes']['watchlist'].replace(base, basethree),
        user_agent=user_agent,
        profile_id=1)

    query_settings(query=query, return_result=False, return_insert=False, commit=True)
def plugin_vod_subscription_filter():
    """Load the saved VOD subscription filter; materialize it as a list on Python 3."""
    loaded = load_file(file='vod_subscription.json', isJSON=True)

    if not loaded:
        return loaded

    if sys.version_info >= (3, 0):
        return list(loaded)

    return loaded
def api_get_channels():
    """Return the channel dictionary, refreshing the one-day disk cache and
    channel preferences/icons when stale. Returns None on download failure."""
    channels_url = '{dut_epg_url}/channels.json'.format(dut_epg_url=CONST_DUT_EPG)
    file = "cache" + os.sep + "channels.json"

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=1):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=channels_url, type='get', headers=None, data=None, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        if not code or code != 200 or not data:
            return None

        write_file(file=file, data=data, isJSON=True)
        update_prefs(profile_id=1, channels=data)

    change_icon()
    clear_cache()

    # Copy into an OrderedDict so callers get a stable iteration order.
    channels = OrderedDict()

    for key in data:
        channels[key] = data[key]

    return channels
def api_vod_seasons(type, id, use_cache=True):
    """Return {'data': ..., 'cache': 0|1} with season data for a series,
    using a half-day disk cache when *use_cache* is set."""
    cache_name = encode32(txt="vod_seasons_{id}".format(id=id))
    file = os.path.join("cache", "{type}.json".format(type=cache_name))

    cache = 0

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        cache = 1
        data = load_file(file=file, isJSON=True)
    else:
        program_url = '{base_url}/v7/series/{id}'.format(base_url=CONST_URLS['api'], id=id)

        result = api_download(url=program_url, type='get', headers=None, data=None, json_data=False, return_json=True)
        data = result['data']

        if result['code'] and result['code'] == 200 and data:
            write_file(file=file, data=data, isJSON=True)

    return {'data': data, 'cache': cache}
def api_vod_seasons(type, id, use_cache=True):
    """Return {'data': ..., 'cache': 0|1}; only well-formed 'OK' responses
    with containers are written to the half-day cache."""
    cache_name = encode32("vod_seasons_{id}".format(id=id))
    file = os.path.join("cache", "{type}.json".format(type=cache_name))

    program_url = '{api_url}/CONTENT/DETAIL/GROUP_OF_BUNDLES/{id}'.format(api_url=CONST_URLS['api'], id=id)

    cache = 0

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        cache = 1
        data = load_file(file=file, isJSON=True)
    else:
        result = api_download(url=program_url, type='get', headers=None, data=None, json_data=False, return_json=True)
        data = result['data']
        code = result['code']

        valid = bool(code) and code == 200 and bool(data)
        valid = valid and check_key(data, 'resultCode') and data['resultCode'] == 'OK'
        valid = valid and check_key(data, 'resultObj') and check_key(data['resultObj'], 'containers')

        if valid:
            write_file(file=file, data=data, isJSON=True)

    return {'data': data, 'cache': cache}
def api_vod_seasons(type, id):
    """Return {'type': 'seasons', 'seasons': [...]} for a series, or None.

    The original (un-stripped) id is carried along as 'watchlist' on each
    season entry.
    """
    if not api_get_session():
        return None

    seasons = []

    cache_name = "vod_seasons_" + str(id)
    encodedBytes = base64.b32encode(cache_name.encode("utf-8"))
    cache_name = str(encodedBytes, "utf-8")

    file = "cache" + os.sep + cache_name + ".json"

    ref = id
    id = id[1:]

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        headers = {
            'videoland-platform': 'videoland',
        }

        seasons_url = '{base_url}/api/v3/series/{series}'.format(base_url=CONST_BASE_URL, series=id)

        download = api_download(url=seasons_url, type='get', headers=headers, data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data and check_key(data, 'title'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data, 'details'):
        return None

    for key in data['details']:
        detail = data['details'][key]

        if not check_key(detail, 'type') or detail['type'] != 'season':
            continue

        seasons.append({
            'id': str(id) + '###' + str(detail['id']),
            'seriesNumber': detail['title'],
            'description': data['description'],
            'image': data['poster'].replace('[format]', '960x1433'),
            'watchlist': ref,
        })

    return {'type': 'seasons', 'seasons': seasons}
def api_vod_download(type, start=0):
    """Return cached or freshly downloaded VOD feed data for a feed type, or None."""
    # Feed-name -> URL template dispatch instead of a long if/elif chain.
    url_templates = {
        "moviesnpo": '{base_url}/v7/recommend/movies?limit=9999&offset=0&contentProvider=npo',
        "movies": '{base_url}/v7/recommend/movies?limit=9999&offset=0',
        "watchaheadnpo": '{base_url}/v7/watchinadvance?limit=9999&offset=0&contentProvider=npo',
        "watchahead": '{base_url}/v7/watchinadvance?limit=9999&offset=0',
        "seriesbingenpo": '{base_url}/v7/recommend/series?limit=9999&offset=0&contentProvider=npo',
        "seriesbinge": '{base_url}/v7/recommend/series?limit=9999&offset=0',
        "mostviewed": '{base_url}/v7/recommend/trendingvideos?limit=9999&offset=0',
        "tipfeednpo": '{base_url}/v7/recommend/recommendedvideos?limit=9999&offset=0&contentProvider=npo',
        "tipfeed": '{base_url}/v7/recommend/recommendedvideos?limit=9999&offset=0',
    }

    if type not in url_templates:
        return None

    url = url_templates[type].format(base_url=CONST_URLS['api'], start=start)

    cache_name = encode32(txt="vod_{type}_{start}".format(type=type, start=start))
    file = os.path.join("cache", "{type}.json".format(type=cache_name))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=url, type='get', headers=None, data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data:
            write_file(file=file, data=data, isJSON=True)

    if not data:
        return None

    return data
def api_vod_seasons(id):
    """Return {'type': 'seasons', 'seasons': [...]} for a series id, or None.

    A 10-minute disk cache is used when 'enable_cache' is on.

    FIX: the post-download check indexed ``data['resultObj']`` without first
    verifying the key exists, so a truthy response missing 'resultObj' raised
    KeyError instead of returning None.
    """
    if not api_get_session():
        return None

    profile_settings = load_profile(profile_id=1)

    seasons = []

    program_url = '{api_url}/CONTENT/DETAIL/GROUP_OF_BUNDLES/{id}'.format(api_url=profile_settings['api_url'], id=id)

    file = "cache" + os.sep + "vod_seasons_" + unicode(id) + ".json"

    if settings.getBool(key='enable_cache') and not is_file_older_than_x_minutes(file=ADDON_PROFILE + file, minutes=10):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=program_url, type='get', headers=None, data=None, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data and check_key(data, 'resultCode') and data['resultCode'] == 'OK' and check_key(data, 'resultObj') and check_key(data['resultObj'], 'containers') and settings.getBool(key='enable_cache'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data, 'resultObj') or not check_key(data['resultObj'], 'containers'):
        return None

    for row in data['resultObj']['containers']:
        for currow in row['containers']:
            if check_key(currow, 'metadata') and check_key(currow['metadata'], 'season') and currow['metadata']['contentSubtype'] == 'SEASON':
                seasons.append({
                    'id': currow['metadata']['contentId'],
                    'seriesNumber': currow['metadata']['season'],
                    'description': currow['metadata']['shortDescription'],
                    'image': "{image_url}/vod/{image}/1920x1080.jpg?blurred=false".format(image_url=CONST_IMAGE_URL, image=currow['metadata']['pictureUrl'])
                })

    return {'type': 'seasons', 'seasons': seasons}
def api_vod_season(series, id):
    # Return a list of episode dicts for one season (VOD id), or None.
    # Valid responses are cached for half a day.
    if not api_get_session():
        return None

    season = []

    # Cache filename: base32 of "vod_season_<id>" (py2 unicode-era code).
    type = "vod_season_" + unicode(id)
    encodedBytes = base64.b32encode(type.encode("utf-8"))
    type = unicode(encodedBytes, "utf-8")

    file = "cache" + os.sep + type + ".json"

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        profile_settings = load_profile(profile_id=1)

        headers = {'Content-Type': 'application/json', 'X_CSRFToken': profile_settings['csrf_token']}

        session_post_data = {
            'VODID': unicode(id),
            'offset': '0',
            'count': '35',
        }

        seasons_url = '{base_url}/VSP/V3/QueryEpisodeList?from=throughMSAAccess'.format(base_url=CONST_BASE_URL)

        download = api_download(url=seasons_url, type='post', headers=headers, data=session_post_data, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        # '000000000' is the backend's success retCode; only cache then.
        if code and code == 200 and data and check_key(data, 'result') and check_key(data['result'], 'retCode') and data['result']['retCode'] == '000000000' and check_key(data, 'episodes'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data, 'episodes'):
        return None

    for row in data['episodes']:
        if check_key(row, 'VOD') and check_key(row['VOD'], 'ID') and check_key(row['VOD'], 'name') and check_key(row, 'sitcomNO'):
            image = ''
            duration = 0

            # Episodes without a playable media file id are skipped entirely.
            if not check_key(row['VOD'], 'mediaFiles') or not check_key(row['VOD']['mediaFiles'][0], 'ID'):
                continue

            if check_key(row['VOD']['mediaFiles'][0], 'elapseTime'):
                duration = row['VOD']['mediaFiles'][0]['elapseTime']

            if check_key(row['VOD'], 'picture') and check_key(row['VOD']['picture'], 'posters'):
                image = row['VOD']['picture']['posters'][0]

            # sitcomNO is the episode number within the season.
            label = '{episode} - {title}'.format(episode=row['sitcomNO'], title=row['VOD']['name'])

            season.append({'label': label, 'id': row['VOD']['ID'], 'media_id': row['VOD']['mediaFiles'][0]['ID'], 'duration': duration, 'title': row['VOD']['name'], 'episodeNumber': row['sitcomNO'], 'description': '', 'image': image})

    return season
def plugin_vod_subscription_filter():
    """Refresh the subscription cache, then return the filter as a list (or None)."""
    api_vod_subscription()

    loaded = load_file(file='cache/vod_subscription.json', isJSON=True)

    if not loaded:
        return loaded

    return list(loaded)
def api_get_epg_by_idtitle(idtitle, start, end, channels):
    """Return EPG rows for one id/title, filtered to the time window and channel set.

    NOTE(review): base64 (not urlsafe) output may contain '/' in the cache
    filename; the server-side naming presumably matches, so this is kept as-is.
    """
    key = '{idtitle}'.format(idtitle=idtitle)
    encodedBytes = base64.b64encode(key.encode("utf-8"))
    key = unicode(encodedBytes, "utf-8")

    epg_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=key)
    file = "cache" + os.sep + "{type}.json".format(type=key)

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=epg_url, type='get', headers=None, data=None, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        if not code or code != 200 or not data:
            return None

        write_file(file=file, data=data, isJSON=True)

    filtered = OrderedDict()

    for entry_id in data:
        row = data[entry_id]

        # Skip rows outside the requested window (malformed rows pass through).
        try:
            if int(row['start']) > start or int(row['end']) < end:
                continue
        except:
            pass

        # Skip rows broadcast on none of the requested channels.
        try:
            if not any(station in channels for station in row['channels']):
                continue
        except:
            pass

        filtered[entry_id] = row

    return filtered
def plugin_vod_subscription_filter():
    """Refresh the subscription cache, then return the filter as a list (or None)."""
    api_vod_subscription()

    loaded = load_file(file=os.path.join('cache', 'vod_subscription.json'), isJSON=True)

    if not loaded:
        return loaded

    return list(loaded)
def api_get_epg_by_idtitle(idtitle, start, end, channels):
    """Return EPG rows for an id/title, limited to the time window and channel set.

    Cache lifetime comes from CONST_MOD_CACHE when present, else half a day.
    """
    key = str(idtitle)

    days = CONST_MOD_CACHE[str(key)] if check_key(CONST_MOD_CACHE, str(key)) else 0.5

    key = encode32(txt=key)

    epg_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=key)
    file = os.path.join("cache", "{type}.json".format(type=key))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=days):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=epg_url, type='get', headers=None, data=None, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        if not code or code != 200 or not data:
            return None

        write_file(file=file, data=data, isJSON=True)

    filtered = OrderedDict()

    for entry_id in data:
        row = data[entry_id]

        # Skip rows outside the window (malformed rows pass through).
        try:
            if int(row['start']) > start or int(row['end']) < end:
                continue
        except:
            pass

        if not row['channel'] in channels:
            continue

        filtered[entry_id] = row

    return filtered
def api_vod_seasons(type, id, use_cache=True):
    """Return {'data': ..., 'cache': 0|1} with the episode list for a VOD id.

    FIX: ``use_cache`` was referenced in the cache check but never defined,
    raising NameError on every call. It is now a keyword parameter defaulting
    to True (backward compatible; matches the sibling api_vod_seasons
    variants).
    """
    if not api_get_session():
        return None

    type = "vod_seasons_{id}".format(id=id)
    type = encode32(type)

    file = os.path.join("cache", "{type}.json".format(type=type))

    cache = 0

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        data = load_file(file=file, isJSON=True)
        cache = 1
    else:
        profile_settings = load_profile(profile_id=1)

        headers = {
            'Content-Type': 'application/json',
            'X_CSRFToken': profile_settings['csrf_token']
        }

        session_post_data = {
            'VODID': str(id),
            'offset': '0',
            'count': '50',
        }

        seasons_url = '{base_url}/VSP/V3/QueryEpisodeList?from=throughMSAAccess'.format(base_url=CONST_URLS['base'])

        download = api_download(url=seasons_url, type='post', headers=headers, data=session_post_data, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        # '000000000' is the backend's success retCode; only cache then.
        if code and code == 200 and data and check_key(data, 'result') and check_key(data['result'], 'retCode') and data['result']['retCode'] == '000000000' and check_key(data, 'episodes'):
            write_file(file=file, data=data, isJSON=True)

    return {'data': data, 'cache': cache}