def api_vod_seasons(type, id, use_cache=True):
    """Return the seasons listing for VOD series *id* (cache-first).

    A cached copy younger than half a day is served when use_cache is
    True; otherwise the v7 series endpoint is queried and the cache is
    refreshed on an HTTP 200 response.

    Returns {'data': <payload>, 'cache': 1 if served from cache else 0}.
    """
    cache_key = encode32(txt="vod_seasons_{id}".format(id=id))
    cache_path = os.path.join("cache", "{type}.json".format(type=cache_key))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, cache_path), days=0.5) and use_cache == True:
        return {'data': load_file(file=cache_path, isJSON=True), 'cache': 1}

    url = '{base_url}/v7/series/{id}'.format(base_url=CONST_URLS['api'], id=id)
    result = api_download(url=url, type='get', headers=None, data=None, json_data=False, return_json=True)
    payload = result['data']
    status = result['code']

    if status and status == 200 and payload:
        write_file(file=cache_path, data=payload, isJSON=True)

    return {'data': payload, 'cache': 0}
def api_vod_seasons(type, id, use_cache=True):
    """Return series metadata for VOD series *id* (cache-first).

    The cache key is derived from the full id; the request strips the
    first character (a type-prefix) before hitting /api/v3/series. A
    download only replaces the cache when the response is HTTP 200 and
    carries a 'title' key.

    Returns {'data': <payload>, 'cache': 1 if served from cache else 0}.
    """
    cache_key = encode32("vod_seasons_{id}".format(id=id))
    cache_path = os.path.join("cache", "{type}.json".format(type=cache_key))
    id = id[1:]  # drop the leading type-prefix character for the request

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, cache_path), days=0.5) and use_cache == True:
        return {'data': load_file(file=cache_path, isJSON=True), 'cache': 1}

    request_headers = api_get_headers(personal=False)
    url = '{base_url}/api/v3/series/{series}'.format(base_url=CONST_URLS['base'], series=id)
    result = api_download(url=url, type='get', headers=request_headers, data=None, json_data=False, return_json=True)
    payload = result['data']
    status = result['code']

    if status and status == 200 and payload and check_key(payload, 'title'):
        write_file(file=cache_path, data=payload, isJSON=True)

    return {'data': payload, 'cache': 0}
def api_get_channels():
    """Download a.channels.zip and extract a.channels.json into the cache.

    Skips the download when the cached JSON is less than one day old.
    Extraction tries the stdlib ZipFile, then a repaired archive
    (fixBadZipfile), then the bundled ZipFile fallback.

    Returns:
        True when the cache is fresh or the download+extract succeeds;
        False on any download/extract failure.
    """
    # Make sure both the tmp and cache directories exist.
    for target in (ADDON_PROFILE + 'tmp' + os.sep + 'a.channels.zip',
                   ADDON_PROFILE + 'cache' + os.sep + 'a.channels.json'):
        directory = os.path.dirname(target)
        if not os.path.exists(directory):
            os.makedirs(directory)

    file = "cache" + os.sep + "a.channels.json"

    # Fresh cache: nothing to do.
    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=1):
        return True

    channels_url = '{dut_epg_url}/a.channels.zip'.format(dut_epg_url=CONST_DUT_EPG_BASE)
    tmp = ADDON_PROFILE + 'tmp' + os.sep + 'a.channels.zip'

    resp = requests.get(channels_url, stream=True)

    if resp.status_code != 200:
        resp.close()
        return False

    with open(tmp, 'wb') as f:
        for chunk in resp.iter_content(chunk_size=SESSION_CHUNKSIZE):
            f.write(chunk)

    resp.close()

    if not os.path.isfile(tmp):
        return False

    from zipfile import ZipFile

    dest = ADDON_PROFILE + "cache" + os.sep

    # Extraction fallback chain: stdlib -> repaired archive -> bundled reader.
    # BUGFIX: bare "except:" narrowed to "except Exception:" so
    # KeyboardInterrupt/SystemExit are no longer swallowed.
    try:
        with ZipFile(tmp, 'r') as zipObj:
            zipObj.extractall(dest)
    except Exception:
        try:
            fixBadZipfile(tmp)
            with ZipFile(tmp, 'r') as zipObj:
                zipObj.extractall(dest)
        except Exception:
            try:
                from resources.lib.base.l1.zipfile import ZipFile as ZipFile2
                with ZipFile2(tmp, 'r') as zipObj:
                    zipObj.extractall(dest)
            except Exception:
                return False

    clear_cache_connector()
    return True
def api_get_genre_list(type):
    """Return the cached genre list for *type*, refreshing it weekly.

    The key '<type>genres' is base32-encoded and used for both the
    remote JSON name and the local cache file name.

    Returns the parsed JSON, or None when the download fails.
    """
    if not os.path.isdir(ADDON_PROFILE + 'tmp'):
        os.makedirs(ADDON_PROFILE + 'tmp')

    encoded = base64.b32encode((type + 'genres').encode("utf-8"))
    type = str(encoded, "utf-8")

    genres_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=type)
    file = "cache" + os.sep + "{type}.json".format(type=type)

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=7):
        return load_file(file=file, isJSON=True)

    result = api_download(url=genres_url, type='get', headers=None, data=None, json_data=True, return_json=True)

    if not (result['code'] and result['code'] == 200 and result['data']):
        return None

    write_file(file=file, data=result['data'], isJSON=True)
    return result['data']
def api_vod_seasons(type, id):
    """Return the season list for a GROUP_OF_BUNDLES VOD item.

    Requires a valid session. The detail payload is cached for ~12h.
    Each season dict carries id, seriesNumber, description and image.

    Returns:
        {'type': 'seasons', 'seasons': [...]} or None on failure.
    """
    if not api_get_session():
        return None

    seasons = []

    program_url = '{api_url}/CONTENT/DETAIL/GROUP_OF_BUNDLES/{id}'.format(api_url=CONST_DEFAULT_API, id=id)

    # BUGFIX: this block used Python 2's unicode(); the rest of the file
    # is Python 3 (str(bytes, "utf-8")), where unicode() raises NameError.
    type = "vod_seasons_" + str(id)
    encodedBytes = base64.b32encode(type.encode("utf-8"))
    type = str(encodedBytes, "utf-8")
    file = "cache" + os.sep + type + ".json"

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=program_url, type='get', headers=None, data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data and check_key(data, 'resultCode') and data['resultCode'] == 'OK' and check_key(data, 'resultObj') and check_key(data['resultObj'], 'containers'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data['resultObj'], 'containers'):
        return None

    for row in data['resultObj']['containers']:
        for currow in row['containers']:
            if check_key(currow, 'metadata') and check_key(currow['metadata'], 'season') and str(currow['metadata']['contentSubtype']) == 'SEASON':
                seasons.append({
                    'id': str(currow['metadata']['contentId']),
                    'seriesNumber': str(currow['metadata']['season']),
                    'description': str(currow['metadata']['shortDescription']),
                    'image': "{image_url}/vod/{image}/1920x1080.jpg?blurred=false".format(image_url=CONST_IMAGE_URL, image=str(currow['metadata']['pictureUrl'])),
                })

    return {'type': 'seasons', 'seasons': seasons}
def api_get_channels():
    """Return the channel map as an OrderedDict, refreshing daily.

    On refresh the downloaded map is written to the cache and pushed
    into the profile prefs; icons and caches are updated either way.
    Returns None when the download fails.
    """
    channels_url = '{dut_epg_url}/channels.json'.format(dut_epg_url=CONST_DUT_EPG)
    file = "cache" + os.sep + "channels.json"

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=1):
        data = load_file(file=file, isJSON=True)
    else:
        result = api_download(url=channels_url, type='get', headers=None, data=None, json_data=True, return_json=True)
        data = result['data']

        if not (result['code'] and result['code'] == 200 and data):
            return None

        write_file(file=file, data=data, isJSON=True)
        update_prefs(profile_id=1, channels=data)

    change_icon()
    clear_cache()

    ordered = OrderedDict()
    for key in data:
        ordered[key] = data[key]

    return ordered
def api_get_genre_list(type, add=1):
    """Return the genre list for *type* (cache TTL ~12h).

    When add == 1 the literal suffix 'genres' is appended to the type
    before it is base32-encoded into the remote/local file name.

    Returns the parsed JSON, or None when the download fails.
    """
    add = int(add)

    tmp_dir = os.path.join(ADDON_PROFILE, 'tmp')
    if not os.path.isdir(tmp_dir):
        os.makedirs(tmp_dir)

    if add == 1:
        type = type + 'genres'

    type = encode32(txt=type)
    genres_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=type)
    file = os.path.join("cache", "{type}.json".format(type=type))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5):
        return load_file(file=file, isJSON=True)

    result = api_download(url=genres_url, type='get', headers=None, data=None, json_data=True, return_json=True)

    if not (result['code'] and result['code'] == 200 and result['data']):
        return None

    write_file(file=file, data=result['data'], isJSON=True)
    return result['data']
def api_get_series_nfo():
    """Ensure the series-NFO bundle is present and recent in the cache.

    Downloads <encoded>.zip and extracts it into the cache directory
    whenever the local copy is older than ~0.45 days. Returns None on
    all paths (on a failed download it simply stops).
    """
    key = encode32(txt='seriesnfo')
    vod_url = '{dut_epg_url}/{type}.zip'.format(dut_epg_url=CONST_DUT_EPG, type=key)
    file = os.path.join("cache", "{type}.json".format(type=key))
    tmp = os.path.join(ADDON_PROFILE, 'tmp', "{type}.zip".format(type=key))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.45):
        # NOTE(review): the loaded data is discarded — the call only proves
        # the cache file is readable. Kept as-is to preserve behavior.
        data = load_file(file=file, isJSON=True)
    else:
        resp = Session().get(vod_url, stream=True)

        if resp.status_code != 200:
            resp.close()
            return None

        with open(tmp, 'wb') as f:
            for chunk in resp.iter_content(chunk_size=SESSION_CHUNKSIZE):
                f.write(chunk)

        resp.close()
        extract_zip(file=tmp, dest=os.path.join(ADDON_PROFILE, "cache", ""))
def api_get_epg_by_date_channel(date, channel):
    """Return EPG JSON for one date/channel pair (cache TTL ~12h).

    The pair '<date>_<channel>' is base32-encoded and used as both the
    remote and local file name. Returns None when the download fails.
    """
    key = '{date}_{channel}'.format(date=date, channel=channel)
    key = str(base64.b32encode(key.encode("utf-8")), "utf-8")

    epg_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=key)
    file = "cache" + os.sep + "{type}.json".format(type=key)

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        return load_file(file=file, isJSON=True)

    result = api_download(url=epg_url, type='get', headers=None, data=None, json_data=True, return_json=True)

    if not (result['code'] and result['code'] == 200 and result['data']):
        return None

    write_file(file=file, data=result['data'], isJSON=True)
    return result['data']
def api_get_epg_by_date_channel(date, channel):
    """Return EPG JSON for one date/channel pair.

    The cache TTL defaults to half a day but can be overridden per key
    via CONST_MOD_CACHE. Returns None when the download fails.
    """
    key = '{date}_{channel}'.format(date=date, channel=channel)

    days = CONST_MOD_CACHE[str(key)] if check_key(CONST_MOD_CACHE, str(key)) else 0.5

    key = encode32(txt=key)
    epg_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=key)
    file = os.path.join("cache", "{type}.json".format(type=key))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=days):
        return load_file(file=file, isJSON=True)

    result = api_download(url=epg_url, type='get', headers=None, data=None, json_data=True, return_json=True)

    if not (result['code'] and result['code'] == 200 and result['data']):
        return None

    write_file(file=file, data=result['data'], isJSON=True)
    return result['data']
def api_vod_subscription():
    """Refresh the cached list of subscribed VOD content ids.

    When the cache file is younger than a day the function is a no-op.
    Otherwise it queries the search tray (requires a session) and writes
    the list of contentIds to cache/vod_subscription.json.

    Returns:
        True on success or fresh cache, False on a bad response,
        None when no session is available.
    """
    file = "cache" + os.sep + "vod_subscription.json"

    # Fresh cache: nothing to refresh. (BUGFIX: the original loaded the
    # file here and discarded the result — a pointless disk read.)
    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=1):
        return True

    if not api_get_session():
        return None

    series_url = '{api_url}/TRAY/SEARCH/VOD?from=1&to=9999&filter_contentType=GROUP_OF_BUNDLES,VOD&filter_contentSubtype=SERIES,VOD&filter_contentTypeExtended=VOD&filter_excludedGenres=erotiek&filter_technicalPackages=10078,10081,10258,10255&dfilter_packages=matchSubscription&orderBy=activationDate&sortOrder=desc'.format(api_url=CONST_DEFAULT_API)
    download = api_download(url=series_url, type='get', headers=None, data=None, json_data=False, return_json=True)
    data = download['data']
    code = download['code']

    if not code or not code == 200 or not data or not check_key(data, 'resultCode') or not data['resultCode'] == 'OK' or not check_key(data, 'resultObj') or not check_key(data['resultObj'], 'containers'):
        return False

    subscription = [row['metadata']['contentId'] for row in data['resultObj']['containers']]

    write_file(file=file, data=subscription, isJSON=True)
    return True
def api_vod_season(series, id, use_cache=True):
    """Return the episode-list payload for VOD season *id* (cache-first).

    Returns:
        {'data': <payload>, 'cache': 1 if served from cache else 0}.
    """
    type = "vod_season_{id}".format(id=id)
    type = encode32(type)
    file = os.path.join("cache", "{type}.json".format(type=type))
    cache = 0

    # BUGFIX: the original loaded profile settings it never used; removed.
    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        data = load_file(file=file, isJSON=True)
        cache = 1
    else:
        season_url = '{mediaitems_url}?byMediaType=Episode%7CFeatureFilm&byParentId={id}&includeAdult=true&range=1-1000&sort=seriesEpisodeNumber|ASC'.format(mediaitems_url=CONST_URLS['mediaitems_url'], id=id)
        download = api_download(url=season_url, type='get', headers=None, data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

        # BUGFIX: the download was never written back, so the cache file
        # could never be (re)populated; persist it on success like the
        # sibling api_vod_seasons variants do.
        if code and code == 200 and data:
            write_file(file=file, data=data, isJSON=True)

    return {'data': data, 'cache': cache}
def api_vod_seasons(type, id, use_cache=True):
    """Return the raw GROUP_OF_BUNDLES detail payload for series *id*.

    Cache-first (~12h). The download is only persisted when the payload
    reports resultCode 'OK' and carries resultObj.containers.

    Returns {'data': <payload>, 'cache': 1 if served from cache else 0}.
    """
    cache_key = encode32("vod_seasons_{id}".format(id=id))
    cache_path = os.path.join("cache", "{type}.json".format(type=cache_key))
    url = '{api_url}/CONTENT/DETAIL/GROUP_OF_BUNDLES/{id}'.format(api_url=CONST_URLS['api'], id=id)

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, cache_path), days=0.5) and use_cache == True:
        return {'data': load_file(file=cache_path, isJSON=True), 'cache': 1}

    result = api_download(url=url, type='get', headers=None, data=None, json_data=False, return_json=True)
    payload = result['data']
    status = result['code']

    ok = (status and status == 200 and payload
          and check_key(payload, 'resultCode') and payload['resultCode'] == 'OK'
          and check_key(payload, 'resultObj') and check_key(payload['resultObj'], 'containers'))

    if ok:
        write_file(file=cache_path, data=payload, isJSON=True)

    return {'data': payload, 'cache': 0}
def api_get_epg_by_addon(addon):
    """Download and extract the EPG zip for the connector *addon*.

    addon[0] selects the remote zip name; the archive is extracted into
    cache/<addon>/. A tmp zip younger than ~12h short-circuits to False
    (nothing new to fetch).

    Returns:
        True after a successful extract, False otherwise.
    """
    type = addon[0]

    # Ensure tmp/ and cache/<addon>/ exist.
    directory = os.path.dirname(ADDON_PROFILE + 'tmp' + os.sep + 'epg.zip')
    if not os.path.exists(directory):
        os.makedirs(directory)

    directory = os.path.dirname(ADDON_PROFILE + "cache" + os.sep + str(addon) + os.sep + 'epg.zip')
    if not os.path.exists(directory):
        os.makedirs(directory)

    epg_url = '{dut_epg_url}/{type}.epg.zip'.format(dut_epg_url=CONST_DUT_EPG_BASE, type=type)
    tmp = ADDON_PROFILE + 'tmp' + os.sep + '{type}.epg.zip'.format(type=type)

    if not is_file_older_than_x_days(file=tmp, days=0.5):
        return False

    resp = requests.get(epg_url, stream=True)

    if resp.status_code != 200:
        resp.close()
        return False

    with open(tmp, 'wb') as f:
        for chunk in resp.iter_content(chunk_size=SESSION_CHUNKSIZE):
            f.write(chunk)

    resp.close()

    if not os.path.isfile(tmp):
        return False

    from zipfile import ZipFile

    dest = ADDON_PROFILE + "cache" + os.sep + str(addon) + os.sep

    # Extraction fallback chain: stdlib -> repaired archive -> bundled reader.
    # BUGFIX: bare "except:" narrowed to "except Exception:" so
    # KeyboardInterrupt/SystemExit are no longer swallowed.
    try:
        with ZipFile(tmp, 'r') as zipObj:
            zipObj.extractall(dest)
    except Exception:
        try:
            fixBadZipfile(tmp)
            with ZipFile(tmp, 'r') as zipObj:
                zipObj.extractall(dest)
        except Exception:
            try:
                from resources.lib.base.l1.zipfile import ZipFile as ZipFile2
                with ZipFile2(tmp, 'r') as zipObj:
                    zipObj.extractall(dest)
            except Exception:
                return False

    return True
def api_vod_seasons(type, id):
    """Return the season list for a Videoland-style series.

    The cache key uses the full id; the request strips its first
    character. Seasons are read from data['details'] entries whose type
    is 'season'.

    Returns {'type': 'seasons', 'seasons': [...]} or None on failure.
    """
    if not api_get_session():
        return None

    cache_key = str(base64.b32encode(("vod_seasons_" + str(id)).encode("utf-8")), "utf-8")
    cache_path = "cache" + os.sep + cache_key + ".json"
    ref = id
    id = id[1:]  # drop the leading type-prefix character for the request

    if not is_file_older_than_x_days(file=ADDON_PROFILE + cache_path, days=0.5):
        data = load_file(file=cache_path, isJSON=True)
    else:
        seasons_url = '{base_url}/api/v3/series/{series}'.format(base_url=CONST_BASE_URL, series=id)
        download = api_download(url=seasons_url, type='get', headers={'videoland-platform': 'videoland'}, data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data and check_key(data, 'title'):
            write_file(file=cache_path, data=data, isJSON=True)

    if not data or not check_key(data, 'details'):
        return None

    seasons = []
    for key in data['details']:
        entry = data['details'][key]

        if check_key(entry, 'type') and entry['type'] == 'season':
            seasons.append({
                'id': str(id) + '###' + str(entry['id']),
                'seriesNumber': entry['title'],
                'description': data['description'],
                'image': data['poster'].replace('[format]', '960x1433'),
                'watchlist': ref,
            })

    return {'type': 'seasons', 'seasons': seasons}
def api_vod_season(series, id):
    """Return the episode list for VOD season *id* (VSP QueryEpisodeList).

    Requires a session; posts with the profile's CSRF token. The raw
    payload is cached for ~12h. Each episode dict carries label, ids,
    duration and image.

    Returns a list of episode dicts, or None on failure.
    """
    if not api_get_session():
        return None

    season = []

    # BUGFIX: Python 2 unicode() replaced with str(); the rest of the
    # file targets Python 3, where unicode() raises NameError.
    type = "vod_season_" + str(id)
    encodedBytes = base64.b32encode(type.encode("utf-8"))
    type = str(encodedBytes, "utf-8")
    file = "cache" + os.sep + type + ".json"

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        profile_settings = load_profile(profile_id=1)
        headers = {'Content-Type': 'application/json', 'X_CSRFToken': profile_settings['csrf_token']}
        session_post_data = {
            'VODID': str(id),
            'offset': '0',
            'count': '35',
        }
        seasons_url = '{base_url}/VSP/V3/QueryEpisodeList?from=throughMSAAccess'.format(base_url=CONST_BASE_URL)
        download = api_download(url=seasons_url, type='post', headers=headers, data=session_post_data, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data and check_key(data, 'result') and check_key(data['result'], 'retCode') and data['result']['retCode'] == '000000000' and check_key(data, 'episodes'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data, 'episodes'):
        return None

    for row in data['episodes']:
        if check_key(row, 'VOD') and check_key(row['VOD'], 'ID') and check_key(row['VOD'], 'name') and check_key(row, 'sitcomNO'):
            image = ''
            duration = 0

            # Episodes without a playable media file id are skipped.
            if not check_key(row['VOD'], 'mediaFiles') or not check_key(row['VOD']['mediaFiles'][0], 'ID'):
                continue

            if check_key(row['VOD']['mediaFiles'][0], 'elapseTime'):
                duration = row['VOD']['mediaFiles'][0]['elapseTime']

            if check_key(row['VOD'], 'picture') and check_key(row['VOD']['picture'], 'posters'):
                image = row['VOD']['picture']['posters'][0]

            label = '{episode} - {title}'.format(episode=row['sitcomNO'], title=row['VOD']['name'])

            season.append({'label': label, 'id': row['VOD']['ID'], 'media_id': row['VOD']['mediaFiles'][0]['ID'], 'duration': duration, 'title': row['VOD']['name'], 'episodeNumber': row['sitcomNO'], 'description': '', 'image': image})

    return season
def api_vod_download(type, start=0):
    """Return a cached VOD listing for one of the known v7 feed types.

    Unknown types yield None. Listings are cached for ~12h under a key
    derived from the type and start offset.
    """
    # Feed name -> v7 path (query strings reproduced verbatim).
    paths = {
        "moviesnpo": 'recommend/movies?limit=9999&offset=0&contentProvider=npo',
        "movies": 'recommend/movies?limit=9999&offset=0',
        "watchaheadnpo": 'watchinadvance?limit=9999&offset=0&contentProvider=npo',
        "watchahead": 'watchinadvance?limit=9999&offset=0',
        "seriesbingenpo": 'recommend/series?limit=9999&offset=0&contentProvider=npo',
        "seriesbinge": 'recommend/series?limit=9999&offset=0',
        "mostviewed": 'recommend/trendingvideos?limit=9999&offset=0',
        "tipfeednpo": 'recommend/recommendedvideos?limit=9999&offset=0&contentProvider=npo',
        "tipfeed": 'recommend/recommendedvideos?limit=9999&offset=0',
    }

    if type not in paths:
        return None

    url = '{base_url}/v7/{path}'.format(base_url=CONST_URLS['api'], path=paths[type])

    cache_key = encode32(txt="vod_{type}_{start}".format(type=type, start=start))
    cache_path = os.path.join("cache", "{type}.json".format(type=cache_key))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, cache_path), days=0.5):
        data = load_file(file=cache_path, isJSON=True)
    else:
        result = api_download(url=url, type='get', headers=None, data=None, json_data=False, return_json=True)
        data = result['data']

        if result['code'] and result['code'] == 200 and data:
            write_file(file=cache_path, data=data, isJSON=True)

    if not data:
        return None

    return data
def api_get_epg_by_idtitle(idtitle, start, end, channels):
    """Return EPG rows for a programme id/title, filtered by window and channels.

    The idtitle is base64-encoded into the remote/local file name.
    NOTE(review): b64 output may contain '/' and '+' (unlike the b32
    keys used elsewhere) — left unchanged to keep the server contract;
    confirm upstream file naming.

    Returns an OrderedDict of matching rows, or None when the download fails.
    """
    type = '{idtitle}'.format(idtitle=idtitle)

    # BUGFIX: Python 2 unicode() replaced with str(); the rest of the
    # file targets Python 3, where unicode() raises NameError.
    encodedBytes = base64.b64encode(type.encode("utf-8"))
    type = str(encodedBytes, "utf-8")

    epg_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=type)
    file = "cache" + os.sep + "{type}.json".format(type=type)

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=epg_url, type='get', headers=None, data=None, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data:
            write_file(file=file, data=data, isJSON=True)
        else:
            return None

    data2 = OrderedDict()

    for currow in data:
        row = data[currow]

        # Keep rows whose span covers [start, end]; malformed rows pass
        # through (best-effort filtering, as before).
        try:
            if int(row['start']) > start or int(row['end']) < end:
                continue
        except Exception:
            pass

        try:
            found = False

            for station in row['channels']:
                if station in channels:
                    found = True
                    break

            if found == False:
                continue
        except Exception:
            pass

        data2[currow] = row

    return data2
def clear_cache_connector():
    """Clear the generic cache, then prune stale per-connector EPG XML.

    Any *.xml under cache/<connector>/ older than one day is deleted.
    Cleanup is best-effort per connector: errors are swallowed so one
    broken directory cannot abort the rest.
    """
    clear_cache()

    addonlist = ['betelenet', 'canaldigitaal', 'kpn', 'nlziet', 'tmobile', 'ziggo']

    for addon in addonlist:
        # BUGFIX: bare "except:" narrowed to "except Exception:" so
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        try:
            for file in glob.glob(os.path.join(ADDON_PROFILE, "cache", addon, "*.xml")):
                if is_file_older_than_x_days(file=file, days=1):
                    os.remove(file)
        except Exception:
            pass
def clear_cache_connector():
    """Clear the generic cache, then prune stale per-connector EPG XML.

    Any *.xml under cache/<connector>/ older than one day is deleted.
    Cleanup is best-effort per connector: errors are swallowed so one
    broken directory cannot abort the rest.
    """
    clear_cache()

    addonlist = ['canaldigitaal', 'kpn', 'nlziet', 'tmobile', 'ziggo']

    for addon in addonlist:
        # BUGFIX: bare "except:" narrowed to "except Exception:" so
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        try:
            for file in glob.glob(ADDON_PROFILE + "cache" + os.sep + addon + os.sep + "*.xml"):
                if is_file_older_than_x_days(file=file, days=1):
                    os.remove(file)
        except Exception:
            pass
def service_timer(timer):
    """Dispatch periodic service work.

    Only the 'daily' timer does anything: it refreshes the VOD
    subscription cache when it is older than one day. 'hourly' and
    'startup' are placeholders.
    """
    if timer == 'daily':
        from resources.lib.api import api_vod_subscription
        from resources.lib.base.l1.constants import ADDON_PROFILE
        from resources.lib.base.l3.util import is_file_older_than_x_days

        # BUGFIX: api_vod_subscription writes cache/vod_subscription.json,
        # but this guard checked ADDON_PROFILE/vod_subscription.json — a
        # path that is never written, so the check was always "stale".
        # Point the check at the real cache file.
        if is_file_older_than_x_days(ADDON_PROFILE + 'cache' + os.sep + 'vod_subscription.json', days=1):
            api_vod_subscription()
    elif timer == 'hourly':
        pass
    elif timer == 'startup':
        pass
def api_get_epg_by_idtitle(idtitle, start, end, channels):
    """Return EPG rows for one programme title, filtered by window and channel.

    Cache TTL defaults to 0.5 days, overridable via CONST_MOD_CACHE.
    Rows outside [start, end] or whose channel is not in *channels* are
    dropped. Returns an OrderedDict, or None when the download fails.
    """
    key = str(idtitle)

    if check_key(CONST_MOD_CACHE, str(key)):
        days = CONST_MOD_CACHE[str(key)]
    else:
        days = 0.5

    key = encode32(txt=key)
    epg_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=key)
    file = os.path.join("cache", "{type}.json".format(type=key))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=days):
        data = load_file(file=file, isJSON=True)
    else:
        result = api_download(url=epg_url, type='get', headers=None, data=None, json_data=True, return_json=True)
        data = result['data']

        if result['code'] and result['code'] == 200 and data:
            write_file(file=file, data=data, isJSON=True)
        else:
            return None

    filtered = OrderedDict()

    for rowkey in data:
        row = data[rowkey]

        try:
            if int(row['start']) > start or int(row['end']) < end:
                continue
        except:
            pass

        if not row['channel'] in channels:
            continue

        filtered[rowkey] = row

    return filtered
def api_vod_seasons(type, id, use_cache=True):
    """Return the raw QueryEpisodeList payload for VOD *id* (cache-first).

    BUGFIX: the body referenced use_cache, which was never defined and
    raised NameError at every call; it is now a keyword parameter
    defaulting to True (backward-compatible for existing callers).

    Returns:
        {'data': <payload>, 'cache': 1 if served from cache else 0},
        or None when no session is available.
    """
    if not api_get_session():
        return None

    type = "vod_seasons_{id}".format(id=id)
    type = encode32(type)
    file = os.path.join("cache", "{type}.json".format(type=type))
    cache = 0

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        data = load_file(file=file, isJSON=True)
        cache = 1
    else:
        profile_settings = load_profile(profile_id=1)
        headers = {
            'Content-Type': 'application/json',
            'X_CSRFToken': profile_settings['csrf_token'],
        }
        session_post_data = {
            'VODID': str(id),
            'offset': '0',
            'count': '50',
        }
        seasons_url = '{base_url}/VSP/V3/QueryEpisodeList?from=throughMSAAccess'.format(base_url=CONST_URLS['base'])
        download = api_download(url=seasons_url, type='post', headers=headers, data=session_post_data, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data and check_key(data, 'result') and check_key(data['result'], 'retCode') and data['result']['retCode'] == '000000000' and check_key(data, 'episodes'):
            write_file(file=file, data=data, isJSON=True)

    return {'data': data, 'cache': cache}
def api_vod_download(type, start=0):
    """Return processed VOD items for one of the v6 tab feeds.

    Requires a session. Payloads are cached for ~12h under a key derived
    from the feed type and start offset.

    Returns the result of api_process_vod, or None for unknown types,
    failed downloads, or payloads without 'Items'.
    """
    if not api_get_session():
        return None

    if type == "movies":
        url = '{base_url}/v6/tabs/GenreFilms?count=52&expand=true&expandlist=true&maxResults=52&offset={start}'.format(base_url=CONST_API_URL, start=start)
    elif type == "watchahead":
        url = '{base_url}/v6/tabs/VooruitKijken2?count=52&expand=true&expandlist=true&maxResults=52&offset={start}'.format(base_url=CONST_API_URL, start=start)
    elif type == "seriesbinge":
        url = '{base_url}/v6/tabs/SeriesBingewatch?count=52&expand=true&expandlist=true&maxResults=52&offset={start}'.format(base_url=CONST_API_URL, start=start)
    elif type == "mostviewed":
        url = '{base_url}/v6/tabs/MostViewed?count=52&expand=true&expandlist=true&maxResults=52&offset={start}'.format(base_url=CONST_API_URL, start=start)
    elif type == "tipfeed":
        url = '{base_url}/v6/tabs/Tipfeed?count=52&expand=true&expandlist=true&maxResults=52&offset={start}'.format(base_url=CONST_API_URL, start=start)
    else:
        return None

    # BUGFIX: Python 2 unicode() replaced with str(); the rest of the
    # file targets Python 3, where unicode() raises NameError.
    type = "vod_" + type + "_" + str(start)
    encodedBytes = base64.b32encode(type.encode("utf-8"))
    type = str(encodedBytes, "utf-8")
    file = "cache" + os.sep + type + ".json"

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=url, type='get', headers=None, data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data:
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data, 'Items'):
        return None

    return api_process_vod(data=data)
def api_vod_season(series, id):
    """Return the episode list for a BUNDLE VOD season.

    Requires a session. The detail payload is cached for ~12h. Episodes
    are de-duplicated on episodeNumber; the SD_DASH_PR MASTER asset id
    is attached when present.

    Returns a list of episode dicts, or None on failure.
    """
    if not api_get_session():
        return None

    season = []
    episodes = []

    program_url = '{api_url}/CONTENT/DETAIL/BUNDLE/{id}'.format(api_url=CONST_DEFAULT_API, id=id)

    # BUGFIX: Python 2 unicode() replaced with str(); the rest of the
    # file targets Python 3, where unicode() raises NameError.
    type = "vod_season_" + str(id)
    encodedBytes = base64.b32encode(type.encode("utf-8"))
    type = str(encodedBytes, "utf-8")
    file = "cache" + os.sep + type + ".json"

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=program_url, type='get', headers=None, data=None, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data and check_key(data, 'resultCode') and data['resultCode'] == 'OK' and check_key(data, 'resultObj') and check_key(data['resultObj'], 'containers'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data['resultObj'], 'containers'):
        return None

    for row in data['resultObj']['containers']:
        for currow in row['containers']:
            if check_key(currow, 'metadata') and check_key(currow['metadata'], 'season') and str(currow['metadata']['contentSubtype']) == 'EPISODE' and not str(currow['metadata']['episodeNumber']) in episodes:
                asset_id = ''

                # Prefer the SD_DASH_PR MASTER asset when one exists.
                for asset in currow['assets']:
                    if check_key(asset, 'videoType') and asset['videoType'] == 'SD_DASH_PR' and check_key(asset, 'assetType') and asset['assetType'] == 'MASTER':
                        asset_id = str(asset['assetId'])
                        break

                episodes.append(str(currow['metadata']['episodeNumber']))
                label = '{season}.{episode} - {title}'.format(season=str(currow['metadata']['season']), episode=str(currow['metadata']['episodeNumber']), title=str(currow['metadata']['episodeTitle']))

                season.append({
                    'label': label,
                    'id': str(currow['metadata']['contentId']),
                    'assetid': asset_id,
                    'duration': currow['metadata']['duration'],
                    'title': str(currow['metadata']['episodeTitle']),
                    'episodeNumber': '{season}.{episode}'.format(season=str(currow['metadata']['season']), episode=str(currow['metadata']['episodeNumber'])),
                    'description': str(currow['metadata']['shortDescription']),
                    'image': "{image_url}/vod/{image}/1920x1080.jpg?blurred=false".format(image_url=CONST_IMAGE_URL, image=str(currow['metadata']['pictureUrl'])),
                })

    return season
def api_get_vod_by_type(type, character, subscription_filter):
    """Return the VOD listing for *type*, optionally filtered.

    Filters: first-letter match (*character*) and membership of the row
    id in *subscription_filter*. Cache TTL ~12h.

    Returns an OrderedDict of rows, or None when the download fails.
    """
    # BUGFIX: Python 2 unicode() replaced with str(); the rest of the
    # file targets Python 3, where unicode() raises NameError.
    encodedBytes = base64.b64encode(type.encode("utf-8"))
    type = str(encodedBytes, "utf-8")

    vod_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=type)
    file = "cache" + os.sep + "{type}.json".format(type=type)

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=vod_url, type='get', headers=None, data=None, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data:
            write_file(file=file, data=data, isJSON=True)
        else:
            return None

    data2 = OrderedDict()

    for currow in data:
        row = data[currow]
        id = row['id']

        if character:
            if not row['first'] == character:
                continue

        if subscription_filter and not int(id) in subscription_filter:
            continue

        data2[currow] = row

    return data2
def api_vod_seasons(type, id, use_cache=True):
    """Return a seasons payload for *id*, delegating to api_get_vod_by_type.

    A cache file younger than ~12h is returned directly (cache=1);
    otherwise the listing is rebuilt via api_get_vod_by_type (which does
    its own persistence).

    Returns {'data': <payload>, 'cache': 0 or 1}.
    """
    cache_key = encode32("vod_seasons_{id}".format(id=id))
    cache_path = os.path.join("cache", "{type}.json".format(type=cache_key))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, cache_path), days=0.5) and use_cache == True:
        return {'data': load_file(file=cache_path, isJSON=True), 'cache': 1}

    return {'data': api_get_vod_by_type(type=type, character=None, genre=None, subscription_filter=None), 'cache': 0}
def api_vod_season(series, id):
    """Return the episode list for season *id* via the assets query API.

    Requires a session; sends the profile's bearer token. The raw
    payload is cached for ~12h. Returns a list of episode dicts, or
    None on failure.
    """
    if not api_get_session():
        return None

    profile_settings = load_profile(profile_id=1)
    auth_headers = {'Authorization': 'Bearer ' + profile_settings['session_token']}

    program_url = '{api_url}/assets?query={id}'.format(api_url=CONST_DEFAULT_API, id=id)
    cache_key = str(base64.b32encode(("vod_season_" + str(id)).encode("utf-8")), "utf-8")
    cache_path = "cache" + os.sep + cache_key + ".json"

    if not is_file_older_than_x_days(file=ADDON_PROFILE + cache_path, days=0.5):
        data = load_file(file=cache_path, isJSON=True)
    else:
        download = api_download(url=program_url, type='get', headers=auth_headers, data=None, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data and check_key(data, 'assets'):
            write_file(file=cache_path, data=data, isJSON=True)

    if not data or not check_key(data, 'assets'):
        return None

    season = []
    episodes = []

    for row in data['assets']:
        episodes.append(str(row['params']['seriesEpisode']))
        label = '{season}.{episode} - {title}'.format(season=str(row['params']['seriesSeason']), episode=str(row['params']['seriesEpisode']), title=str(row['title']))

        season.append({
            'label': label,
            'id': str(row['id']),
            'assetid': '',
            'duration': row['params']['duration'],
            'title': str(row['title']),
            'episodeNumber': '{season}.{episode}'.format(season=str(row['params']['seriesSeason']), episode=str(row['params']['seriesEpisode'])),
            'description': '',
            'image': str(row['images'][0]['url']),
        })

    return season
def api_vod_season(series, id, use_cache=True):
    """Return the raw assets payload for season *id* (cache-first).

    Requires a session; sends the profile's bearer token. Returns
    {'data': <payload>, 'cache': 1 if served from cache else 0}, or
    None when no session is available.
    """
    if not api_get_session():
        return None

    profile_settings = load_profile(profile_id=1)
    auth_headers = {
        'Authorization': 'Bearer {token}'.format(token=profile_settings['session_token'])
    }
    url = '{api_url}/assets?query={id}'.format(api_url=CONST_URLS['api'], id=id)

    cache_key = encode32("vod_seasons_{id}".format(id=id))
    cache_path = os.path.join("cache", "{type}.json".format(type=cache_key))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, cache_path), days=0.5) and use_cache == True:
        return {'data': load_file(file=cache_path, isJSON=True), 'cache': 1}

    download = api_download(url=url, type='get', headers=auth_headers, data=None, json_data=True, return_json=True)
    data = download['data']
    code = download['code']

    if code and code == 200 and data and check_key(data, 'assets'):
        write_file(file=cache_path, data=data, isJSON=True)

    return {'data': data, 'cache': 0}
def api_get_channels():
    """Return the channel map as an OrderedDict.

    Cache TTL defaults to one day, overridable via
    CONST_MOD_CACHE['channels']. A fresh download updates the profile
    prefs; the generic cache is always cleared before returning.
    Returns None when the download fails.
    """
    channels_url = '{dut_epg_url}/channels.json'.format(dut_epg_url=CONST_DUT_EPG)
    file = os.path.join("cache", "channels.json")

    days = CONST_MOD_CACHE['channels'] if check_key(CONST_MOD_CACHE, 'channels') else 1

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=days):
        data = load_file(file=file, isJSON=True)
    else:
        result = api_download(url=channels_url, type='get', headers=None, data=None, json_data=True, return_json=True)
        data = result['data']

        if not (result['code'] and result['code'] == 200 and data):
            return None

        write_file(file=file, data=data, isJSON=True)
        update_prefs(profile_id=1, channels=data)

    clear_cache()

    ordered = OrderedDict()
    for key in data:
        ordered[key] = data[key]

    return ordered