def api_get_series_nfo():
    """Refresh the local cache of the series NFO bundle.

    Downloads and unpacks the zip from the EPG server when the cached JSON is
    older than ~11 hours; returns None on download failure, otherwise nothing
    useful (the function exists to prime the cache on disk).
    """
    cache_key = encode32(txt='seriesnfo')

    vod_url = '{dut_epg_url}/{type}.zip'.format(dut_epg_url=CONST_DUT_EPG, type=cache_key)
    file = os.path.join("cache", "{type}.json".format(type=cache_key))
    tmp = os.path.join(ADDON_PROFILE, 'tmp', "{type}.zip".format(type=cache_key))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.45):
        # Cache is still fresh; nothing to download.
        data = load_file(file=file, isJSON=True)
    else:
        resp = Session().get(vod_url, stream=True)

        if resp.status_code != 200:
            resp.close()
            return None

        # Stream the zip to a temp file, then unpack it into the cache dir.
        with open(tmp, 'wb') as f:
            for chunk in resp.iter_content(chunk_size=SESSION_CHUNKSIZE):
                f.write(chunk)

        resp.close()
        extract_zip(file=tmp, dest=os.path.join(ADDON_PROFILE, "cache", ""))
def api_vod_seasons(type, id, use_cache=True):
    """Return {'data': ..., 'cache': 0|1} with season data for a series."""
    cache_key = encode32("vod_seasons_{id}".format(id=id))
    file = os.path.join("cache", "{type}.json".format(type=cache_key))

    # Strip the leading character of the id before querying (original
    # behavior; presumably a type prefix — confirm against callers).
    id = id[1:]
    cache = 0

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        cache = 1
        data = load_file(file=file, isJSON=True)
    else:
        headers = api_get_headers(personal=False)
        seasons_url = '{base_url}/api/v3/series/{series}'.format(
            base_url=CONST_URLS['base'], series=id)

        result = api_download(url=seasons_url, type='get', headers=headers,
                              data=None, json_data=False, return_json=True)
        data = result['data']
        code = result['code']

        # Persist only responses that look like a valid series payload.
        if code and code == 200 and data and check_key(data, 'title'):
            write_file(file=file, data=data, isJSON=True)

    return {'data': data, 'cache': cache}
def api_vod_season(series, id, use_cache=True):
    """Return {'data': ..., 'cache': 0|1} with the episode list for one season.

    'data' comes from the on-disk cache when it is younger than half a day and
    use_cache is True; otherwise it is fetched from the mediaitems endpoint.
    """
    type = "vod_season_{id}".format(id=id)
    type = encode32(type)
    file = os.path.join("cache", "{type}.json".format(type=type))
    cache = 0

    # (Dropped an unused load_profile(profile_id=1) call from the original.)
    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        data = load_file(file=file, isJSON=True)
        cache = 1
    else:
        season_url = '{mediaitems_url}?byMediaType=Episode%7CFeatureFilm&byParentId={id}&includeAdult=true&range=1-1000&sort=seriesEpisodeNumber|ASC'.format(
            mediaitems_url=CONST_URLS['mediaitems_url'], id=id)
        download = api_download(url=season_url, type='get', headers=None,
                                data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

        # BUG FIX: the downloaded result was never written to the cache file,
        # so the cache branch above could never be taken and every call
        # re-downloaded the season. Persist successful responses the way the
        # sibling api_vod_seasons functions do.
        if code and code == 200 and data:
            write_file(file=file, data=data, isJSON=True)

    return {'data': data, 'cache': cache}
def api_vod_seasons(type, id, use_cache=True):
    """Return {'data': ..., 'cache': 0|1} for a bundle-of-bundles detail lookup."""
    cache_key = encode32("vod_seasons_{id}".format(id=id))
    file = os.path.join("cache", "{type}.json".format(type=cache_key))
    program_url = '{api_url}/CONTENT/DETAIL/GROUP_OF_BUNDLES/{id}'.format(
        api_url=CONST_URLS['api'], id=id)
    cache = 0

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        data = load_file(file=file, isJSON=True)
        cache = 1
    else:
        response = api_download(url=program_url, type='get', headers=None,
                                data=None, json_data=False, return_json=True)
        data = response['data']
        code = response['code']

        # Cache only well-formed 'OK' responses that contain containers.
        valid = (code and code == 200 and data
                 and check_key(data, 'resultCode') and data['resultCode'] == 'OK'
                 and check_key(data, 'resultObj')
                 and check_key(data['resultObj'], 'containers'))

        if valid:
            write_file(file=file, data=data, isJSON=True)

    return {'data': data, 'cache': cache}
def api_get_epg_by_date_channel(date, channel):
    """Return EPG JSON for one channel and date, or None on download failure.

    The cache lifetime defaults to half a day but can be overridden per key
    through CONST_MOD_CACHE.
    """
    key = '{date}_{channel}'.format(date=date, channel=channel)

    days = CONST_MOD_CACHE[str(key)] if check_key(CONST_MOD_CACHE, str(key)) else 0.5

    key = encode32(txt=key)
    epg_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=key)
    file = os.path.join("cache", "{type}.json".format(type=key))

    # Fresh cache: serve straight from disk.
    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=days):
        return load_file(file=file, isJSON=True)

    download = api_download(url=epg_url, type='get', headers=None, data=None,
                            json_data=True, return_json=True)
    data = download['data']
    code = download['code']

    if not (code and code == 200 and data):
        return None

    write_file(file=file, data=data, isJSON=True)
    return data
def api_get_genre_list(type, add=1):
    """Return the genre list JSON for 'type', or None on download failure.

    When add == 1 the literal suffix 'genres' is appended to the type before
    it is used as the cache/URL key.
    """
    add = int(add)

    tmp_dir = os.path.join(ADDON_PROFILE, 'tmp')
    if not os.path.isdir(tmp_dir):
        os.makedirs(tmp_dir)

    if add == 1:
        type = type + 'genres'

    type = encode32(txt=type)
    genres_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=type)
    file = os.path.join("cache", "{type}.json".format(type=type))

    # Fresh cache: serve straight from disk.
    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5):
        return load_file(file=file, isJSON=True)

    download = api_download(url=genres_url, type='get', headers=None, data=None,
                            json_data=True, return_json=True)
    data = download['data']
    code = download['code']

    if not (code and code == 200 and data):
        return None

    write_file(file=file, data=data, isJSON=True)
    return data
def api_vod_seasons(type, id, use_cache=True):
    """Return {'data': ..., 'cache': 0|1} with v7 series data for 'id'."""
    cache_key = encode32(txt="vod_seasons_{id}".format(id=id))
    file = os.path.join("cache", "{type}.json".format(type=cache_key))
    cache = 0

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        cache = 1
        data = load_file(file=file, isJSON=True)
    else:
        series_url = '{base_url}/v7/series/{id}'.format(
            base_url=CONST_URLS['api'], id=id)
        result = api_download(url=series_url, type='get', headers=None,
                              data=None, json_data=False, return_json=True)
        data = result['data']
        code = result['code']

        # Only successful, non-empty responses are persisted.
        if code and code == 200 and data:
            write_file(file=file, data=data, isJSON=True)

    return {'data': data, 'cache': cache}
def api_vod_download(type, start=0):
    """Download (or read from cache) one of the named v7 VOD feeds.

    Returns the feed JSON, or None for an unknown feed name or empty result.
    """
    # Feed name -> endpoint template. Unknown names yield None.
    endpoints = {
        "moviesnpo": '{base_url}/v7/recommend/movies?limit=9999&offset=0&contentProvider=npo',
        "movies": '{base_url}/v7/recommend/movies?limit=9999&offset=0',
        "watchaheadnpo": '{base_url}/v7/watchinadvance?limit=9999&offset=0&contentProvider=npo',
        "watchahead": '{base_url}/v7/watchinadvance?limit=9999&offset=0',
        "seriesbingenpo": '{base_url}/v7/recommend/series?limit=9999&offset=0&contentProvider=npo',
        "seriesbinge": '{base_url}/v7/recommend/series?limit=9999&offset=0',
        "mostviewed": '{base_url}/v7/recommend/trendingvideos?limit=9999&offset=0',
        "tipfeednpo": '{base_url}/v7/recommend/recommendedvideos?limit=9999&offset=0&contentProvider=npo',
        "tipfeed": '{base_url}/v7/recommend/recommendedvideos?limit=9999&offset=0',
    }

    if type not in endpoints:
        return None

    url = endpoints[type].format(base_url=CONST_URLS['api'])

    cache_key = encode32(txt="vod_{type}_{start}".format(type=type, start=start))
    file = os.path.join("cache", "{type}.json".format(type=cache_key))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        result = api_download(url=url, type='get', headers=None, data=None,
                              json_data=False, return_json=True)
        data = result['data']
        code = result['code']

        if code and code == 200 and data:
            write_file(file=file, data=data, isJSON=True)

    if not data:
        return None

    return data
def api_get_epg_by_idtitle(idtitle, start, end, channels):
    """Return EPG rows for 'idtitle', filtered by time window and channels.

    Returns None when the download fails; otherwise an OrderedDict of the
    surviving rows keyed like the source data.
    """
    key = str(idtitle)

    days = CONST_MOD_CACHE[str(key)] if check_key(CONST_MOD_CACHE, str(key)) else 0.5

    key = encode32(txt=key)
    epg_url = '{dut_epg_url}/{type}.json'.format(dut_epg_url=CONST_DUT_EPG, type=key)
    file = os.path.join("cache", "{type}.json".format(type=key))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=days):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=epg_url, type='get', headers=None, data=None,
                                json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        if not (code and code == 200 and data):
            return None

        write_file(file=file, data=data, isJSON=True)

    filtered = OrderedDict()

    for currow in data:
        row = data[currow]

        # NOTE(review): this keeps only rows whose span covers the whole
        # [start, end] window (row starts at/before 'start' AND ends at/after
        # 'end'); preserved as-is, but confirm it should not be an overlap test.
        try:
            if int(row['start']) > start or int(row['end']) < end:
                continue
        except:
            pass

        if not row['channel'] in channels:
            continue

        filtered[currow] = row

    return filtered
def api_vod_seasons(type, id, use_cache=True):
    """Return {'data': ..., 'cache': 0|1} with the episode list for a VOD id.

    Requires a valid session. When the cached file is younger than half a day
    and use_cache is True the cached data is returned; otherwise the episode
    list is fetched via QueryEpisodeList and cached on success.
    """
    if not api_get_session():
        return None

    type = "vod_seasons_{id}".format(id=id)
    type = encode32(type)
    file = os.path.join("cache", "{type}.json".format(type=type))
    cache = 0

    # BUG FIX: 'use_cache' was referenced here but missing from the signature
    # (def api_vod_seasons(type, id)), raising NameError at runtime. Added
    # use_cache=True to match every sibling api_vod_seasons variant; existing
    # two-argument callers are unaffected.
    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        data = load_file(file=file, isJSON=True)
        cache = 1
    else:
        profile_settings = load_profile(profile_id=1)

        headers = {
            'Content-Type': 'application/json',
            'X_CSRFToken': profile_settings['csrf_token']
        }

        session_post_data = {
            'VODID': str(id),
            'offset': '0',
            'count': '50',
        }

        seasons_url = '{base_url}/VSP/V3/QueryEpisodeList?from=throughMSAAccess'.format(
            base_url=CONST_URLS['base'])
        download = api_download(url=seasons_url, type='post', headers=headers,
                                data=session_post_data, json_data=True, return_json=True)
        data = download['data']
        code = download['code']

        # Cache only responses with the provider's success retCode and episodes.
        if code and code == 200 and data and check_key(
                data, 'result') and check_key(
                    data['result'], 'retCode'
                ) and data['result']['retCode'] == '000000000' and check_key(
                    data, 'episodes'):
            write_file(file=file, data=data, isJSON=True)

    return {'data': data, 'cache': cache}
def api_vod_seasons(type, id, use_cache=True):
    """Return {'data': ..., 'cache': 0|1}; seasons come from the VOD listing.

    This variant has no dedicated seasons endpoint: on a cache miss it
    delegates to api_get_vod_by_type for the given type.
    """
    cache_key = encode32("vod_seasons_{id}".format(id=id))
    file = os.path.join("cache", "{type}.json".format(type=cache_key))
    cache = 0

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        cache = 1
        data = load_file(file=file, isJSON=True)
    else:
        data = api_get_vod_by_type(type=type, character=None, genre=None,
                                   subscription_filter=None)

    return {'data': data, 'cache': cache}
def api_vod_season(series, id, use_cache=True):
    """Return {'data': ..., 'cache': 0|1} with assets for one season.

    Requires a valid session; the assets endpoint is queried with a bearer
    token from the stored profile.
    """
    if not api_get_session():
        return None

    profile_settings = load_profile(profile_id=1)

    headers = {
        'Authorization': 'Bearer {token}'.format(token=profile_settings['session_token'])
    }

    program_url = '{api_url}/assets?query={id}'.format(
        api_url=CONST_URLS['api'], id=id)

    cache_key = encode32("vod_seasons_{id}".format(id=id))
    file = os.path.join("cache", "{type}.json".format(type=cache_key))
    cache = 0

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5) and use_cache == True:
        cache = 1
        data = load_file(file=file, isJSON=True)
    else:
        result = api_download(url=program_url, type='get', headers=headers,
                              data=None, json_data=True, return_json=True)
        data = result['data']
        code = result['code']

        # Persist only responses that actually contain assets.
        if code and code == 200 and data and check_key(data, 'assets'):
            write_file(file=file, data=data, isJSON=True)

    return {'data': data, 'cache': cache}
def api_search(query):
    """Search ReplayTV and Movies & Series; currently DISABLED.

    The unconditional 'return False' below short-circuits the whole function;
    everything after it is dead code kept for a future re-enable. When active
    it would return a list of item dicts (interleaving program and VOD hits)
    or False on failure.
    """
    return False
    # --- dead code below: former search implementation ---
    end = int(time.time() * 1000)
    start = end - (7 * 24 * 60 * 60 * 1000)  # search window: last 7 days, in ms
    vodstr = ''
    queryb32 = encode32(query)
    file = os.path.join("cache", "{query}.json".format(query=queryb32))
    search_url = '{search_url}?byBroadcastStartTimeRange={start}~{end}&numItems=25&byEntitled=true&personalised=true&q={query}'.format(
        search_url=CONST_URLS['search_url'], start=start, end=end, query=quote_plus(query))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=search_url, type='get', headers=api_get_headers(),
                                data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

        # Cache only responses that contain at least one result section.
        if code and code == 200 and data and (check_key(
                data, 'tvPrograms') or check_key(data, 'moviesAndSeries')):
            write_file(file=file, data=data, isJSON=True)

    if not data or (not check_key(data, 'tvPrograms') and not check_key(data, 'moviesAndSeries')):
        return False

    items = []
    items_vod = []
    items_program = []
    vod_links = {}

    if not settings.getBool('showMoviesSeries'):
        # VOD disabled in settings: drop that result section entirely.
        try:
            data.pop('moviesAndSeries', None)
        except:
            pass
    else:
        # Build an id -> metadata lookup from the full VOD catalogue so VOD
        # hits can be enriched with description/type/duration.
        for entry in CONST_VOD_CAPABILITY:
            data2 = api_get_vod_by_type(type=entry['file'], character=None,
                                        genre=None, subscription_filter=None)

            for currow in data2:
                row = data2[currow]
                vod_links[row['id']] = {}
                vod_links[row['id']]['seasons'] = row['seasons']
                vod_links[row['id']]['duration'] = row['duration']
                vod_links[row['id']]['desc'] = row['description']
                vod_links[row['id']]['type'] = row['type']

    for currow in list(data):
        if currow == "moviesAndSeries":
            type = 'vod'
        else:
            type = 'program'

        for row in data[currow]['entries']:
            if not check_key(row, 'id') or not check_key(row, 'title'):
                continue

            item = {}
            id = row['id']
            label = row['title']
            description = ''
            duration = 0
            program_image = ''
            program_image_large = ''
            start = ''

            if check_key(row, 'images'):
                program_image = get_image("boxart", row['images'])
                program_image_large = get_image("HighResLandscape", row['images'])

                if program_image_large == '':
                    program_image_large = program_image
                else:
                    program_image_large += '?w=1920&mode=box'

            if type == 'vod':
                if check_key(vod_links, row['id']):
                    description = vod_links[row['id']]['desc']
                    item_type = vod_links[row['id']]['type']
                else:
                    item_type = 'Vod'

                label += " (Movies and Series)"
            else:
                item_type = 'Epg'
                label += " (ReplayTV)"

            # Shows with a matched episode get the episode appended to the
            # description; non-shows get a duration from whichever source is
            # available (row, episodeMatch times, or the VOD lookup).
            if check_key(row, 'groupType') and row['groupType'] == 'show':
                if check_key(row, 'episodeMatch') and check_key(
                        row['episodeMatch'], 'seriesEpisodeNumber') and check_key(
                            row['episodeMatch'], 'secondaryTitle'):
                    if len(description) == 0:
                        description += label

                    season = ''

                    if check_key(row, 'seriesNumber'):
                        season = "S" + row['seriesNumber']

                    description += " Episode Match: {season}E{episode} - {secondary}".format(
                        season=season,
                        episode=row['episodeMatch']['seriesEpisodeNumber'],
                        secondary=row['episodeMatch']['secondaryTitle'])
            else:
                if check_key(row, 'duration'):
                    duration = int(row['duration'])
                elif check_key(row, 'episodeMatch') and check_key(
                        row['episodeMatch'], 'startTime') and check_key(
                            row['episodeMatch'], 'endTime'):
                    duration = int(
                        int(row['episodeMatch']['endTime']) -
                        int(row['episodeMatch']['startTime'])) // 1000
                    id = row['episodeMatch']['id']
                elif check_key(vod_links, row['id']) and check_key(
                        vod_links[row['id']], 'duration'):
                    duration = vod_links[row['id']]['duration']

            item['id'] = id
            item['title'] = label
            item['description'] = description
            item['duration'] = duration
            item['type'] = item_type
            item['icon'] = program_image_large
            item['start'] = start

            if type == "vod":
                items_vod.append(item)
            else:
                items_program.append(item)

    # Interleave program and VOD results pairwise, then append the leftovers.
    num = min(len(items_program), len(items_vod))
    items = [None] * (num * 2)
    items[::2] = items_program[:num]
    items[1::2] = items_vod[:num]
    items.extend(items_program[num:])
    items.extend(items_vod[num:])

    return items
def create_epg():
    """Assemble epg.xml from the per-channel cached XML fragments.

    Walks the saved channel order, resolves each channel's replay or live
    source addon, concatenates the cached per-channel EPG XML, and writes a
    single XMLTV file ('epg.xml') with <channel> headers plus programme data.
    Per-channel failures are swallowed so one bad entry cannot break the file.
    """
    order = load_order(profile_id=1)
    prefs = load_prefs(profile_id=1)

    new_xml_start = '<?xml version="1.0" encoding="utf-8" ?><tv generator-info-name="{addonid}">'.format(addonid=ADDON_ID)
    new_xml_end = '</tv>'
    new_xml_channels = ''
    new_xml_epg = ''
    addon_id = ''

    for currow in order:
        try:
            ch_no = str(order[currow])
            row = prefs[str(currow)]

            # Skip channels that are not fully configured for live playback.
            if not check_key(row, 'live') or not check_key(row, 'live_channelid') or not check_key(row, 'live_addonid') or not check_key(row, 'channelname') or not int(row['live']) == 1:
                continue

            live_id = str(row['live_channelid'])

            if not check_key(row, 'replay'):
                replay = 0
            else:
                replay = int(row['replay'])

            if not check_key(row, 'replay_channelid'):
                replay_id = ''
            else:
                replay_id = str(row['replay_channelid'])

            if not check_key(row, 'replay_addonid'):
                replay_addonid = ''
            else:
                replay_addonid = str(row['replay_addonid'])

            # Prefer the replay source when it is enabled and fully specified;
            # otherwise fall back to the live source addon's cache.
            if replay == 1 and len(replay_id) > 0 and len(replay_addonid) > 0:
                directory = os.path.join("cache", replay_addonid.replace('plugin.video.', ''), "")
                replay_id = encode32(replay_id)
                addon_id = replay_addonid
                data = load_file(os.path.join(directory, replay_id + '.xml'), ext=False, isJSON=False)
            else:
                directory = os.path.join("cache", str(row['live_addonid'].replace('plugin.video.', '')), "")
                live_id = encode32(live_id)
                addon_id = row['live_addonid']
                data = load_file(os.path.join(directory, live_id + '.xml'), ext=False, isJSON=False)

            if data:
                if len(addon_id) > 0:
                    # Swap image URLs for the small or large variant per setting;
                    # best-effort — missing CONST_IMAGES entries are ignored.
                    try:
                        if settings.getBool('use_small_images', default=False, addon=addon_id):
                            data = data.replace(CONST_IMAGES[addon_id]['replace'], CONST_IMAGES[addon_id]['small'])
                        else:
                            data = data.replace(CONST_IMAGES[addon_id]['replace'], CONST_IMAGES[addon_id]['large'])
                    except:
                        pass

                new_xml_epg += data

                try:
                    if replay == 1 and len(replay_id) > 0 and len(replay_addonid) > 0:
                        new_xml_channels += '<channel id="{channelid}"><display-name>{channelname}</display-name><icon src="{channelicon}"></icon><desc></desc></channel>'.format(channelid=str(row['replay_channelid']), channelname=str(row['channelname']), channelicon=str(row['channelicon']))
                    else:
                        new_xml_channels += '<channel id="{channelid}"><display-name>{channelname}</display-name><icon src="{channelicon}"></icon><desc></desc></channel>'.format(channelid=str(row['live_channelid']), channelname=str(row['channelname']), channelicon=str(row['channelicon']))
                except:
                    pass
        except:
            pass

    write_file(file='epg.xml', data=new_xml_start + new_xml_channels + new_xml_epg + new_xml_end, isJSON=False)
def api_search(query):
    """Search the provider API and return matching ReplayTV items.

    Results are cached on disk for half a day. Returns None when no session is
    available or the response has no 'collection'; otherwise a list of item
    dicts (id, title, description, duration, type, icon, start). VOD matches
    are collected but intentionally not returned (see note at the bottom).
    """
    if not api_get_session():
        return None

    profile_settings = load_profile(profile_id=1)

    queryb32 = encode32(query)
    file = os.path.join("cache", "{query}.json".format(query=queryb32))

    headers = {
        'Authorization': 'Bearer {token}'.format(token=profile_settings['session_token'])
    }

    search_url = '{api_url}/search?query={query}'.format(
        api_url=CONST_URLS['api'], query=quote_plus(query))

    # Consistency fix: use os.path.join like every other cache check instead
    # of raw 'ADDON_PROFILE + file' concatenation, which silently produced a
    # wrong path if ADDON_PROFILE lacked a trailing separator.
    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=search_url, type='get', headers=headers,
                                data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data and check_key(data, 'collection'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data, 'collection'):
        return None

    items = []
    items_vod = []
    items_program = []

    for currow in data['collection']:
        # VOD groups are skipped when disabled in settings; EPG-labelled
        # groups are always skipped (preserved original behavior).
        if not settings.getBool(
                'showMoviesSeries') and currow['label'] == 'sg.ui.search.vod':
            continue
        elif currow['label'] == 'sg.ui.search.epg':
            continue

        if currow['label'] == 'sg.ui.search.vod':
            type = 'vod'
        else:
            type = 'program'

        for row in currow['assets']:
            if not check_key(row, 'id') or not check_key(row, 'title'):
                continue

            item = {}
            id = row['id']
            label = row['title']
            description = ''
            duration = 0
            program_image = ''
            program_image_large = ''
            start = ''

            if check_key(row, 'images'):
                program_image = row['images'][0]['url']
                program_image_large = row['images'][0]['url']

            if type == 'vod':
                item_type = 'Vod'
                label += " (VOD)"
            else:
                item_type = 'Epg'
                label += " (ReplayTV)"

            if check_key(row, 'params'):
                if check_key(row['params'], 'duration'):
                    duration = int(row['params']['duration'])
                elif check_key(row['params'], 'start') and check_key(
                        row['params'], 'end'):
                    # BUG FIX: the original subtracted 'end' from itself, so
                    # the computed duration was always 0. Duration is the
                    # digit-stripped end minus the digit-stripped start.
                    duration = int(re.sub(
                        '[^0-9]', '', row['params']['end'])) - int(
                            re.sub('[^0-9]', '', row['params']['start']))

            item['id'] = id
            item['title'] = label
            item['description'] = description
            item['duration'] = duration
            item['type'] = item_type
            item['icon'] = program_image_large
            item['start'] = start

            if type == "vod":
                items_vod.append(item)
            else:
                items_program.append(item)

    # Interleaving of program and VOD hits is intentionally disabled here;
    # only ReplayTV (program) matches are returned.
    items = items_program

    return items
def api_get_vod_by_type(type, character, genre, subscription_filter, menu=0):
    """Download/unpack the VOD catalogue for 'type' and filter it locally.

    Returns None on download/extract failure, the raw catalogue when menu == 1,
    or an OrderedDict of rows passing the genre/character/subscription filters.
    """
    menu = int(menu)

    tmp_dir = os.path.join(ADDON_PROFILE, 'tmp')
    if not os.path.isdir(tmp_dir):
        os.makedirs(tmp_dir)

    days = CONST_MOD_CACHE[str(type)] if check_key(CONST_MOD_CACHE, str(type)) else 0.5

    type = encode32(txt=type)
    vod_url = '{dut_epg_url}/{type}.zip'.format(dut_epg_url=CONST_DUT_EPG, type=type)
    file = os.path.join("cache", "{type}.json".format(type=type))
    tmp = os.path.join(ADDON_PROFILE, 'tmp', "{type}.zip".format(type=type))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=days):
        data = load_file(file=file, isJSON=True)
    else:
        resp = Session().get(vod_url, stream=True)

        if resp.status_code != 200:
            resp.close()
            return None

        # Stream the zip to a temp file, unpack into the cache dir, then load
        # the freshly-extracted JSON.
        with open(tmp, 'wb') as f:
            for chunk in resp.iter_content(chunk_size=SESSION_CHUNKSIZE):
                f.write(chunk)

        resp.close()

        if not extract_zip(file=tmp, dest=os.path.join(ADDON_PROFILE, "cache", "")):
            return None

        data = load_file(file=file, isJSON=True)

    if menu == 1:
        return data

    filtered = OrderedDict()

    for currow in data:
        row = data[currow]
        id = row['id']

        if genre and genre.startswith('C') and genre[1:].isnumeric():
            # 'C<number>' genres select by collection rather than category.
            if not row['vidcollection'] or not genre in row['vidcollection']:
                continue
        elif genre:
            if not row['category'] or not genre in row['category']:
                continue

        if character and not row['first'] == character:
            continue

        if subscription_filter and not int(id) in subscription_filter:
            continue

        filtered[currow] = row

    return filtered
def api_search(query):
    """Search the combined v7 endpoint and return matches keyed by id.

    Results are cached on disk for half a day. Returns None when the download
    yields nothing, otherwise a dict of item dicts built from the 'Series'
    and 'Videos' sections of the response (Dutch field names come from the
    provider API: Omschrijving=description, Duur=duration,
    Uitzenddatum=broadcast date, ProgrammaAfbeelding/AfbeeldingUrl=image).
    """
    type = "search_{query}".format(query=query)
    type = encode32(txt=type)
    file = os.path.join("cache", "{type}.json".format(type=type))

    if not is_file_older_than_x_days(file=os.path.join(ADDON_PROFILE, file), days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        search_url = '{base_url}/v7/search/combined?searchterm={query}&maxSerieResults=99999999&maxVideoResults=99999999&expand=true&expandlist=true'.format(
            base_url=CONST_URLS['api'], query=quote_plus(query))
        download = api_download(url=search_url, type='get', headers=None,
                                data=None, json_data=False, return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data:
            write_file(file=file, data=data, isJSON=True)

    if not data:
        return None

    items = {}

    if check_key(data, 'Series'):
        for row in data['Series']:
            if not check_key(row, 'SerieId') or not check_key(row, 'Name'):
                continue

            items[row['SerieId']] = {}

            desc = ''
            image = ''

            if check_key(row, 'Omschrijving'):
                desc = row['Omschrijving']

            if check_key(row, 'ProgrammaAfbeelding'):
                image = row['ProgrammaAfbeelding']

                # Relative image paths are rewritten onto the image CDN;
                # legacy thumbnails live under a dedicated prefix.
                if not 'http' in image:
                    image_split = image.rsplit('/', 1)

                    if len(image_split) == 2:
                        image = '{image_url}/legacy/thumbnails/{image}'.format(
                            image_url=CONST_URLS['image'],
                            image=image.rsplit('/', 1)[1])
                    else:
                        image = '{image_url}/{image}'.format(
                            image_url=CONST_URLS['image'], image=image)

            # NOTE(review): series entries carry no 'start' key, unlike the
            # video entries below — confirm downstream consumers tolerate this.
            items[row['SerieId']]['id'] = row['SerieId']
            items[row['SerieId']]['title'] = row['Name']
            items[row['SerieId']]['description'] = desc
            items[row['SerieId']]['duration'] = 0
            items[row['SerieId']]['type'] = 'Serie'
            items[row['SerieId']]['icon'] = image

    if check_key(data, 'Videos'):
        for row in data['Videos']:
            # A usable video needs an id, a type, and at least one title
            # (its own or its series').
            if not check_key(row, 'Video') or not check_key(
                    row['Video'], 'VideoId') or not check_key(
                        row['Video'], 'VideoType') or (
                            not check_key(row, 'Titel') and
                            (not check_key(row, 'Serie') or not check_key(row['Serie'], 'Titel'))):
                continue

            id = row['Video']['VideoId']
            items[id] = {}

            if row['Video']['VideoType'] == 'VOD':
                type = 'Vod'
            elif row['Video']['VideoType'] == 'Replay':
                type = 'Epg'
            elif row['Video']['VideoType'] == 'Serie':
                type = 'Serie'
            else:
                continue

            basetitle = ''
            desc = ''
            start = ''
            duration = 0
            image = ''

            if check_key(row, 'Serie') and check_key(row['Serie'], 'Titel'):
                basetitle = row['Serie']['Titel']

            # Combine the series title and episode title as "Series: Episode".
            if check_key(row, 'Titel'):
                if len(row['Titel']) > 0 and basetitle != row['Titel']:
                    if len(basetitle) > 0:
                        basetitle += ": " + row['Titel']
                    else:
                        basetitle = row['Titel']

            if check_key(row, 'Omschrijving'):
                desc = row['Omschrijving']

            if check_key(row, 'Duur'):
                duration = row['Duur']

            if check_key(row, 'AfbeeldingUrl'):
                image = row['AfbeeldingUrl']

                if not 'http' in image:
                    image_split = image.rsplit('/', 1)

                    if len(image_split) == 2:
                        image = '{image_url}/legacy/thumbnails/{image}'.format(
                            image_url=CONST_URLS['image'],
                            image=image.rsplit('/', 1)[1])
                    else:
                        image = '{image_url}/{image}'.format(
                            image_url=CONST_URLS['image'], image=image)

            if check_key(row, 'Uitzenddatum'):
                start = row['Uitzenddatum']

            items[id]['id'] = id
            items[id]['title'] = basetitle
            items[id]['description'] = desc
            items[id]['duration'] = duration
            items[id]['type'] = type
            items[id]['icon'] = image
            items[id]['start'] = start

    return items