def api_get_channels():
    """Refresh the cached channel list from the EPG server.

    Downloads ``a.channels.zip`` (at most once per day) into the addon's
    ``tmp`` directory, extracts it into the ``cache`` directory and clears
    the connector cache.

    Returns:
        bool: True when the cache is still fresh or was refreshed
        successfully, False on download or extraction failure.
    """
    # Make sure both the tmp (download) and cache (extraction) directories exist.
    directory = os.path.dirname(ADDON_PROFILE + 'tmp' + os.sep + 'a.channels.zip')
    if not os.path.exists(directory):
        os.makedirs(directory)

    directory = os.path.dirname(ADDON_PROFILE + "cache" + os.sep + "a.channels.json")
    if not os.path.exists(directory):
        os.makedirs(directory)

    channels_url = '{dut_epg_url}/a.channels.zip'.format(dut_epg_url=CONST_DUT_EPG_BASE)
    cache_file = "cache" + os.sep + "a.channels.json"  # renamed: `file` shadows a builtin
    tmp = ADDON_PROFILE + 'tmp' + os.sep + 'a.channels.zip'

    # Cached copy is still fresh: nothing to do.
    if not is_file_older_than_x_days(file=ADDON_PROFILE + cache_file, days=1):
        return True

    resp = requests.get(channels_url, stream=True)
    if resp.status_code != 200:
        resp.close()
        return False

    with open(tmp, 'wb') as f:
        for chunk in resp.iter_content(chunk_size=SESSION_CHUNKSIZE):
            f.write(chunk)
    resp.close()

    if not os.path.isfile(tmp):
        return False

    from zipfile import ZipFile

    # Extraction fallback chain: plain ZipFile, then retry after repairing a
    # truncated archive, then the bundled pure-Python ZipFile implementation.
    # `except Exception` replaces the original bare excepts so SystemExit /
    # KeyboardInterrupt are no longer swallowed.
    try:
        with ZipFile(tmp, 'r') as zipObj:
            zipObj.extractall(ADDON_PROFILE + "cache" + os.sep)
    except Exception:
        try:
            fixBadZipfile(tmp)
            with ZipFile(tmp, 'r') as zipObj:
                zipObj.extractall(ADDON_PROFILE + "cache" + os.sep)
        except Exception:
            try:
                from resources.lib.base.l1.zipfile import ZipFile as ZipFile2
                with ZipFile2(tmp, 'r') as zipObj:
                    zipObj.extractall(ADDON_PROFILE + "cache" + os.sep)
            except Exception:
                return False

    clear_cache_connector()
    return True
def api_get_epg_by_addon(addon):
    """Download and extract the EPG archive for *addon* into its cache dir.

    The archive is re-downloaded at most every 12 hours; when the local
    temporary copy is still fresh the function short-circuits.

    Args:
        addon: addon identifier; its first element/character selects the
            EPG type used in the download URL — TODO confirm intended shape.

    Returns:
        bool: True when a new archive was downloaded and extracted,
        False when the cache is fresh or the refresh failed.
    """
    epg_type = addon[0]  # renamed: `type` shadows a builtin

    # Ensure the tmp (download) and per-addon cache (extraction) dirs exist.
    directory = os.path.dirname(ADDON_PROFILE + 'tmp' + os.sep + 'epg.zip')
    if not os.path.exists(directory):
        os.makedirs(directory)

    directory = os.path.dirname(ADDON_PROFILE + "cache" + os.sep + str(addon) + os.sep + 'epg.zip')
    if not os.path.exists(directory):
        os.makedirs(directory)

    epg_url = '{dut_epg_url}/{type}.epg.zip'.format(dut_epg_url=CONST_DUT_EPG_BASE, type=epg_type)
    tmp = ADDON_PROFILE + 'tmp' + os.sep + '{type}.epg.zip'.format(type=epg_type)

    # Fresh copy already downloaded within the last half day: no refresh done.
    if not is_file_older_than_x_days(file=tmp, days=0.5):
        return False

    resp = requests.get(epg_url, stream=True)
    if resp.status_code != 200:
        resp.close()
        return False

    with open(tmp, 'wb') as f:
        for chunk in resp.iter_content(chunk_size=SESSION_CHUNKSIZE):
            f.write(chunk)
    resp.close()

    if not os.path.isfile(tmp):
        return False

    from zipfile import ZipFile

    # Extraction fallback chain; narrow excepts replace the original bare ones.
    try:
        with ZipFile(tmp, 'r') as zipObj:
            zipObj.extractall(ADDON_PROFILE + "cache" + os.sep + str(addon) + os.sep)
    except Exception:
        try:
            fixBadZipfile(tmp)
            with ZipFile(tmp, 'r') as zipObj:
                zipObj.extractall(ADDON_PROFILE + "cache" + os.sep + str(addon) + os.sep)
        except Exception:
            try:
                from resources.lib.base.l1.zipfile import ZipFile as ZipFile2
                with ZipFile2(tmp, 'r') as zipObj:
                    zipObj.extractall(ADDON_PROFILE + "cache" + os.sep + str(addon) + os.sep)
            except Exception:
                return False

    return True
def api_get_vod_by_type(type, character, genre, subscription_filter):
    """Load the VOD catalogue for a category and filter it.

    Fetches ``<type>.zip`` from the EPG server at most every 12 hours,
    extracts the JSON payload into the cache and then filters the rows.

    Args:
        type: VOD category name (base32-encoded to build the remote URL).
        character: when truthy, keep only rows whose ``first`` equals it.
        genre: when truthy, keep only rows whose ``category`` contains it
            (rows with an empty category pass unfiltered).
        subscription_filter: when truthy, keep only rows whose ``id`` is in it.

    Returns:
        OrderedDict of the matching rows, or None on failure.
    """
    if not os.path.isdir(ADDON_PROFILE + 'tmp'):
        os.makedirs(ADDON_PROFILE + 'tmp')

    # The remote file name is the base32 encoding of the category name.
    encodedBytes = base64.b32encode(type.encode("utf-8"))
    type = str(encodedBytes, "utf-8")

    vod_url = '{dut_epg_url}/{type}.zip'.format(dut_epg_url=CONST_DUT_EPG, type=type)
    cache_file = "cache" + os.sep + "{type}.json".format(type=type)  # renamed: `file` shadows a builtin
    tmp = ADDON_PROFILE + 'tmp' + os.sep + "{type}.zip".format(type=type)

    if not is_file_older_than_x_days(file=ADDON_PROFILE + cache_file, days=0.5):
        data = load_file(file=cache_file, isJSON=True)
    else:
        resp = Session().get(vod_url, stream=True)
        if resp.status_code != 200:
            resp.close()
            return None

        with open(tmp, 'wb') as f:
            for chunk in resp.iter_content(chunk_size=SESSION_CHUNKSIZE):
                f.write(chunk)
        resp.close()

        if not os.path.isfile(tmp):
            return None

        from zipfile import ZipFile

        # Extraction fallback chain; narrow excepts replace the bare ones.
        try:
            with ZipFile(tmp, 'r') as zipObj:
                zipObj.extractall(ADDON_PROFILE + "cache" + os.sep)
        except Exception:
            try:
                fixBadZipfile(tmp)
                with ZipFile(tmp, 'r') as zipObj:
                    zipObj.extractall(ADDON_PROFILE + "cache" + os.sep)
            except Exception:
                try:
                    from resources.lib.base.l1.zipfile import ZipFile as ZipFile2
                    with ZipFile2(tmp, 'r') as zipObj:
                        zipObj.extractall(ADDON_PROFILE + "cache" + os.sep)
                except Exception:
                    return None

        if os.path.isfile(ADDON_PROFILE + cache_file):
            data = load_file(file=cache_file, isJSON=True)
        else:
            return None

    data2 = OrderedDict()

    for currow in data:
        row = data[currow]

        # Genre filter only applies when the row actually has a category.
        if genre and row['category'] and genre not in row['category']:
            continue

        if character and row['first'] != character:
            continue

        if subscription_filter and int(row['id']) not in subscription_filter:
            continue

        data2[currow] = row

    return data2
def api_get_list_by_first(first, start, end, channels):
    """Load the programme list for an initial letter and filter it.

    Fetches ``list.zip`` at most every 12 hours, extracts ``list.json`` into
    the cache, then keeps only the rows under key ``first`` whose time window
    covers [start, end] and that are broadcast on one of *channels*.

    Args:
        first: initial-letter key into the cached list data.
        start: window start; rows must have ``startl`` < start.
        end: window end; rows must have ``starth`` > end.
        channels: iterable of channel ids; a row must share at least one.

    Returns:
        OrderedDict of the matching rows, or None on failure.
    """
    if not os.path.isdir(ADDON_PROFILE + 'tmp'):
        os.makedirs(ADDON_PROFILE + 'tmp')

    list_url = '{dut_epg_url}/list.zip'.format(dut_epg_url=CONST_DUT_EPG)
    tmp = ADDON_PROFILE + 'tmp' + os.sep + 'list.zip'
    cache_file = "cache" + os.sep + "list.json"  # renamed: `file` shadows a builtin

    if not is_file_older_than_x_days(file=ADDON_PROFILE + cache_file, days=0.5):
        data = load_file(file=cache_file, isJSON=True)
    else:
        resp = Session().get(list_url, stream=True)
        if resp.status_code != 200:
            resp.close()
            return None

        with open(tmp, 'wb') as f:
            for chunk in resp.iter_content(chunk_size=SESSION_CHUNKSIZE):
                f.write(chunk)
        resp.close()

        if not os.path.isfile(tmp):
            return None

        from zipfile import ZipFile

        # Extraction fallback chain; narrow excepts replace the bare ones.
        try:
            with ZipFile(tmp, 'r') as zipObj:
                zipObj.extractall(ADDON_PROFILE + "cache" + os.sep)
        except Exception:
            try:
                fixBadZipfile(tmp)
                with ZipFile(tmp, 'r') as zipObj:
                    zipObj.extractall(ADDON_PROFILE + "cache" + os.sep)
            except Exception:
                try:
                    from resources.lib.base.l1.zipfile import ZipFile as ZipFile2
                    with ZipFile2(tmp, 'r') as zipObj:
                        zipObj.extractall(ADDON_PROFILE + "cache" + os.sep)
                except Exception:
                    return None

        if os.path.isfile(ADDON_PROFILE + cache_file):
            data = load_file(file=cache_file, isJSON=True)
        else:
            return None

    data2 = OrderedDict()
    data = data[str(first)]

    for currow in data:
        row = data[currow]

        # Best-effort filters: malformed rows pass through (original behavior).
        try:
            if not int(row['startl']) < start or not int(row['starth']) > end:
                continue
        except Exception:
            pass

        try:
            if not any(station in channels for station in row['channels']):
                continue
        except Exception:
            pass

        data2[currow] = row

    return data2
def api_get_epg_by_addon(addon):
    """Download and extract the EPG archive for *addon* into its cache dir.

    Variant that also checks the Ziggo video addon's profile to choose the
    v3 EPG archive when enabled. Re-downloads at most every 12 hours.

    Fixes over the previous revision:
      * ``unicode()`` (Python 2 only — NameError on Python 3) replaced with
        ``str()``, matching the sibling implementation.
      * the HTTP response is now closed on every path.

    Args:
        addon: addon identifier; its first element/character selects the
            EPG type used in the download URL — TODO confirm intended shape.

    Returns:
        bool: True when a new archive was downloaded and extracted,
        False when the cache is fresh or the refresh failed.
    """
    epg_type = addon[0]  # renamed: `type` shadows a builtin

    # Ensure the tmp (download) and per-addon cache (extraction) dirs exist.
    directory = os.path.dirname(ADDON_PROFILE + 'tmp' + os.sep + 'epg.zip')
    if not os.path.exists(directory):
        os.makedirs(directory)

    directory = os.path.dirname(ADDON_PROFILE + "cache" + os.sep + str(addon) + os.sep + 'epg.zip')
    if not os.path.exists(directory):
        os.makedirs(directory)

    epg_url = '{dut_epg_url}/{type}.epg.zip'.format(dut_epg_url=CONST_DUT_EPG_BASE, type=epg_type)

    # Ziggo: switch to the v3 EPG archive when the video addon's profile says so.
    if addon == 'ziggo':
        VIDEO_ADDON_PROFILE = ADDON_PROFILE.replace(ADDON_ID, 'plugin.video.ziggo')
        profile = load_file(VIDEO_ADDON_PROFILE + 'profile.json', ext=True, isJSON=True)

        try:
            if int(profile['v3']) == 1:
                epg_url = '{dut_epg_url}/{type}.epg.v3.zip'.format(dut_epg_url=CONST_DUT_EPG_BASE, type=epg_type)
        except Exception:
            pass

    tmp = ADDON_PROFILE + 'tmp' + os.sep + '{type}.epg.zip'.format(type=epg_type)

    # Fresh copy already downloaded within the last half day: no refresh done.
    if not is_file_older_than_x_days(file=tmp, days=0.5):
        return False

    resp = requests.get(epg_url, stream=True)
    if resp.status_code != 200:
        resp.close()
        return False

    with open(tmp, 'wb') as f:
        for chunk in resp.iter_content(chunk_size=SESSION_CHUNKSIZE):
            f.write(chunk)
    resp.close()

    if not os.path.isfile(tmp):
        return False

    from zipfile import ZipFile

    # Extraction fallback chain; narrow excepts replace the bare ones.
    try:
        with ZipFile(tmp, 'r') as zipObj:
            zipObj.extractall(ADDON_PROFILE + "cache" + os.sep + str(addon) + os.sep)
    except Exception:
        try:
            fixBadZipfile(tmp)
            with ZipFile(tmp, 'r') as zipObj:
                zipObj.extractall(ADDON_PROFILE + "cache" + os.sep + str(addon) + os.sep)
        except Exception:
            try:
                from resources.lib.base.l1.zipfile import ZipFile as ZipFile2
                with ZipFile2(tmp, 'r') as zipObj:
                    zipObj.extractall(ADDON_PROFILE + "cache" + os.sep + str(addon) + os.sep)
            except Exception:
                return False

    return True
def _download(url, dst, dst_path, arch, md5=None):
    """Download a Widevine CDM payload, unpack it and install it at *dst_path*.

    Shows a progress dialog. On ARM the payload is a base64-encoded blob that
    is decoded directly to *dst_path*; otherwise it is a zip archive from
    which *dst* is extracted and copied into place.

    Args:
        url: download URL; the last path segment is used as the display name.
        dst: file name expected inside the zip archive (non-ARM).
        dst_path: final installation path.
        arch: architecture string; 'arm' in it selects the base64 path.
        md5: optional expected MD5 of the installed file.

    Returns:
        bool: True on success, False when skipped/cancelled/failed.

    Raises:
        InputStreamError: on HTTP failure or MD5 mismatch.
    """
    filename = url.split('/')[-1]
    tmp = ADDON_PROFILE + "tmp" + os.sep + "widevine"
    downloaded = 0

    # An existing file with the expected MD5 needs no download; otherwise ask
    # the user before replacing it.
    if os.path.exists(dst_path):
        if md5 and md5sum(dst_path) == md5:
            #log.debug('MD5 of local file {} same. Skipping download'.format(filename))
            return True
        elif not gui.yes_no(_.NEW_IA_VERSION):
            return False
        else:
            if os.path.exists(dst_path):
                os.remove(dst_path)

    with gui.progress(_(_.IA_DOWNLOADING_FILE, url=filename), heading=_.IA_WIDEVINE_DRM) as progress:
        resp = Session().get(url, stream=True)
        if resp.status_code != 200:
            resp.close()
            raise InputStreamError(_(_.ERROR_DOWNLOADING_FILE, filename=filename))

        total_length = float(resp.headers.get('content-length', 1))

        with open(tmp, 'wb') as f:
            for chunk in resp.iter_content(chunk_size=SESSION_CHUNKSIZE):
                f.write(chunk)
                downloaded += len(chunk)
                percent = int(downloaded * 100 // int(total_length))

                # NOTE(review): on cancel the original only closed the dialog
                # and kept downloading; the later iscanceled() check aborts
                # before install. Behavior preserved here.
                if progress.iscanceled():
                    progress.close()

                progress.update(percent)

        resp.close()

        if os.path.isfile(tmp):
            if 'arm' in arch:
                # ARM payload is base64-encoded: decode straight to dst_path.
                with open(tmp, "rb") as encoded_file:
                    import base64
                    decoded_string = base64.b64decode(encoded_file.read())

                with open(dst_path, "wb") as decoded_file:
                    decoded_file.write(decoded_string)
            else:
                from zipfile import ZipFile

                # Extraction fallback chain; narrow excepts replace bare ones.
                try:
                    with ZipFile(tmp, 'r') as zipObj:
                        zipObj.extractall(ADDON_PROFILE + "tmp" + os.sep)
                except Exception:
                    try:
                        fixBadZipfile(tmp)
                        with ZipFile(tmp, 'r') as zipObj:
                            zipObj.extractall(ADDON_PROFILE + "tmp" + os.sep)
                    except Exception:
                        try:
                            from resources.lib.base.l1.zipfile import ZipFile as ZipFile2
                            with ZipFile2(tmp, 'r') as zipObj:
                                zipObj.extractall(ADDON_PROFILE + "tmp" + os.sep)
                        except Exception:
                            return False

                if os.path.isfile(ADDON_PROFILE + "tmp" + os.sep + dst):
                    shutil.copy(ADDON_PROFILE + "tmp" + os.sep + dst, dst_path)

            # Clean up everything left in the tmp directory.
            for leftover in glob.glob(ADDON_PROFILE + "tmp" + os.sep + "*"):
                os.remove(leftover)

        if progress.iscanceled():
            return False

        # Fix: only verify when an expected MD5 was supplied; the original
        # compared against None and always raised for callers without an md5.
        checksum = md5sum(dst_path)
        if md5 and checksum != md5:
            if os.path.exists(dst_path):
                os.remove(dst_path)

            raise InputStreamError(_(_.MD5_MISMATCH, filename=filename, local_md5=checksum, remote_md5=md5))

        return True