def updateTitleDb(force=False):
    """Refresh the local titledb files.

    Tries the compressed tinfoil.media db first; on any failure falls back
    to downloading the individual region/language files from the github repo.

    force -- refresh even when Config.autoUpdateTitleDb is disabled.
    """
    if not Config.autoUpdateTitleDb and not force:
        return

    # was: try/os.mkdir/bare except — makedirs(exist_ok=True) is the idiom
    os.makedirs('titledb', exist_ok=True)

    Print.info('downloading titledb files')

    try:
        with open('titledb/db.bin', 'wb') as f:
            # checkSize=False: the server does not report a reliable length
            download('http://tinfoil.media/repo/db/db.bin', f, checkSize=False)

        decompressZstd('titledb/db.bin', 'titledb/db.nza')

        container = Fs.Nsp('titledb/db.nza')
        container.open('titledb/db.nza', 'rb')

        # unpack every file stored in the container into titledb/
        for nspf in container:
            with open(os.path.join('titledb', nspf._path), 'wb') as f:
                while not nspf.eof():
                    f.write(nspf.read(8 * 1000 * 1000))
        container.close()

        try:
            os.remove('titledb/db.nza')
        except OSError:
            # best-effort cleanup of the intermediate archive
            pass

        refreshRegions(save=False)
        importRegion(Config.region, Config.language)
        return
    except BaseException as e:
        Print.error('error getting tinfoil.io titledb: ' + str(e))

    # fallback: fetch the individual files from the github titledb repo
    fileList = ['demos.txt', 'dlcNames.txt', 'retailOnly.txt', 'ranks.txt']

    for region, languages in Config.regionLanguages().items():
        for language in languages:
            fileList.append('%s.%s.json' % (region.upper(), language.lower()))

    for path in fileList:
        downloadRepoFile(path)

    refreshRegions(save=False)
    importRegion(Config.region, Config.language)
def _ftpsync(url):
    """Pull every remote file under `url` that is missing locally.

    Builds a queue of Fs.Nsp candidates, then drains it with
    Config.threads pullWorker threads (or inline when threads == 0).
    """
    # LIFO vs FIFO lets the user reverse processing order via config
    if Config.reverse:
        q = queue.LifoQueue()
    else:
        q = queue.Queue()

    fileList = []

    for f in Fs.driver.openDir(url).ls():
        if f.isFile():
            fileList.append(f.url)

    for path in fileList:
        try:
            #print('checking ' + path)
            nsp = Fs.Nsp()
            # title metadata is parsed from the (unquoted) filename
            nsp.setPath(urllib.parse.unquote(path))
            nsp.downloadPath = path
            if not nsp.titleId:
                continue
            # queue when the title is unknown, or when we have no file of this
            # extension yet and the title is still active
            if not Titles.contains(nsp.titleId) or (not len(Titles.get(nsp.titleId).getFiles(path[-3:])) and Titles.get(nsp.titleId).isActive(skipKeyCheck=True)):
                # an .nsx is only a placeholder — skip it when a real
                # .nsp/.nsz for the same title already exists
                if path[-3:] == 'nsx':
                    if len(Titles.get(nsp.titleId).getFiles('nsp')) or len(Titles.get(nsp.titleId).getFiles('nsz')):
                        continue
                q.put(nsp)
        except BaseException as e:
            Print.error(str(e))
            # raise #TODO

    numThreads = Config.threads
    threads = []

    s = Status.create(q.qsize(), 'Total File Pulls')

    if numThreads > 0:
        Print.info('creating pull threads, items: ' + str(q.qsize()))
        for i in range(numThreads):
            t = threading.Thread(target=pullWorker, args=[q, s])
            t.daemon = True
            t.start()
            threads.append(t)
        for t in threads:
            t.join()
    else:
        # no worker threads configured: drain the queue on this thread
        pullWorker(q, s)

    s.close()
def route(request, response, verb='get'):
    """Dispatch an HTTP request to a mapped handler method.

    The handler name is built as verb + capitalized second path component
    (e.g. 'get' + 'info' -> 'getInfo') and looked up on the object that
    mappings[first component] points to; Response404 is the fallback.

    Returns True when a handler was invoked, False otherwise.
    """
    try:
        # BUG FIX: bits[1] was read after only checking len > 0, raising
        # IndexError (masked by the broad except) for one-component paths
        if len(request.bits) > 1 and request.bits[0] in mappings:
            i = request.bits[1]
            methodName = verb + i[0].capitalize() + i[1:]
            Print.info('routing to ' + methodName)
            method = getattr(mappings[request.bits[0]], methodName, Response404)
            method(request, response, **request.query)
            return True
    except BaseException as e:
        Print.error('route exception: ' + str(e))
        # was `return None` — normalized to False (both falsy to callers)
        return False
    return False
def AllowedToWriteOutfile(filePath, targetFileExtension, targetDict, removeOld, overwrite, parseCnmt):
    """Decide whether filePath may be written into the target directory.

    targetDict -- (filesAtTarget, alreadyExists): filesAtTarget maps
    lowercased filename -> path; alreadyExists maps titleID -> {version:
    [paths]}. With removeOld, outdated versions at the target are deleted;
    with overwrite, same-version duplicates are deleted.
    Returns False when the file must be skipped, otherwise defers to
    fileNameCheck().
    """
    (filesAtTarget, alreadyExists) = targetDict
    extractedIdVersion = ExtractTitleIDAndVersion(filePath, parseCnmt)
    if extractedIdVersion is None:
        Print.error("Failed to extract TitleID/Version from filename {0}. Use -p to extract from Cnmt.".format(os.path.basename(filePath)))
        return fileNameCheck(filePath, targetFileExtension, filesAtTarget, removeOld, overwrite)
    (titleID, version) = extractedIdVersion
    if removeOld:
        titleIDEntry = alreadyExists.get(titleID)
        if titleIDEntry is not None:
            exitFlag = False
            # BUG FIX: versionEntry was compared against the entry dict
            # itself, deleted via undefined `file`, and `.name` was called
            # on the str returned by os.path.basename
            for versionEntry in list(titleIDEntry):
                if versionEntry < version:
                    for outdatedPath in titleIDEntry[versionEntry]:
                        Print.info('Delete outdated version: {0}'.format(outdatedPath))
                        filesAtTarget.pop(os.path.basename(outdatedPath).lower(), None)
                        os.remove(outdatedPath)
                else:
                    exitFlag = True
            if exitFlag:
                Print.info('{0} with a the same ID and newer version already exists in the output directory.\n'\
                'If you want to process it do not use --rm-old-version!'.format(os.path.basename(filePath)))
                return False
    titleIDEntry = alreadyExists.get(titleID)
    if titleIDEntry is not None:
        for versionEntry in titleIDEntry:
            if versionEntry == version:
                if overwrite:
                    for dupePath in titleIDEntry[versionEntry]:
                        Print.info('Delete dublicate: {0}'.format(dupePath))
                        filesAtTarget.pop(os.path.basename(dupePath).lower(), None)
                        os.remove(dupePath)
                else:
                    Print.info('{0} with the same ID and version already exists in the output directory.\n'\
                    'If you want to overwrite it use the -w parameter!'.format(os.path.basename(filePath)))
                    return False
    return fileNameCheck(filePath, targetFileExtension, filesAtTarget, removeOld, overwrite)
def downloadAll(wait=True):
    """Queue every active title with an available update and download them.

    wait -- when True, block until the queue drains and all workers idle.
    """
    initTitles()
    initFiles()

    global activeDownloads
    global status

    i = 0
    Print.info('Downloading All')

    try:
        for k, t in Titles.items():
            i = i + 1
            if not t.isActive():
                continue
            if t.isUpdateAvailable():
                # skip titles without a plausible id (all-zero placeholder)
                if not t.id or t.id == '0' * 16:
                    Print.warning('no valid id? id: %s version: %s' % (str(t.id), str(t.lastestVersion())))
                    continue
                Titles.queue.add(t.id)
        Print.info("%d titles scanned, downloading %d" % (i, Titles.queue.size()))
        if Titles.queue.size() > 0:
            Titles.save()
            #status = Status.create(Titles.queue.size(), 'Total Download')
            if Config.threads <= 1:
                # single-threaded mode: run the download loop inline
                activeDownloads.append(1)
                downloadThread(0)
            else:
                startDownloadThreads()
            # poll until the queue is empty and no worker reports activity
            while wait and (not Titles.queue.empty() or sum(activeDownloads) > 0):
                time.sleep(1)
                Print.info('%d downloads, is empty %d' % (sum(activeDownloads), int(Titles.queue.empty())))
    except KeyboardInterrupt:
        pass
    except BaseException as e:
        Print.error(str(e))

    Print.info('Downloads finished')
    # if status:
    #     status.close()
    Print.info('DownloadAll finished')
def load(fileName):
    """Parse a keys file and derive per-master-key crypto material.

    Fills the module-level `keys` dict from `name = HEX` lines, then for
    every present master key (0..31) derives a title kek and the three
    key-area keys (application/ocean/system). Errors are logged, not
    raised.
    """
    try:
        global keyAreaKeys
        global titleKeks
        global loadedKeysPath
        loadedKeysPath = fileName

        with open(fileName, encoding="utf8") as f:
            for line in f.readlines():
                # BUG FIX: regex is now a raw string — '\s' in a plain
                # string is an invalid escape sequence (SyntaxWarning on
                # modern Python)
                r = re.match(r'\s*([a-z0-9_]+)\s*=\s*([A-F0-9]+)\s*', line, re.I)
                if r:
                    keys[r.group(1)] = r.group(2)

        aes_kek_generation_source = getKey('aes_kek_generation_source')
        aes_key_generation_source = getKey('aes_key_generation_source')
        titlekek_source = getKey('titlekek_source')
        key_area_key_application_source = getKey('key_area_key_application_source')
        key_area_key_ocean_source = getKey('key_area_key_ocean_source')
        key_area_key_system_source = getKey('key_area_key_system_source')

        keyAreaKeys = []
        for i in range(32):
            keyAreaKeys.append([None, None, None])

        for i in range(32):
            if not existsMasterKey(i):
                continue
            masterKey = getMasterKey(i)
            crypto = aes128.AESECB(masterKey)
            titleKeks.append(crypto.decrypt(titlekek_source).hex())
            keyAreaKeys[i][0] = generateKek(key_area_key_application_source, masterKey, aes_kek_generation_source, aes_key_generation_source)
            keyAreaKeys[i][1] = generateKek(key_area_key_ocean_source, masterKey, aes_kek_generation_source, aes_key_generation_source)
            keyAreaKeys[i][2] = generateKek(key_area_key_system_source, masterKey, aes_kek_generation_source, aes_key_generation_source)
    except BaseException as e:
        Print.error(format_exc())
        Print.error(str(e))
def moveDupe(self):
    """Move self.path into the configured duplicates folder.

    Appends a numeric suffix (.0, .1, ...) when the name is taken.
    Returns True on success, False on failure.
    """
    try:
        baseName = os.path.basename(self.fileName())
        os.makedirs(Config.paths.duplicates, exist_ok=True)
        target = Config.paths.duplicates + baseName
        Print.info('moving duplicate ' + baseName)
        suffix = 0
        # probe until a free name is found
        while os.path.isfile(target):
            target = Config.paths.duplicates + baseName + '.' + str(suffix)
            suffix += 1
        shutil.move(self.path, target)
        return True
    except BaseException as e:
        Print.error('failed to move to duplicates! ' + str(e))
        return False
def isUpdateAvailable(self, localOnly=False):
    """Return True when a version newer than the newest local file exists.

    With no local file at all, any title counts as updatable. Errors are
    logged and treated as 'no update'.
    """
    newest = self.getLatestFile()
    if not newest:
        # nothing on disk yet — everything is an update
        return True
    try:
        remoteVersion = self.lastestVersion(localOnly=localOnly)
        if remoteVersion is None:
            return False
        return int(newest.version) < int(remoteVersion)
    except BaseException as e:
        Print.error('isUpdateAvailable exception %s: %s' % (self.id, str(e)))
    return False
def submitKeys():
    """Verify every owned title key; demote files whose key fails.

    Files of a failing title lose their hasValidTicket flag and are
    re-filed via move(). Lookup/OS errors are logged and skipped; any
    other error is logged and re-raised.
    """
    for titleId, title in Titles.items():
        if not title.key or len(title.getFiles()) == 0:
            continue
        try:
            #blockchain.blockchain.suggest(t.id, t.key)
            if not blockchain.verifyKey(title.id, title.key):
                Print.error('Key verification failed for %s / %s' % (str(title.id), str(title.key)))
                for f in title.getFiles():
                    f.hasValidTicket = False
                    f.move()
        except LookupError as e:
            Print.info(str(e))
        except OSError as e:
            Print.info(str(e))
        except BaseException as e:
            Print.info(str(e))
            raise
def get(self):
    """Return a cached edge token, refreshing it when missing or expired.

    The token is cached on disk; its mtime plus one hour is the expiry.
    Raises IOError when no token can be obtained.
    """
    if not self.token:
        try:
            tokenFile = self.fileName()
            with open(tokenFile, encoding='utf8') as f:
                self.token = f.read().strip()
            # token validity window: one hour past the file's mtime
            self.expires = os.path.getmtime(tokenFile) + (60 * 60)
        except BaseException as e:  # pylint: disable=broad-except
            Print.error(str(e))  # pylint: disable=undefined-variable

    needsRefresh = not self.token or not self.expires or time.time() > self.expires
    if needsRefresh:
        import cdn.Auth  # pylint: disable=redefined-outer-name,import-outside-toplevel,import-error
        self.token = cdn.Auth.getEdgeToken(self.clientId)
        self.expires = os.path.getmtime(self.fileName()) + (60 * 60)

    if not self.token:
        raise IOError('No edge token')
    return self.token
def lastestVersion(self, force=False, localOnly=False):
    """Return the title's latest known version, querying the CDN on demand.

    force -- re-query the CDN even when a version is cached.
    localOnly -- never hit the CDN; return whatever is cached.
    Returns None on missing id or on any error (logged).
    """
    # if self.isDLC:
    #     return '0'
    try:
        if not self.id:
            return None
        mustFetch = (self.version is None or force) and not localOnly
        if mustFetch:
            self.version = cdn.version(self.id)
            Print.info('Grabbed %s [%s] version, %s' % (str(self.name), str(self.id), str(self.version)))
        #Print.info('version: ' + str(self.version))
        return self.version
    except BaseException as e:
        Print.error(str(e))
        return None
def load(fileName='titledb/files.json', verify=True):
    """Load the scanned-files cache into the module-level `files` dict.

    Runs at most once per process (guarded by hasLoaded). Each JSON entry
    becomes an Fs.Nsp keyed by absolute path; while scanning with
    verify=True, entries whose file is missing or hidden are dropped.
    """
    global hasLoaded  # pylint: disable=global-statement
    if hasLoaded:
        return
    hasLoaded = True

    timestamp = time.perf_counter()
    try:
        if os.path.isfile(fileName):
            with open(fileName, encoding="utf-8-sig") as f:
                for k in json.loads(f.read()):
                    t = Fs.Nsp(k['path'], None)
                    t.timestamp = k['timestamp']
                    t.titleId = k['titleId']
                    t.version = k['version']
                    # older cache files lack these optional keys
                    t.extractedNcaMeta = k.get('extractedNcaMeta', 0) == 1
                    if 'fileSize' in k:
                        t.fileSize = k['fileSize']
                    t.cr = k.get('cr')
                    if not t.path:
                        continue
                    path = os.path.abspath(t.path)
                    if verify and Config.isScanning:
                        # re-check existence only while a scan is running;
                        # isfile() already implies exists(), the old
                        # double-check was redundant
                        if os.path.isfile(path) and not _is_file_hidden(path):
                            files[path] = t
                    else:
                        files[path] = t
        Print.info('loaded file list in ' + str(time.perf_counter() - timestamp) + ' seconds')
    except BaseException as e:
        Print.error('error loading titledb/files.json: ' + str(e))
def load(fileName):
    """Parse keys.txt and derive title keks and key-area keys.

    Fills `keys` from `name = HEX` lines, then derives material for
    master keys 0..9. A missing master key is fatal: a zero kek is
    appended and IOError is raised (caught and logged below).
    """
    try:
        global keyAreaKeys
        global titleKeks

        with open(fileName, encoding="utf8") as f:
            for line in f.readlines():
                # BUG FIX: raw string — '\s' in a plain string literal is
                # an invalid escape sequence on modern Python
                r = re.match(r'\s*([a-z0-9_]+)\s*=\s*([A-F0-9]+)\s*', line, re.I)
                if r:
                    keys[r.group(1)] = r.group(2)

        aes_kek_generation_source = uhx(keys['aes_kek_generation_source'])
        aes_key_generation_source = uhx(keys['aes_key_generation_source'])

        keyAreaKeys = []
        for i in range(10):
            keyAreaKeys.append([None, None, None])

        for i in range(10):
            masterKeyName = 'master_key_0' + str(i)
            if masterKeyName in keys.keys():
                masterKey = uhx(keys[masterKeyName])
                crypto = aes128.AESECB(masterKey)
                titleKeks.append(crypto.decrypt(uhx(keys['titlekek_source'])).hex())
                keyAreaKeys[i][0] = generateKek(uhx(keys['key_area_key_application_source']), masterKey, aes_kek_generation_source, aes_key_generation_source)
                keyAreaKeys[i][1] = generateKek(uhx(keys['key_area_key_ocean_source']), masterKey, aes_kek_generation_source, aes_key_generation_source)
                keyAreaKeys[i][2] = generateKek(uhx(keys['key_area_key_system_source']), masterKey, aes_kek_generation_source, aes_key_generation_source)
            else:
                titleKeks.append('0' * 32)
                # abort the derivation loop: every master key is required
                raise IOError('{0} missing from keys.txt'.format(masterKeyName))
    except BaseException as e:
        Print.error(format_exc())
        Print.error(str(e))
def recv(self, timeout=60000):
    """Read one framed packet from self.i into this object.

    Parses the 32-byte little-endian header, validates the magic, then
    reads self.size payload bytes. Returns True on success, False when
    the magic does not match.
    """
    Print.info('begin recv')
    header = bytes(self.i.read(32, timeout=timeout))
    Print.info('read complete')

    magic = header[:4]

    def field(lo, hi):
        # all header fields are little-endian unsigned integers
        return int.from_bytes(header[lo:hi], byteorder='little')

    self.command = field(4, 8)
    self.size = field(8, 16)
    self.threadId = field(16, 20)
    self.packetIndex = field(20, 22)
    self.packetCount = field(22, 24)
    self.timestamp = field(24, 32)

    if magic != b'\x12\x12\x12\x12':
        Print.error('invalid magic! ' + str(magic))
        return False

    Print.info('receiving %d bytes' % self.size)
    self.payload = bytes(self.i.read(self.size, timeout=0))
    return True
def loadTxtDatabases():
    """Load titles.txt plus every .txt database under the titledb dir.

    Files named *personal_keys.txt are parsed for keys; everything else
    is loaded as a title file. Serialized under confLock.
    """
    confLock.acquire()
    # BUG FIX: the lock was acquired/released without try/finally, so any
    # exception outside the inner try (e.g. from loadTitleFile below)
    # left confLock held forever
    try:
        if os.path.isfile("titles.txt"):
            loadTitleFile('titles.txt', True)

        try:
            files = [f for f in os.listdir(Config.paths.titleDatabase) if f.endswith('.txt')]
            files.sort()

            for file in files:
                if file.endswith('personal_keys.txt'):
                    parsePersonalKeys(Config.paths.titleDatabase + '/' + file)
                else:
                    loadTitleFile(Config.paths.titleDatabase + '/' + file, False)
        except BaseException as e:
            Print.error('title load error: ' + str(e))
    finally:
        confLock.release()
def downloadAll(wait=True):
    """Queue and download every active title that has an update available.

    wait -- when True, block until the queue drains and all workers idle.
    """
    initTitles()
    initFiles()

    global activeDownloads
    global status

    i = 0
    Print.info('Downloading All')

    try:
        for k, t in Titles.items():
            i = i + 1
            if not t.isActive():
                continue
            if t.isUpdateAvailable():
                # reject placeholder ids and updates whose version is unknown
                if not t.id or t.id == '0' * 16 or (t.isUpdate and t.lastestVersion() in [None]):
                    Print.warning('no valid id? id: %s version: %s' % (str(t.id), str(t.lastestVersion())))
                    continue
                if t.lastestVersion() is None:
                    Print.info('Could not get version for ' + str(t.name) + ' [' + str(t.id) + ']')
                    continue
                Titles.queue.add(t.id)
        Print.info("%d titles scanned, downloading %d" % (i, Titles.queue.size()))
        Titles.save()
        status = Status.create(Titles.queue.size(), 'Total Download')
        startDownloadThreads()
        # block until the queue is empty and no worker reports activity
        while wait and (not Titles.queue.empty() or sum(activeDownloads) > 0):
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    except BaseException as e:
        Print.error(str(e))

    if status:
        status.close()
def downloadAll(wait=True):
    """Download every title that passes the configured download filters.

    wait -- when True, block until the queue drains and all workers idle.
    """
    nut.initTitles()
    nut.initFiles()

    global activeDownloads
    global status

    try:
        for k, t in Titles.items():
            # filter chain: update available; type (base/DLC/demo/update)
            # enabled in Config.download; titlekey present unless
            # sansTitleKey; whitelist (when non-empty) and blacklist rules
            if t.isUpdateAvailable() and (
                    t.isDLC or t.isUpdate or Config.download.base) and (
                    not t.isDLC or Config.download.DLC) and (
                    not t.isDemo or Config.download.demo) and (
                    not t.isUpdate or Config.download.update) and (
                    t.key or Config.download.sansTitleKey
            ) and (len(Config.titleWhitelist) == 0 or t.id in Config.titleWhitelist
                   ) and t.id not in Config.titleBlacklist:
                # reject placeholder ids and updates with no usable version
                if not t.id or t.id == '0' * 16 or (
                        t.isUpdate and t.lastestVersion() in [None, '0']):
                    #Print.warning('no valid id? ' + str(t.path))
                    continue
                if not t.lastestVersion():
                    Print.info('Could not get version for ' + str(t.name) + ' [' + str(t.id) + ']')
                    continue
                Titles.queue.add(t.id)
        Titles.save()
        status = Status.create(Titles.queue.size(), 'Total Download')
        startDownloadThreads()
        # block until the queue is empty and no worker reports activity
        while wait and (not Titles.queue.empty() or sum(activeDownloads) > 0):
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    except BaseException as e:
        Print.error(str(e))

    if status:
        status.close()
def init():
    """Import every plugin package found under ../../plugins.

    A directory is a plugin when it contains __init__.py and no file
    named 'disabled'. Idempotent: returns False after the first call,
    True on the call that performed the loading.
    """
    global initialized
    if initialized:
        return False
    initialized = True

    path = os.path.abspath(os.path.join(__file__, '../../plugins'))

    for f in os.listdir(path):
        try:
            # was os.path.join(os.path.join(path, f), ...) — join is variadic
            pluginDir = os.path.join(path, f)
            if not os.path.isfile(os.path.join(pluginDir, '__init__.py')) or os.path.isfile(os.path.join(pluginDir, 'disabled')):
                continue
            importlib.import_module('plugins.%s' % f)
        except BaseException as e:
            # a broken plugin must not stop the others from loading
            Print.error("plugin loader exception: %s" % str(e))
    return True
def call(*argv):
    """Invoke every hook registered under argv[0] with the remaining args.

    Returns False when no hook name was given or none is registered;
    True after dispatching (individual hook errors are logged, not raised).
    """
    global _hooks

    args = list(argv)
    if not args:
        return False

    hookName = args.pop(0)
    if hookName not in _hooks:
        return False

    for handler in _hooks[hookName]:
        try:
            handler(*args)
        except BaseException as e:
            # one failing plugin must not block the rest
            Print.error('plugin exception: %s' % str(e))
    return True
def get(self):
    """Return the cached edge token, fetching a fresh one when needed.

    A token file is considered valid for one hour after its mtime.
    Raises IOError when no token can be obtained at all.
    """
    ttl = 60 * 60

    if not self.token:
        try:
            with open(self.fileName(), encoding='utf8') as f:
                self.token = f.read().strip()
            self.expires = os.path.getmtime(self.fileName()) + ttl
        except BaseException as e:
            # missing/unreadable token file — fall through to a fresh fetch
            Print.error(str(e))

    if not self.token or not self.expires or time.time() > self.expires:
        import cdn.Auth
        self.token = cdn.Auth.getEdgeToken(self.clientId)
        self.expires = os.path.getmtime(self.fileName()) + ttl

    if not self.token:
        raise IOError('No edge token')
    return self.token
def AllowedToWriteOutfile(filePath, targetFileExtension, targetDict, removeOld, overwrite, parseCnmt):
    """Decide whether filePath may be written into the output directory.

    targetDict -- (filesAtTarget, alreadyExists): filesAtTarget maps
    lowercased filename -> path; alreadyExists maps titleID -> {version:
    [paths]}. removeOld deletes outdated versions; overwrite deletes
    same-version duplicates. Returns False when the file must be skipped,
    otherwise defers to fileNameCheck().
    """
    (filesAtTarget, alreadyExists) = targetDict
    extractedIdVersion = ExtractTitleIDAndVersion(filePath, parseCnmt)
    if extractedIdVersion is None:
        Print.error("Failed to extract TitleID/Version from filename {0}. Use -p to extract from Cnmt.".format(Path(filePath).name))
        return fileNameCheck(filePath, targetFileExtension, filesAtTarget, removeOld, overwrite)
    (titleIDExtracted, versionExtracted) = extractedIdVersion
    titleIDEntry = alreadyExists.get(titleIDExtracted)
    if removeOld:
        if titleIDEntry is not None:
            exitFlag = False
            # iterate a snapshot of the keys; entries are deleted below
            for versionEntry in list(titleIDEntry.keys()):
                # removed leftover debug print(versionEntry, versionExtracted)
                if versionEntry < versionExtracted:
                    for delFilePath in titleIDEntry[versionEntry]:
                        Print.info('Delete outdated version: {0}'.format(delFilePath))
                        remove(delFilePath)
                        del filesAtTarget[Path(delFilePath).name.lower()]
                else:
                    exitFlag = True
            if exitFlag:
                Print.info('{0} with a the same ID and newer version already exists in the output directory.\n'\
                'If you want to process it do not use --rm-old-version!'.format(Path(filePath).name))
                return False
    if titleIDEntry is not None:
        for versionEntry in titleIDEntry:
            # BUG FIX: was `versionEntry == titleIDEntry` (key vs. dict,
            # never true), and the overwrite branch iterated the dict
            # filesAtTarget as (name, path) tuples — marked NEEDS TO BE
            # FIXED; now mirrors the removeOld deletion above
            if versionEntry == versionExtracted:
                if overwrite:
                    for delFilePath in titleIDEntry[versionEntry]:
                        Print.info('Delete duplicate: {0}'.format(delFilePath))
                        remove(delFilePath)
                        del filesAtTarget[Path(delFilePath).name.lower()]
                else:
                    Print.info('{0} with the same ID and version already exists in the output directory.\n'\
                    'If you want to overwrite it use the -w parameter!'.format(Path(filePath).name))
                    return False
    return fileNameCheck(filePath, targetFileExtension, filesAtTarget, removeOld, overwrite)
def pullWorker(q, s):
    """Drain the download queue q, updating status tracker s per item.

    Each queued Fs.Nsp is downloaded to a temp name, re-opened, moved to
    its canonical location, and registered in Nsps.files. Errors on one
    item are logged and do not stop the worker.
    """
    while True:
        if q.empty():
            break
        nsp = q.get()
        if not nsp:
            break
        try:
            hasValidTicket = nsp.hasValidTicket
            tmpFile = getName(nsp.titleId, nsp.version, path=nsp.path)
            Print.info('Downloading ' + nsp.path)
            if Config.dryRun:
                continue
            # honor configured size window
            if Config.download.fileSizeMax is not None and nsp.getFileSize() > Config.download.fileSizeMax:
                continue
            if Config.download.fileSizeMin is not None and nsp.getFileSize() < Config.download.fileSizeMin:
                continue
            with open(tmpFile, 'wb') as f:
                serveFile(f, nsp.downloadPath, os.path.basename(nsp.path))
            nsp = Fs.Nsp(tmpFile, None)
            nsp.hasValidTicket = hasValidTicket
            nsp.move(forceNsp=hasValidTicket)
            Nsps.files[nsp.path] = nsp
            Nsps.save()
        except BaseException as e:
            Print.error('FTP SYNC EXCEPTION: ' + str(e))
            #raise #TODO
        finally:
            # BUG FIX: s.add() used to sit after the except block, so every
            # `continue` (dry run / size filters) skipped it and the status
            # total drifted; finally runs on all paths through the try
            s.add()
    Print.info('thread exiting')
def verify_NCA(ncaFile, titleKey):
    """Run hactool on ncaFile with titleKey; True when sections decrypt.

    Returns False for a missing key, a non-zero hactool exit, or when the
    output reports a corrupted section (i.e. a bad titlekey).
    """
    if not titleKey:
        return False

    cmd = '{0} "{1}"{2} --titlekey="{3}"'.format(hactoolPath, ncaFile, keysArg, titleKey)
    try:
        output = str(subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True))
    except subprocess.CalledProcessError as exc:
        Print.error("Status : FAIL" + str(exc.returncode) + ', ' + str(exc.output))
        return False

    # a corrupted section means the supplied titlekey did not decrypt it
    corruptionMarkers = ("Error: section 0 is corrupted!", "Error: section 1 is corrupted!")
    if any(marker in output for marker in corruptionMarkers):
        Print.error("\nNCA Verification failed. Probably a bad titlekey.")
        return False

    Print.debug("\nTitlekey verification successful.")
    return True
def move(self):
    """Move the file at self.path to its canonical location self.fileName().

    Returns False when the path is unset, no filename can be computed, or
    the file is already in place. Returns True after a move — or after a
    failed move was diverted to the duplicates folder via moveDupe().
    """
    if not self.path:
        Print.error('no path set')
        return False

    if not self.fileName():
        Print.error('could not get filename for ' + self.path)
        return False

    # already at the right place (case-insensitive comparison).
    # NOTE: the old second check (case-sensitive equality + isfile) was
    # unreachable — exact equality always implies lowercase equality —
    # so it has been removed.
    if os.path.abspath(self.fileName()).lower() == os.path.abspath(self.path).lower():
        return False

    try:
        Print.info(self.path + ' -> ' + self.fileName())
        os.makedirs(os.path.dirname(self.fileName()), exist_ok=True)
        newPath = self.fileName()
        shutil.move(self.path, newPath)
        self.path = newPath
    except BaseException as e:
        Print.error('failed to rename file! %s -> %s : %s' % (self.path, self.fileName(), e))
        # destination occupied or move failed — park the file in duplicates
        self.moveDupe()

    return True
def blockCompressNsp(filePath, compressionLevel, blockSizeExponent, outputDir, threads):
    """Block-compress an NSP container into outputDir/<stem>.nsz.

    On failure the partial .nsz is removed and the error logged (not
    re-raised). Returns the Path of the target .nsz either way.
    """
    filePath = filePath.resolve()
    container = factory(filePath)
    container.open(str(filePath), 'rb')
    nszPath = outputDir.joinpath(filePath.stem + '.nsz')

    Print.info('Block compressing (level {0}) {1} -> {2}'.format(compressionLevel, filePath, nszPath))

    try:
        with Pfs0.Pfs0Stream(str(nszPath)) as nsp:
            blockCompressContainer(container, nsp, compressionLevel, blockSizeExponent, threads)
    except BaseException as ex:
        # BUG FIX: `not ex is KeyboardInterrupt` compared the instance to
        # the class (always True), so ^C also dumped a traceback
        if not isinstance(ex, KeyboardInterrupt):
            Print.error(format_exc())
        # drop the partial output so no corrupt .nsz is left behind
        if nszPath.is_file():
            nszPath.unlink()

    container.close()
    return nszPath
def __init__(self, f, fs=None):
    """Parse an FS header from stream f.

    Reads the fields sequentially with fixed sizes; the read order below
    IS the binary layout, so it must not be changed.
    NOTE(review): field sizes are taken from the read calls themselves —
    confirm against the NCA filesystem-header specification.
    """
    self.version = f.readInt16()
    self.fsType = f.readInt8()
    self.hashType = f.readInt8()
    self.encryptionType = f.readInt8()
    self.padding = f.read(3)
    # remember where the hash block starts within the stream
    self.hashOffset = f.tell()
    self.hashInfo = f.read(0xF8)
    self.patchInfo = f.read(0x40)
    self.generation = f.readInt32()
    self.secureValue = f.readInt32()
    self.sparseInfo = f.read(0x30)
    self.reserved = f.read(0x88)
    self.hash = None
    self.fs = fs

    try:
        # only hashType 2 (hierarchical SHA-256) is parsed here; other
        # hash types leave self.hash as None
        if self.hashType == 2:
            self.hash = HierarchicalSha256(self.hashInfo, f, self)
    except BaseException as e:
        Print.error(str(e))
def downloadRepoFile(path):
    """Download titledb/<path> from the github titledb repository.

    Downloads into a .tmp file and renames into place only on success,
    so a failed transfer never clobbers an existing file.
    Returns True on success, False on failure.
    """
    baseUrl = 'https://github.com/blawar/titledb/raw/master/'
    finalFile = os.path.join('titledb', path)
    tmpFile = finalFile + '.tmp'
    try:
        with open(tmpFile, 'wb') as f:
            # renamed from `bytes`, which shadowed the builtin
            byteCount = download(baseUrl + path, f, checkSize=False)
        if byteCount == 0:
            raise IOError('downloaded empty file')
        # remove any previous copy first — os.rename cannot replace an
        # existing file on Windows
        try:
            os.remove(finalFile)
        except BaseException:
            pass
        os.rename(tmpFile, finalFile)
        return True
    except BaseException as e:
        Print.error(str(e))
        try:
            os.remove(tmpFile)
        except BaseException:
            pass
        # explicit False (was an implicit None) — still falsy to callers
        return False
def solidCompressNsp(filePath, compressionLevel, outputDir, threads, stusReport, id, pleaseNoPrint):
    """Solid-compress an NSP container into outputDir/<stem>.nsz.

    stusReport/id feed the shared progress display; pleaseNoPrint
    suppresses console output. On failure the partial .nsz is removed
    and the error logged (not re-raised). Returns the target Path.
    """
    filePath = filePath.resolve()
    container = factory(filePath)
    container.open(str(filePath), 'rb')
    nszPath = outputDir.joinpath(filePath.stem + '.nsz')

    Print.info('Solid compressing (level {0}) {1} -> {2}'.format(compressionLevel, filePath, nszPath), pleaseNoPrint)

    try:
        with Pfs0.Pfs0Stream(str(nszPath)) as nsp:
            processContainer(container, nsp, compressionLevel, threads, stusReport, id, pleaseNoPrint)
    except BaseException as ex:
        # BUG FIX: `not ex is KeyboardInterrupt` compared the instance to
        # the class (always True), so ^C also dumped a traceback
        if not isinstance(ex, KeyboardInterrupt):
            Print.error(format_exc())
        # drop the partial output so no corrupt .nsz is left behind
        if nszPath.is_file():
            nszPath.unlink()

    container.close()
    return nszPath
def exportNcaMap(path):
    """Write a JSON map of titleId -> {version, files: [nca names]} to path.

    Scans the latest file of every known title; the map is checkpointed
    to disk every 100 processed titles and once more at the end.
    """
    nut.initTitles()
    nut.initFiles()

    # renamed from `map`/`id`, which shadowed builtins
    ncaMap = {}
    counter = 0

    for titleId, title in Titles.items():
        print(titleId)
        try:
            nsp = title.getLatestFile()
            if not nsp:
                continue

            # BUG FIX: the original opened the global `args.info` path for
            # every title instead of the title's own file
            nsp.open(nsp.path, 'r+b')

            ncaMap[titleId] = {}
            ncaMap[titleId]['version'] = int(title.version)
            ncaMap[titleId]['files'] = []
            for f in nsp:
                if isinstance(f, Fs.Nca):
                    ncaMap[titleId]['files'].append(f._path)

            counter += 1
            # checkpoint so a crash late in the scan loses little work
            if counter > 100:
                counter = 0
                with open(path, 'w') as outfile:
                    json.dump(ncaMap, outfile, indent=4)
        except BaseException as e:
            Print.error(str(e))

    with open(path, 'w') as outfile:
        json.dump(ncaMap, outfile, indent=4)
def decompress(filePath, outputDir, statusReportInfo=None):
    """Decompress a compressed container or game file.

    .nsp/.nsz containers are delegated to __decompressNsz. A compressed
    game file (.ncz) is inflated to .nca and its hash is compared against
    the first 32 hex chars of the filename. Raises NotImplementedError
    for unknown formats; re-raises decompression errors after cleanup.
    """
    if isNspNsz(filePath):
        return __decompressNsz(filePath, outputDir, True, False, statusReportInfo)
    elif isCompressedGameFile(filePath):
        filename = changeExtension(filePath, '.nca')
        outPath = filename if outputDir is None else str(Path(outputDir).joinpath(filename))
        Print.info('Decompressing %s -> %s' % (filePath, outPath))
        if Config.dryRun:
            return None
        container = factory(filePath)
        container.open(filePath, 'rb')
        try:
            with open(outPath, 'wb') as outFile:
                written, hexHash = __decompressNcz(container, outFile)
        except BaseException as ex:
            # BUG FIX: `ex is not KeyboardInterrupt` compared the instance
            # to the class and was always True
            if not isinstance(ex, KeyboardInterrupt):
                Print.error(format_exc())
            # BUG FIX: the old code called .is_file() on the file object
            # (no such method) — remove the partial file via its path
            if Path(outPath).is_file():
                Path(outPath).unlink()
            # BUG FIX: falling through here left hexHash unbound below;
            # propagate the original error instead
            raise
        finally:
            container.close()
        fileNameHash = Path(filePath).stem.lower()
        if hexHash[:32] == fileNameHash:
            Print.info('[VERIFIED] {0}'.format(filename))
        else:
            Print.info('[MISMATCH] Filename startes with {0} but {1} was expected - hash verified failed!'.format(fileNameHash, hexHash[:32]))
    else:
        raise NotImplementedError("Can't decompress {0} as that file format isn't implemented!".format(filePath))