def setPath(self, path):
	"""Set this file's path and derive titleId/version from the filename tags.

	The filename is expected to carry a 16-hex-char ``[titleId]`` tag and an
	optional ``[vN]`` version tag. Also initializes the valid-ticket flag from
	the extension: .nsp is assumed to hold a valid ticket, .nsx is not.
	"""
	self.path = path
	self.version = '0'  # default when no [vN] tag is present
	# raw strings for regex patterns (identical pattern, no escape warnings)
	z = re.match(r'.*\[([a-zA-Z0-9]{16})\].*', path, re.I)
	if z:
		self.titleId = z.groups()[0].upper()
	else:
		Print.info('could not get title id from filename, name needs to contain [titleId] : ' + path)
		self.titleId = None
	z = re.match(r'.*\[v([0-9]+)\].*', path, re.I)
	if z:
		self.version = z.groups()[0]
	if path.endswith('.nsp'):
		if self.hasValidTicket is None:
			self.setHasValidTicket(True)
	elif path.endswith('.nsx'):
		if self.hasValidTicket is None:
			self.setHasValidTicket(False)
	else:
		# bugfix/consistency: was a bare print(); use Print.info like the rest of the module
		Print.info('unknown extension ' + str(path))
def solidCompressTask(in_queue, statusReport, readyForWork, pleaseNoPrint, pleaseKillYourself, id):
	# Worker-process main loop: pull (file, options) jobs from in_queue, solid
	# compress each one and optionally verify the result; keeps running until
	# the shared pleaseKillYourself counter is raised by the coordinator.
	while True:
		# advertise availability while blocked on the queue; the coordinator
		# uses readyForWork to know how many idle workers exist
		readyForWork.increment()
		item = in_queue.get()
		readyForWork.decrement()
		if pleaseKillYourself.value() > 0:
			break
		try:
			filePath, compressionLevel, outputDir, threadsToUse, verifyArg = item
			outFile = solidCompress(filePath, compressionLevel, outputDir, threadsToUse, statusReport, id, pleaseNoPrint)
			if verifyArg:
				Print.info("[VERIFY NSZ] {0}".format(outFile))
				try:
					verify(outFile, True, [statusReport, id], pleaseNoPrint)
				except VerificationException:
					# a bad output is useless: report and delete it
					Print.error("[BAD VERIFY] {0}".format(outFile))
					Print.error("[DELETE NSZ] {0}".format(outFile))
					remove(outFile)
		except KeyboardInterrupt:
			Print.info('Keyboard exception')
		except BaseException as e:
			# log, then re-raise so the failure is not silently swallowed
			Print.info('nut exception: {0}'.format(str(e)))
			raise
def blockCompressXci(filePath, compressionLevel, blockSizeExponent, outputDir, threads):
	"""Block-compress an XCI's 'secure' partition into a .xcz in outputDir.

	Returns the .xcz path. On failure the partial output is deleted and the
	traceback logged; a KeyboardInterrupt only triggers the cleanup.
	"""
	filePath = filePath.resolve()
	container = factory(filePath)
	container.open(str(filePath), 'rb')
	secureIn = container.hfs0['secure']
	xczPath = outputDir.joinpath(filePath.stem + '.xcz')
	Print.info('Block compressing (level {0}) {1} -> {2}'.format(compressionLevel, filePath, xczPath))
	try:
		# need filepath to copy XCI container settings
		with Xci.XciStream(str(xczPath), originalXciPath=filePath) as xci:
			with Hfs0.Hfs0Stream(xci.hfs0.add('secure', 0), xci.f.tell()) as secureOut:
				blockCompressContainer(secureIn, secureOut, compressionLevel, blockSizeExponent, threads)
			xci.hfs0.resize('secure', secureOut.actualSize)
	except BaseException as ex:
		# bugfix: `ex is KeyboardInterrupt` compared an instance to the class and
		# was always False, so Ctrl+C tracebacks were logged too; use isinstance()
		if not isinstance(ex, KeyboardInterrupt):
			Print.error(format_exc())
		if xczPath.is_file():
			xczPath.unlink()
	container.close()
	return xczPath
def __decompressXcz(filePath, outputDir, write, raiseVerificationException, statusReportInfo, pleaseNoPrint):
	"""Decompress (write=True) or verify-only (write=False) a .xcz.

	The output .xci lands in outputDir, or next to the input (same path with a
	.xci extension) when outputDir is None.
	"""
	fileHashes = FileExistingChecks.ExtractHashes(filePath)
	container = factory(filePath)
	container.open(str(filePath), 'rb')
	try:
		secureIn = container.hfs0['secure']
		if write:
			filename = changeExtension(filePath, '.xci')
			outPath = filename if outputDir is None else str(Path(outputDir).joinpath(filename))
			Print.info('Decompressing %s -> %s' % (filePath, outPath), pleaseNoPrint)
			# need filepath to copy XCI container settings
			with Xci.XciStream(outPath, originalXciPath=filePath) as xci:
				with Hfs0.Hfs0Stream(xci.hfs0.add('secure', 0, pleaseNoPrint), xci.f.tell()) as secureOut:
					__decompressContainer(secureIn, secureOut, fileHashes, write, raiseVerificationException, statusReportInfo, pleaseNoPrint)
				xci.hfs0.resize('secure', secureOut.actualSize)
		else:
			__decompressContainer(secureIn, None, fileHashes, write, raiseVerificationException, statusReportInfo, pleaseNoPrint)
	finally:
		# bugfix/consistency: close the container even when decompression raises
		# (__decompressNsz already guarantees this)
		container.close()
def __decompressNsz(filePath, outputDir, write, raiseVerificationException, statusReportInfo, pleaseNoPrint):
	"""Decompress (write=True) or verify-only (write=False) a .nsz.

	The output .nsp lands in outputDir, or at the input path with a .nsp
	extension when outputDir is None. The container is always closed.
	"""
	fileHashes = FileExistingChecks.ExtractHashes(filePath)
	container = factory(filePath)
	container.open(str(filePath), 'rb')
	try:
		if write:
			filename = changeExtension(filePath, '.nsp')
			outPath = filename if outputDir is None else str(Path(outputDir).joinpath(filename))
			Print.info('Decompressing %s -> %s' % (filePath, outPath), pleaseNoPrint)
			with Pfs0.Pfs0Stream(outPath) as nsp:
				__decompressContainer(container, nsp, fileHashes, write, raiseVerificationException, statusReportInfo, pleaseNoPrint)
		else:
			__decompressContainer(container, None, fileHashes, write, raiseVerificationException, statusReportInfo, pleaseNoPrint)
	finally:
		# removed the no-op `except BaseException: raise` clause; try/finally
		# already propagates exceptions while guaranteeing this close
		container.close()
def add(self, name, size, pleaseNoPrint=None):
	"""Register a new entry in the NSP file table and hand back a writable
	partition positioned at the current end of the stream."""
	Print.info('[ADDING] {0} {1} bytes to NSP'.format(name, size), pleaseNoPrint)
	entryOffset = self.f.tell()
	entry = {
		'name': name,
		'size': size,
		'offset': entryOffset
	}
	self.files.append(entry)
	return self.partition(entryOffset, size, n=BaseFile())
def printInfo(self, maxDepth=3, indent=0):
	# Dump this Cnmt (content metadata): title id, version, title type and one
	# line per content entry, then fall through to the generic base-class dump.
	tabs = '\t' * indent
	Print.info('\n%sCnmt\n' % (tabs))
	Print.info('%stitleId = %s' % (tabs, self.titleId))
	Print.info('%sversion = %x' % (tabs, self.version))
	Print.info('%stitleType = %x' % (tabs, self.titleType))
	for i in self.contentEntries:
		Print.info('%s\tncaId: %s type = %x' % (tabs, i.ncaId, i.type))
	super(Cnmt, self).printInfo(maxDepth, indent)
def getKey(key):
	"""Return the binary key named *key* from the loaded keys dictionary.

	Raises IOError when the key is missing or fails its known crc32 checksum.
	Keys without a stored checksum are logged (main process only) as
	unconfirmed rather than rejected.
	"""
	if key not in keys:
		raise IOError('{0} missing from {1}'.format(key, loadedKeysFile))
	foundKey = uhx(keys[key])
	foundKeyChecksum = crc32(foundKey)
	if key in crc32_checksum:
		if crc32_checksum[key] != foundKeyChecksum:
			# typo fix in user-facing message: "missmatch" -> "mismatch"
			raise IOError('{0} from {1} is invalid (crc32 mismatch)'.format(key, loadedKeysFile))
	elif current_process().name == 'MainProcess':
		Print.info('Unconfirmed: crc32({0}) = {1}'.format(key, foundKeyChecksum))
	return foundKey
def fileNameCheck(filePath, targetFileExtension, filesAtTarget, removeOld, overwrite):
	"""Return True when the output file name derived from filePath may be written.

	A name collision in filesAtTarget blocks the write unless overwrite is set,
	in which case the existing file is deleted first. (removeOld is accepted for
	signature compatibility but not used here.)
	"""
	outFile = str(Path(changeExtension(filePath, targetFileExtension)).name).lower()
	# renamed local: the original reassigned the filePath parameter, shadowing the input
	existingPath = filesAtTarget.get(outFile)
	if existingPath is None:
		return True
	if overwrite:
		remove(existingPath)
		return True
	Print.info('{0} with the same file name already exists in the output directory.\n'\
	'If you want to overwrite it use the -w parameter!'.format(Path(existingPath).name))
	return False
def printInfo(self, maxDepth=3, indent=0):
	# Dump the BKTR header fields; a zero/absent bktr_size means the NCA has no
	# BKTR section, so print nothing.
	if not self.bktr_size:
		return
	tabs = '\t' * indent
	Print.info('\n%sBKTR' % (tabs))
	Print.info('%soffset = %d' % (tabs, self.bktr_offset))
	Print.info('%ssize = %d' % (tabs, self.bktr_size))
	# NOTE(review): `enctryCount` looks like a typo of entryCount — confirm
	# against the attribute's definition before renaming anywhere
	Print.info('%sentry count = %d' % (tabs, self.enctryCount))
	Print.info('\n')
def printInfo(self, maxDepth=3, indent=0):
	"""Print the base physical offsets of this Bktr after the base-class dump."""
	super(Bktr, self).printInfo(maxDepth, indent)
	tabs = '\t' * indent
	Print.info('%sOffsets' % (tabs))
	for position, off in enumerate(self.basePhysicalOffsets, start=1):
		# a zero offset after the first slot marks the end of the used entries
		if off == 0 and position != 1:
			break
		Print.info('%s %x' % (tabs, off))
def unlock(self):
	"""Write the title key from the titles database into this file's ticket,
	mark the ticket valid, then close and move the file to its final location.

	Raises IOError when the database holds no key for this titleId.
	"""
	#if not self.isOpen():
	#	self.open('r+b')
	if not Titles.contains(self.titleId):
		raise IOError('No title key found in database!')
	self.ticket().setTitleKeyBlock(int(Titles.get(self.titleId).key, 16))
	Print.info('setting title key to ' + Titles.get(self.titleId).key)
	self.ticket().flush()
	self.close()
	self.hasValidTicket = True
	self.move()
def verify(self):
	"""Verify every Nca in this container: the first 16 chars of its sha256 hex
	digest must match the first 16 chars of its file name.

	Returns True when all Ncas match, False otherwise (all are still checked).
	"""
	success = True
	for f in self:
		if not isinstance(f, Nca):
			continue
		# renamed from `hash` (shadowed the builtin) and computed once instead
		# of hashing the whole file a second time for the error message
		ncaHash = str(f.sha256())
		if ncaHash[0:16] != str(f._path)[0:16]:
			Print.error('BAD HASH %s = %s' % (str(f._path), ncaHash))
			success = False
	return success
def compress(filePath, outputDir, args, work, amountOfTastkQueued):
	"""Dispatch one file for compression.

	XCIs (unless --solid) and anything when --block is set are block-compressed
	synchronously here; everything else is queued as a solid-compress task for
	the worker processes.
	"""
	compressionLevel = 18 if args.level is None else args.level
	# precedence note: this reads as (suffix == ".xci" and not args.solid) or args.block,
	# i.e. --block forces block mode for any file type
	if filePath.suffix == ".xci" and not args.solid or args.block:
		threadsToUseForBlockCompression = args.threads if args.threads > 0 else cpu_count()
		outFile = blockCompress(filePath, compressionLevel, args.bs, outputDir, threadsToUseForBlockCompression)
		if args.verify:
			Print.info("[VERIFY NSZ] {0}".format(outFile))
			verify(outFile, True)
	else:
		# default to 3 threads per solid-compression task when not specified
		threadsToUseForSolidCompression = args.threads if args.threads > 0 else 3
		work.put([filePath, compressionLevel, outputDir, threadsToUseForSolidCompression, args.verify])
		amountOfTastkQueued.increment()
def setGameCard(self, isGameCard=False):
	"""Set or clear the isGameCard flag on every Nca in this container,
	skipping Ncas that already carry the requested value."""
	targetValue = 1 if isGameCard else 0
	for nca in self:
		if type(nca) != Nca:
			continue
		if nca.header.getIsGameCard() == targetValue:
			continue
		Print.info('writing isGameCard for %s, %d' % (str(nca._path), targetValue))
		nca.header.setIsGameCard(targetValue)
def decompress(filePath, outputDir, statusReportInfo, pleaseNoPrint = None):
	"""Decompress a .nsz, .xcz or .ncz file into outputDir (or next to the input
	when outputDir is None). For .ncz the result hash is checked against the
	file name. Raises NotImplementedError for unsupported formats.
	"""
	if isNspNsz(filePath):
		__decompressNsz(filePath, outputDir, True, False, statusReportInfo, pleaseNoPrint)
	elif isXciXcz(filePath):
		__decompressXcz(filePath, outputDir, True, False, statusReportInfo, pleaseNoPrint)
	elif isCompressedGameFile(filePath):
		filename = changeExtension(filePath, '.nca')
		outPath = filename if outputDir is None else str(Path(outputDir).joinpath(filename))
		Print.info('Decompressing %s -> %s' % (filePath, outPath), pleaseNoPrint)
		inFile = None  # bugfix: pre-bind so the finally block can't raise NameError when factory() fails
		try:
			inFile = factory(filePath)
			inFile.open(str(filePath), 'rb')
			with open(outPath, 'wb') as outFile:
				written, hexHash = __decompressNcz(inFile, outFile, statusReportInfo, pleaseNoPrint)
			fileNameHash = Path(filePath).stem.lower()
			if hexHash[:32] == fileNameHash:
				Print.info('[VERIFIED] {0}'.format(filename), pleaseNoPrint)
			else:
				# typo fixes: "startes" -> "starts", "hash verified failed" -> "hash verification failed"
				Print.info('[MISMATCH] Filename starts with {0} but {1} was expected - hash verification failed!'.format(fileNameHash, hexHash[:32]), pleaseNoPrint)
		except BaseException as ex:
			# bugfix: `ex is KeyboardInterrupt` compared instance to class (always False)
			if not isinstance(ex, KeyboardInterrupt):
				Print.error(format_exc())
			if Path(outPath).is_file():
				Path(outPath).unlink()
		finally:
			if inFile is not None:
				inFile.close()
	else:
		raise NotImplementedError("Can't decompress {0} as that file format isn't implemented!".format(filePath))
def load(fileName):
	"""Load keys from a keys.txt-style file (``name = HEX`` lines) and derive
	the key-area keys and title keks for every available master key revision.

	Errors are logged via Print.error rather than raised.
	"""
	try:
		global keyAreaKeys
		global titleKeks
		global loadedKeysFile
		loadedKeysFile = fileName
		with open(fileName, encoding="utf8") as f:
			# iterate the file directly instead of materializing readlines()
			for line in f:
				# raw string: avoids invalid-escape warnings for \s, same pattern
				r = re.match(r'\s*([a-z0-9_]+)\s*=\s*([A-F0-9]+)\s*', line, re.I)
				if r:
					keys[r.group(1)] = r.group(2)
		aes_kek_generation_source = getKey('aes_kek_generation_source')
		aes_key_generation_source = getKey('aes_key_generation_source')
		titlekek_source = getKey('titlekek_source')
		key_area_key_application_source = getKey('key_area_key_application_source')
		key_area_key_ocean_source = getKey('key_area_key_ocean_source')
		key_area_key_system_source = getKey('key_area_key_system_source')
		keyAreaKeys = []
		for i in range(32):
			keyAreaKeys.append([None, None, None])
		for i in range(32):
			if not existsMasterKey(i):
				continue
			masterKey = getMasterKey(i)
			crypto = aes128.AESECB(masterKey)
			titleKeks.append(crypto.decrypt(titlekek_source).hex())
			# slot order: 0 = application, 1 = ocean, 2 = system
			keyAreaKeys[i][0] = generateKek(key_area_key_application_source, masterKey, aes_kek_generation_source, aes_key_generation_source)
			keyAreaKeys[i][1] = generateKek(key_area_key_ocean_source, masterKey, aes_kek_generation_source, aes_key_generation_source)
			keyAreaKeys[i][2] = generateKek(key_area_key_system_source, masterKey, aes_kek_generation_source, aes_key_generation_source)
	except BaseException as e:
		Print.error(format_exc())
		Print.error(str(e))
def loadCsv(self, line, map=('id', 'path', 'version', 'timestamp', 'hasValidTicket', 'extractedNcaMeta')):
	"""Populate this object from one pipe-separated CSV line.

	Each field is dispatched to the matching ``set<Field>`` method when it
	exists (missing setters are silently ignored); fields beyond the column map
	are reported and skipped.

	map: column names in file order. Bugfix: the default was a mutable list
	(shared across calls); an immutable tuple removes that hazard.
	"""
	split = line.split('|')
	for index, value in enumerate(split):
		if index >= len(map):
			Print.info('invalid map index: ' + str(index) + ', ' + str(len(map)))
			continue
		fieldName = str(map[index])
		methodName = 'set' + fieldName[0].capitalize() + fieldName[1:]
		method = getattr(self, methodName, lambda x: None)
		method(value.strip())
def blockCompressNsp(filePath, compressionLevel, blockSizeExponent, outputDir, threads):
	"""Block-compress an NSP into a .nsz in outputDir.

	Returns the .nsz path. On failure the partial output is deleted and the
	traceback logged; a KeyboardInterrupt only triggers the cleanup.
	"""
	filePath = filePath.resolve()
	container = factory(filePath)
	container.open(str(filePath), 'rb')
	nszPath = outputDir.joinpath(filePath.stem + '.nsz')
	Print.info('Block compressing (level {0}) {1} -> {2}'.format(compressionLevel, filePath, nszPath))
	try:
		with Pfs0.Pfs0Stream(str(nszPath)) as nsp:
			blockCompressContainer(container, nsp, compressionLevel, blockSizeExponent, threads)
	except BaseException as ex:
		# bugfix: `ex is KeyboardInterrupt` compared an instance to the class and
		# was always False; use isinstance() so Ctrl+C is not logged as an error
		if not isinstance(ex, KeyboardInterrupt):
			Print.error(format_exc())
		if nszPath.is_file():
			nszPath.unlink()
	container.close()
	return nszPath
def __init__(self, path=None, mode='rb'):
	"""Create an Nsp wrapper; when a path is given, parse titleId/version
	(and the ticket-validity hint) from its filename via setPath."""
	self.path = None
	self.titleId = None
	self.hasValidTicket = None  # None = unknown until setPath/setHasValidTicket decides
	self.timestamp = None
	self.version = None
	self.fileSize = None
	self.fileModified = None
	self.extractedNcaMeta = False
	super(Nsp, self).__init__(None, path, mode)
	if path:
		self.setPath(path)
		#if files:
		#	self.pack(files)
	if self.titleId and self.isUnlockable():
		Print.info('unlockable title found ' + self.path)
def printInfo(self, maxDepth=3, indent=0):
	# Dump the XCI archive header (magic, title-kek index, gamecard certificate)
	# after the generic base-class dump, then recurse into the HFS0 partition.
	tabs = '\t' * indent
	Print.info('\n%sXCI Archive\n' % (tabs))
	super(Xci, self).printInfo(maxDepth, indent)
	Print.info(tabs + 'magic = ' + str(self.magic))
	Print.info(tabs + 'titleKekIndex = ' + str(self.titleKekIndex))
	Print.info(tabs + 'gamecardCert = ' + str(hx(self.gamecardCert.magic + self.gamecardCert.unknown1 + self.gamecardCert.unknown2 + self.gamecardCert.data)))
	self.hfs0.printInfo(maxDepth, indent)
def unpack(self, path, extractregex=".*"):
	"""Extract every contained file whose target path matches extractregex
	(a regular expression) into directory `path`, creating it if needed.

	Bugfix: the old default "*" is not a valid regex (re.error: nothing to
	repeat), so calling unpack() without an explicit pattern always crashed;
	".*" matches everything as intended.
	"""
	os.makedirs(str(path), exist_ok=True)
	for nspf in self:
		filePath_str = str(path.joinpath(nspf._path))
		if not re.match(extractregex, filePath_str):
			continue
		# bugfix: use a context manager so the handle is closed on write errors
		with open(filePath_str, 'wb') as f:
			nspf.rewind()
			pageSize = 0x100000
			while True:
				buf = nspf.read(pageSize)
				if len(buf) == 0:
					break
				f.write(buf)
		Print.info(filePath_str)
def removeTitleRights(self):
	"""Strip title rights from this container: decrypt the title key from the
	ticket, zero the rights id, and re-encrypt the key into each Nca's key block.

	Raises IOError when no title key is in the database or when an Nca's
	master key revision does not match the ticket's.
	"""
	if not Titles.contains(self.titleId):
		raise IOError('No title key found in database! ' + self.titleId)
	ticket = self.ticket()
	masterKeyRev = ticket.getMasterKeyRevision()
	titleKeyDec = Keys.decryptTitleKey(ticket.getTitleKeyBlock().to_bytes(16, byteorder='big'), Keys.getMasterKeyIndex(masterKeyRev))
	rightsId = ticket.getRightsId()
	Print.info('rightsId =\t' + hex(rightsId))
	Print.info('titleKeyDec =\t' + str(hx(titleKeyDec)))
	Print.info('masterKeyRev =\t' + hex(masterKeyRev))
	# sanity pass before mutating anything (dead `pass` statement removed)
	for nca in self:
		if type(nca) == Nca:
			if nca.header.getCryptoType2() != masterKeyRev:
				raise IOError('Mismatched masterKeyRevs!')
	ticket.setRightsId(0)
	for nca in self:
		if type(nca) == Nca:
			if nca.header.getRightsId() == 0:
				continue
			kek = Keys.keyAreaKey(Keys.getMasterKeyIndex(masterKeyRev), nca.header.keyIndex)
			Print.info('writing masterKeyRev for %s, %d' % (str(nca._path), masterKeyRev))
			Print.info('kek =\t' + hx(kek).decode())
			crypto = aes128.AESECB(kek)
			# the 16-byte title key is repeated to fill the 64-byte key block
			encKeyBlock = crypto.encrypt(titleKeyDec * 4)
			nca.header.setRightsId(0)
			nca.header.setKeyBlock(encKeyBlock)
			Hex.dump(encKeyBlock)
def printInfo(self, maxDepth=3, indent=0):
	"""Print this BKTR bucket's header, then recurse into each of its entries."""
	tabs = '\t' * indent
	headerLines = [
		'\n%sBKTR Bucket' % tabs,
		'%sentries: %d' % (tabs, self.entryCount),
		'%send offset: %d' % (tabs, self.endOffset),
	]
	for line in headerLines:
		Print.info(line)
	for entry in self.entries:
		entry.printInfo(maxDepth, indent + 1)
def solidCompressNsp(filePath, compressionLevel, outputDir, threads, stusReport, id, pleaseNoPrint):
	"""Solid-compress an NSP into a .nsz in outputDir.

	Returns the .nsz path. On failure the partial output is deleted and the
	traceback logged; a KeyboardInterrupt only triggers the cleanup.
	"""
	filePath = filePath.resolve()
	container = factory(filePath)
	container.open(str(filePath), 'rb')
	nszPath = outputDir.joinpath(filePath.stem + '.nsz')
	Print.info('Solid compressing (level {0}) {1} -> {2}'.format(compressionLevel, filePath, nszPath), pleaseNoPrint)
	try:
		with Pfs0.Pfs0Stream(str(nszPath)) as nsp:
			processContainer(container, nsp, compressionLevel, threads, stusReport, id, pleaseNoPrint)
	except BaseException as ex:
		# bugfix: `ex is KeyboardInterrupt` compared an instance to the class and
		# was always False; use isinstance() so Ctrl+C is not logged as an error
		if not isinstance(ex, KeyboardInterrupt):
			Print.error(format_exc())
		if nszPath.is_file():
			nszPath.unlink()
	container.close()
	return nszPath
def open(self, path=None, mode='rb', cryptoType=-1, cryptoKey=-1, cryptoCounter=-1):
	# Open the filesystem via the base class, then parse the BKTR relocation
	# (bktr1) and subsection (bktr2) tables when their raw buffers are present.
	# Parse failures are logged and tolerated, leaving the attributes unset.
	# NOTE(review): `r` is captured but never used or returned — confirm callers
	# do not rely on a return value before changing this.
	r = super(BaseFs, self).open(path, mode, cryptoType, cryptoKey, cryptoCounter)
	if self.bktr1Buffer:
		try:
			self.bktrRelocation = Bktr.Bktr1(MemoryFile(self.bktr1Buffer), 'rb', nca=self)
		except BaseException as e:
			Print.info('bktr reloc exception: ' + str(e))
	if self.bktr2Buffer:
		try:
			self.bktrSubsection = Bktr.Bktr2(MemoryFile(self.bktr2Buffer), 'rb', nca=self)
		except BaseException as e:
			Print.info('bktr subsection exception: ' + str(e))
def pack(self, files):
	"""Repack `files` into an NSP at self.path with a freshly generated header.

	Returns False when self.path is unset; returns early (None) when an output
	of the exact expected size already exists. Progress is shown via enlighten.
	"""
	if not self.path:
		return False
	Print.info('\tRepacking to NSP...')
	hd = self.generateHeader(files)
	totalSize = len(hd) + sum(os.path.getsize(file) for file in files)
	if os.path.exists(self.path) and os.path.getsize(self.path) == totalSize:
		Print.info('\t\tRepack %s is already complete!' % self.path)
		return
	t = enlighten.Counter(total=totalSize, unit='B', desc=os.path.basename(self.path), leave=False)
	t.write('\t\tWriting header...')
	# bugfix: context manager guarantees the output handle is closed even when
	# a read/write error aborts the repack midway (it previously leaked)
	with open(self.path, 'wb') as outf:
		outf.write(hd)
		t.update(len(hd))
		for file in files:
			t.write('\t\tAppending %s...' % os.path.basename(file))
			with open(file, 'rb') as inf:
				while True:
					buf = inf.read(4096)
					if not buf:
						break
					outf.write(buf)
					t.update(len(buf))
		t.close()
		Print.info('\t\tRepacked to %s!' % outf.name)
def blockCompressContainer(readContainer, writeContainer, compressionLevel, blockSizeExponent, threads):
	# Block-compress every eligible NCA from readContainer into writeContainer
	# as NCZ (NCZSECTN + NCZBLOCK format), using a pool of worker processes fed
	# through a shared queue. Non-NCA files and unpacked NCAs are copied as-is;
	# delta fragments (DATA content type) are skipped entirely.
	CHUNK_SZ = 0x100000
	UNCOMPRESSABLE_HEADER_SIZE = 0x4000  # first 0x4000 bytes of an NCA are stored uncompressed
	if blockSizeExponent < 14 or blockSizeExponent > 32:
		raise ValueError("Block size must be between 14 and 32")
	blockSize = 2**blockSizeExponent
	manager = Manager()
	results = manager.list()
	readyForWork = Counter(0)
	pleaseKillYourself = Counter(0)
	# blocks are dispatched in chunks; results slots are reused per chunk
	TasksPerChunk = 209715200 // blockSize
	for i in range(TasksPerChunk):
		results.append(b"")
	pool = []
	work = manager.Queue(threads)
	for i in range(threads):
		p = Process(target=compressBlockTask, args=(work, results, readyForWork, pleaseKillYourself))
		p.start()
		pool.append(p)
	for nspf in readContainer:
		if isinstance(nspf, Nca.Nca) and nspf.header.contentType == Type.Content.DATA:
			Print.info('Skipping delta fragment {0}'.format(nspf._path))
			continue
		if isinstance(nspf, Nca.Nca) and (nspf.header.contentType == Type.Content.PROGRAM or nspf.header.contentType == Type.Content.PUBLICDATA) and nspf.size > UNCOMPRESSABLE_HEADER_SIZE:
			if isNcaPacked(nspf):
				offsetFirstSection = sortedFs(nspf)[0].offset
				# output entry keeps the NCA name with the last char swapped to 'z' (.nca -> .ncz)
				newFileName = nspf._path[0:-1] + 'z'
				f = writeContainer.add(newFileName, nspf.size)
				startPos = f.tell()
				nspf.seek(0)
				f.write(nspf.read(UNCOMPRESSABLE_HEADER_SIZE))
				sections = []
				for fs in sortedFs(nspf):
					sections += fs.getEncryptionSections()
				if len(sections) == 0:
					for p in pool:
						# Process.terminate() might corrupt the data structure but we don't care here
						p.terminate()
					raise Exception("NCA can't be decrypted. Outdated keys.txt?")
				# NCZSECTN header: one record per encryption section
				header = b'NCZSECTN'
				header += len(sections).to_bytes(8, 'little')
				i = 0
				for fs in sections:
					i += 1
					header += fs.offset.to_bytes(8, 'little')
					header += fs.size.to_bytes(8, 'little')
					header += fs.cryptoType.to_bytes(8, 'little')
					header += b'\x00' * 8
					header += fs.cryptoKey
					header += fs.cryptoCounter
				f.write(header)
				blockID = 0
				chunkRelativeBlockID = 0
				startChunkBlockID = 0
				blocksHeaderFilePos = f.tell()
				bytesToCompress = nspf.size - UNCOMPRESSABLE_HEADER_SIZE
				blocksToCompress = bytesToCompress // blockSize + (bytesToCompress % blockSize > 0)
				compressedblockSizeList = [0] * blocksToCompress
				header = b'NCZBLOCK' #Magic
				header += b'\x02' #Version
				header += b'\x01' #Type
				header += b'\x00' #Unused
				header += blockSizeExponent.to_bytes(1, 'little') #blockSizeExponent in bits: 2^x
				header += blocksToCompress.to_bytes(4, 'little') #Amount of Blocks
				header += bytesToCompress.to_bytes(8, 'little') #Decompressed Size
				# block-size table is written as zeros now and patched in after compression
				header += b'\x00' * (blocksToCompress * 4)
				f.write(header)
				decompressedBytes = UNCOMPRESSABLE_HEADER_SIZE
				compressedBytes = f.tell()
				BAR_FMT = u'{desc}{desc_pad}{percentage:3.0f}%|{bar}| {count:{len_total}d}/{total:d} {unit} [{elapsed}<{eta}, {rate:.2f}{unit_pad}{unit}/s]'
				bar = enlighten.Counter(total=nspf.size // 1048576, desc='Compressing', unit='MiB', color='cyan', bar_format=BAR_FMT)
				subBars = bar.add_subcounter('green', all_fields=True)
				# build one partition per region so reads come back decrypted
				partitions = []
				if offsetFirstSection - UNCOMPRESSABLE_HEADER_SIZE > 0:
					partitions.append(nspf.partition(offset=UNCOMPRESSABLE_HEADER_SIZE, size=offsetFirstSection - UNCOMPRESSABLE_HEADER_SIZE, cryptoType=Type.Crypto.CTR.NONE, autoOpen=True))
				for section in sections:
					#Print.info('offset: %x\t\tsize: %x\t\ttype: %d\t\tiv%s' % (section.offset, section.size, section.cryptoType, str(hx(section.cryptoCounter))), pleaseNoPrint)
					partitions.append(nspf.partition(offset=section.offset, size=section.size, cryptoType=section.cryptoType, cryptoKey=section.cryptoKey, cryptoCounter=bytearray(section.cryptoCounter), autoOpen=True))
				if UNCOMPRESSABLE_HEADER_SIZE - offsetFirstSection > 0:
					partitions[0].seek(UNCOMPRESSABLE_HEADER_SIZE - offsetFirstSection)
				partNr = 0
				bar.count = nspf.tell() // 1048576
				subBars.count = f.tell() // 1048576
				bar.refresh()
				while True:
					buffer = partitions[partNr].read(blockSize)
					# a block may straddle partition boundaries: keep reading
					# from the next partition until the block is full
					while (len(buffer) < blockSize and partNr < len(partitions) - 1):
						partitions[partNr].close()
						partitions[partNr] = None
						partNr += 1
						buffer += partitions[partNr].read(blockSize - len(buffer))
					if chunkRelativeBlockID >= TasksPerChunk or len(buffer) == 0:
						# chunk boundary (or EOF): wait for all workers, then
						# flush the finished chunk's results in block order
						while readyForWork.value() < threads:
							sleep(0.02)
						for i in range(min(TasksPerChunk, blocksToCompress - startChunkBlockID)):
							lenResult = len(results[i])
							compressedBytes += lenResult
							compressedblockSizeList[startChunkBlockID + i] = lenResult
							f.write(results[i])
							results[i] = b""
						if len(buffer) == 0:
							break
						chunkRelativeBlockID = 0
						startChunkBlockID = blockID
					work.put([buffer, compressionLevel, compressedblockSizeList, chunkRelativeBlockID])
					blockID += 1
					chunkRelativeBlockID += 1
					decompressedBytes += len(buffer)
					bar.count = decompressedBytes // 1048576
					subBars.count = compressedBytes // 1048576
					bar.refresh()
				partitions[partNr].close()
				partitions[partNr] = None
				endPos = f.tell()
				bar.count = decompressedBytes // 1048576
				subBars.count = compressedBytes // 1048576
				bar.close()
				written = endPos - startPos
				# patch the real per-block sizes into the NCZBLOCK header (+24 skips
				# the fixed part of the header written above)
				f.seek(blocksHeaderFilePos + 24)
				header = b""
				for compressedblockSize in compressedblockSizeList:
					header += compressedblockSize.to_bytes(4, 'little')
				f.write(header)
				f.seek(endPos) #Seek to end of file.
				Print.info('compressed %d%% %d -> %d - %s' % (int(written * 100 / nspf.size), decompressedBytes, written, nspf._path))
				writeContainer.resize(newFileName, written)
				continue
			else:
				Print.info('Skipping not packed {0}'.format(nspf._path))
		# fall-through: copy the file into the output container unchanged
		f = writeContainer.add(nspf._path, nspf.size)
		nspf.seek(0)
		while not nspf.eof():
			buffer = nspf.read(CHUNK_SZ)
			f.write(buffer)
	# Ensures that all threads are started and completed before being requested to quit
	while readyForWork.value() < threads:
		sleep(0.02)
	pleaseKillYourself.increment()
	for i in range(readyForWork.value()):
		work.put(None)
	while readyForWork.value() > 0:
		sleep(0.02)
def AllowedToWriteOutfile(filePath, targetFileExtension, targetDict, args):
	"""Decide whether filePath may be compressed/written into the target folder.

	Uses the (filesAtTarget, alreadyExists) pair from CreateTargetDict to detect
	same-ID duplicates and, with --rm-old-version, delete outdated versions.
	Falls back to a pure filename collision check when no TitleID/Version could
	be extracted. Returns True when writing is allowed.
	"""
	(filesAtTarget, alreadyExists) = targetDict
	extractedIdVersion = ExtractTitleIDAndVersion(filePath, args)
	if extractedIdVersion is None:
		if args.parseCnmt or args.alwaysParseCnmt:
			# typo fix: "booth" -> "both"
			Print.error('Failed to extract TitleID/Version from both filename "{0}" and Cnmt - Outdated keys.txt?'.format(Path(filePath).name))
		else:
			Print.error('Failed to extract TitleID/Version from filename "{0}". Use -p to extract from Cnmt.'.format(Path(filePath).name))
		return fileNameCheck(filePath, targetFileExtension, filesAtTarget, args.rm_old_version, args.overwrite)
	(titleIDExtracted, versionExtracted) = extractedIdVersion
	titleIDEntry = alreadyExists.get(titleIDExtracted)
	if titleIDEntry is not None:
		DuplicateEntriesToDelete = []
		OutdatedEntriesToDelete = []
		exitFlag = False
		for versionEntry in titleIDEntry.keys():
			if versionEntry == versionExtracted:
				if args.overwrite:
					DuplicateEntriesToDelete.append(versionEntry)
				else:
					Print.info('{0} with the same ID and version already exists in the output directory.\n'\
					'If you want to overwrite it use the -w parameter!'.format(titleIDEntry[versionEntry]))
					return False
			elif versionEntry < versionExtracted:
				if args.rm_old_version:
					if versionEntry == 0:
						raise ValueError("rm-old-version: A titleID containing updates should never have any version v0 with the same titleID!")
					OutdatedEntriesToDelete.append(versionEntry)
			else: #versionEntry > versionExtracted
				if args.rm_old_version:
					exitFlag = True
		if exitFlag:
			# typo fix: "with a the same" -> "with the same"
			Print.info('{0} with the same ID and newer version already exists in the output directory.\n'\
			'If you want to process it do not use --rm-old-version!'.format(titleIDEntry[versionEntry]))
			return False
		for versionEntry in DuplicateEntriesToDelete:
			for delFilePath in titleIDEntry[versionEntry]:
				Print.info('Delete duplicate: {0}'.format(delFilePath))
				remove(delFilePath)
				del filesAtTarget[Path(delFilePath).name.lower()]
			del titleIDEntry[versionEntry]
		for versionEntry in OutdatedEntriesToDelete:
			for delFilePath in titleIDEntry[versionEntry]:
				Print.info('Delete outdated version: {0}'.format(delFilePath))
				remove(delFilePath)
				del filesAtTarget[Path(delFilePath).name.lower()]
			del titleIDEntry[versionEntry]
	return fileNameCheck(filePath, targetFileExtension, filesAtTarget, args.rm_old_version, args.overwrite)
def CreateTargetDict(targetFolder, args, extension, filesAtTarget = None, alreadyExists = None):
	"""Scan targetFolder for game files and index them.

	Returns (filesAtTarget, alreadyExists): a lowercase-filename -> path dict
	and a titleID -> {version: [paths]} dict. Per-file failures are logged and
	skipped.
	"""
	# bugfix: the defaults were mutable dicts shared (and mutated) across calls,
	# so results from one scan leaked into the next; create fresh dicts instead
	if filesAtTarget is None:
		filesAtTarget = {}
	if alreadyExists is None:
		alreadyExists = {}
	for filePath in expandFiles(targetFolder):
		try:
			filePath_str = str(filePath)
			if (isGame(filePath) or filePath.suffix == ".nspz" or filePath.suffix == ".nsx") and (extension is None or filePath.suffix == extension):
				# removed the stray debug print(filePath); the infoNoNewline below already reports the file
				Print.infoNoNewline('Extract TitleID/Version: {0} '.format(filePath.name))
				filesAtTarget[filePath.name.lower()] = filePath_str
				extractedIdVersion = ExtractTitleIDAndVersion(filePath, args)
				if extractedIdVersion is None:
					if args.parseCnmt or args.alwaysParseCnmt:
						# typo fix: "booth" -> "both"
						Print.error('Failed to extract TitleID/Version from both filename "{0}" and Cnmt - Outdated keys.txt?'.format(Path(filePath).name))
					else:
						Print.error('Failed to extract TitleID/Version from filename "{0}". Use -p to extract from Cnmt.'.format(Path(filePath).name))
					continue
				titleID, version = extractedIdVersion
				titleIDEntry = alreadyExists.get(titleID)
				if titleIDEntry is None:
					titleIDEntry = {version: [filePath_str]}
				elif not version in titleIDEntry:
					titleIDEntry[version] = [filePath_str]
				else:
					titleIDEntry[version].append(filePath_str)
				alreadyExists[titleID] = titleIDEntry
				Print.info('=> {0} {1}'.format(titleID, version))
		except BaseException as e:
			Print.info("")
			print_exc()
			Print.error('Error: ' + str(e))
	return (filesAtTarget, alreadyExists)