def logMissingTitles(file):
    nut.initTitles()
    nut.initFiles()

    with open(file, "w", encoding="utf-8-sig") as f:
        for k, t in Titles.items():
            if t.isUpdateAvailable() and (t.isDLC or t.isUpdate or Config.download.base) and \
                    (not t.isDLC or Config.download.DLC) and \
                    (not t.isDemo or Config.download.demo) and \
                    (not t.isUpdate or Config.download.update) and \
                    (t.key or Config.download.sansTitleKey) and \
                    (len(Config.titleWhitelist) == 0 or t.id in Config.titleWhitelist) and \
                    t.id not in Config.titleBlacklist:
                if not t.id or t.id == '0' * 16 or (t.isUpdate and t.lastestVersion() in [None, '0']):
                    continue
                f.write((t.id or ('0' * 16)) + '|' + (t.key or ('0' * 32)) + '|' + (t.name or '') + "\r\n")
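# A quick sketch of the pipe-delimited output produced above, one title per
# line in id|key|name form (illustrative values, not a real title key):
# 0100000000010000|00112233445566778899AABBCCDDEEFF|Example Title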
def getSearch(request, response):
    o = []

    region = request.query.get('region')
    publisher = request.query.get('publisher')

    dlc = request.query.get('dlc')
    if dlc:
        dlc = int(dlc[0])

    update = request.query.get('update')
    if update:
        update = int(update[0])

    demo = request.query.get('demo')
    if demo:
        demo = int(demo[0])

    for k, t in Titles.items():
        f = t.getLatestFile()
        if f and f.hasValidTicket and (region is None or t.region in region) and \
                (dlc is None or t.isDLC == dlc) and \
                (update is None or t.isUpdate == update) and \
                (demo is None or t.isDemo == demo) and \
                (publisher is None or t.publisher in publisher):
            o.append({
                'id': t.getId(),
                'name': t.getName(),
                'version': int(f.version) if f.version else None,
                'region': t.getRegion(),
                'size': f.getFileSize(),
                'mtime': f.getFileModified()
            })

    response.write(json.dumps(o))
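# A minimal sketch of the JSON array getSearch() writes, matching the dict
# built in o.append() above (values are illustrative):
# [{"id": "0100000000010000", "name": "Example Title", "version": 65536,
#   "region": "US", "size": 1073741824, "mtime": 1577836800}]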
def exportNcaMap(path):
    nut.initTitles()
    nut.initFiles()

    map = {}
    i = 0

    for id, title in Titles.items():
        print(id)
        try:
            nsp = title.getLatestFile()
            if not nsp:
                continue

            nsp.open(None, 'r+b')  # open the NSP at its own path

            map[id] = {}
            map[id]['version'] = int(title.version)
            map[id]['files'] = []

            for f in nsp:
                if isinstance(f, Fs.Nca):
                    map[id]['files'].append(f._path)

            # periodically flush the map to disk while scanning
            i += 1
            if i > 100:
                i = 0
                with open(path, 'w') as outfile:
                    json.dump(map, outfile, indent=4)
        except BaseException as e:
            Print.error(str(e))

    with open(path, 'w') as outfile:
        json.dump(map, outfile, indent=4)
def scanDLC(id, showErr=True, dlcStatus=None):
    id = id.upper()
    title = Titles.get(id)
    baseDlc = Title.baseDlcId(id)

    for i in range(0x1FF):
        scanId = format(baseDlc + i, 'X').zfill(16)
        if Titles.contains(scanId):
            continue

        ver = CDNSP.get_version(scanId.lower())

        if ver is not None:
            t = Title()
            t.setId(scanId)
            Titles.set(scanId, t)
            Titles.save()
            Print.info('Found new DLC ' + str(title.name) + ' : ' + scanId)
        elif showErr:
            Print.info('nothing found at ' + scanId + ', ' + str(ver))

        if dlcStatus:
            dlcStatus.add()
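# A worked example of the probe loop above, assuming Title.baseDlcId()
# returns the first id of the title's DLC range (an assumption; that helper
# is defined elsewhere): with baseDlc = 0x0100ABCD00011001, iteration i = 2
# gives format(baseDlc + 2, 'X').zfill(16) == '0100ABCD00011003', so the
# loop probes up to 0x1FF consecutive candidate DLC ids against the CDN.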
def updateVersions(force=True):
    initTitles()
    initFiles()

    for k, t in tqdm(Titles.items()):
        if (force or t.version is None) and t.isActive():
            v = t.lastestVersion(True)
            Print.info("%s[%s] v = %s" % (str(t.name), str(t.id), str(v)))

    for t in list(Titles.data().values()):
        if not t.isUpdate and not t.isDLC and t.updateId and not Titles.contains(t.updateId):
            u = Title.Title()
            u.setId(t.updateId)

            if u.lastestVersion():
                Titles.set(t.updateId, u)
                Print.info("%s[%s] FOUND" % (str(t.name), str(u.id)))

    Titles.save()
def importRegion(region='US', language='en', save=True):
    if region not in Config.regionLanguages() or language not in Config.regionLanguages()[region]:
        Print.info('Could not locate %s/%s !' % (region, language))
        return False

    regionLanguages = []

    for region2 in Config.regionLanguages():
        for language2 in Config.regionLanguages()[region2]:
            regionLanguages.append(RegionLanguage(region2, language2, region, language))

    for rl in sorted(regionLanguages):
        data = Titles.data(rl.region, rl.language)
        for nsuId in sorted(data.keys(), reverse=True):
            regionTitle = data[nsuId]
            if not regionTitle.id:
                continue

            title = Titles.get(regionTitle.id, None, None)
            title.importFrom(regionTitle, rl.region, rl.language, preferredRegion=region, preferredLanguage=language)

    Titles.loadTxtDatabases()

    if save:
        Titles.save()
def scrapeShogunUnnamed():
    initTitles()
    initFiles()

    result = {}

    for k, t in Titles.data().items():
        if not t.isDLC:
            continue
        if not t.name and t.baseId != '0100069000078000':
            result[t.baseId] = True

    if not Config.dryRun:
        for id, j in tqdm(result.items()):
            try:
                for region, languages in Config.regionLanguages().items():
                    for language in languages:
                        t = Titles.getTitleId(id, region, language)
                        if t:
                            s = cdn.Shogun.scrapeTitle(int(t.nsuId), region=region, language=language, force=False)
                            # print(json.dumps(s.__dict__))
            except BaseException:
                pass

        for region, languages in Config.regionLanguages().items():
            for language in languages:
                Titles.saveRegion(region, language)

        Titles.save()
    else:
        print(result)
def getTitles(request, response):
    o = []
    for k, t in Titles.items():
        o.append(t.__dict__)
    response.write(json.dumps(o))
def getUpdateDb(request, response):
    for url in Config.titleUrls:
        nut.updateDb(url)
    Titles.loadTxtDatabases()
    Titles.save()
    return success(request, response, "Fin")
def updateVersions(force=True):
    initTitles()
    initFiles()

    i = 0
    for k, t in Titles.items():
        if (force or t.version is None) and \
                (t.isDLC or t.isUpdate or Config.download.base) and \
                (not t.isDLC or Config.download.DLC) and \
                (not t.isDemo or Config.download.demo) and \
                (not t.isUpdate or Config.download.update) and \
                (t.key or Config.download.sansTitleKey) and \
                (len(Config.titleWhitelist) == 0 or t.id in Config.titleWhitelist) and \
                t.id not in Config.titleBlacklist:
            v = t.lastestVersion(True)
            Print.info("%s[%s] v = %s" % (str(t.name), str(t.id), str(v)))

            i += 1
            if i % 20 == 0:
                Titles.save()

    for t in list(Titles.data().values()):
        if not t.isUpdate and not t.isDLC and t.updateId and not Titles.contains(t.updateId):
            u = Title.Title()
            u.setId(t.updateId)

            if u.lastestVersion():
                Titles.set(t.updateId, u)
                Print.info("%s[%s] FOUND" % (str(t.name), str(u.id)))

                i += 1
                if i % 20 == 0:
                    Titles.save()

    Titles.save()
def getBase(self):
    baseId = getBaseId(self.id)
    if Titles.contains(baseId):
        return Titles.get(baseId)
    return None
def organize():
    initTitles()
    initFiles()

    # scan()
    Print.info('organizing')

    # for k, f in Nsps.files.items():
    #     print('moving ' + f.path)
    #     Print.info(str(f.hasValidTicket) + ' = ' + f.path)
    #     f.move()

    for id, t in Titles.data().items():
        if not t.isActive(True):
            continue

        files = {}
        for f in t.getFiles():
            ext = f.path[-4:]
            if ext not in files:
                files[ext] = []
            files[ext].append(f)

        hasNsp = False

        if '.nsp' in files and len(files['.nsp']) > 0:
            latest = t.getLatestNsp()
            if latest:
                for f in files['.nsp']:
                    if f.path != latest.path:
                        f.moveDupe()
                hasNsp = True
                latest.move()

        if '.nsz' in files and len(files['.nsz']) > 0:
            latest = t.getLatestNsz()
            if latest:
                for f in files['.nsz']:
                    if f.path != latest.path:
                        f.moveDupe()
                hasNsp = True
                latest.move()

        if '.nsx' in files and len(files['.nsx']) > 0:
            latest = t.getLatestNsx()
            if latest:
                for f in files['.nsx']:
                    if f.path != latest.path:
                        f.moveDupe()
                if hasNsp:
                    latest.moveDupe()
                else:
                    latest.move()

        if '.xci' in files and len(files['.xci']) > 0:
            latest = t.getLatestXci()
            if latest:
                for f in files['.xci']:
                    if f.path != latest.path:
                        f.moveDupe()
                latest.move()

    Print.info('removing empty directories')
    Nsps.removeEmptyDir('.', False)
    Nsps.save()
def export(file, cols=['id', 'rightsId', 'isUpdate', 'isDLC', 'isDemo', 'baseName', 'name', 'version', 'region']):
    # def export(file, cols=['rightsId', 'key', 'name']):
    initTitles()
    Titles.export(file, cols)
def fileName(self, forceNsp=False):
    bt = None
    if self.titleId not in Titles.keys():
        if not Title.getBaseId(self.titleId) in Titles.keys():
            Print.error('could not find base title for ' + str(self.titleId) + ' or ' + str(Title.getBaseId(self.titleId)))
            return None
        bt = Titles.get(Title.getBaseId(self.titleId))
        t = Title.Title()
        if bt.name is not None:
            t.loadCsv(self.titleId + '0000000000000000|0000000000000000|' + bt.name)
        else:
            t.setId(self.titleId)
    else:
        t = Titles.get(self.titleId)

    if not t:
        Print.error('could not find title id ' + str(self.titleId))
        return None

    try:
        if t.baseId not in Titles.keys():
            Print.info('could not find baseId for ' + self.path)
            return None
    except BaseException as e:
        Print.error('exception: could not find title id ' + str(self.titleId) + ' ' + str(e))
        return None

    bt = Titles.get(t.baseId)
    isNsx = not self.hasValidTicket and not forceNsp

    try:
        if t.isDLC:
            format = Config.paths.getTitleDLC(isNsx, self.path)
        elif t.isDemo:
            if t.idExt != 0:
                format = Config.paths.getTitleDemoUpdate(isNsx, self.path)
            else:
                format = Config.paths.getTitleDemo(isNsx, self.path)
        elif t.idExt != 0:
            if bt and bt.isDemo:
                format = Config.paths.getTitleDemoUpdate(isNsx, self.path)
            else:
                format = Config.paths.getTitleUpdate(isNsx, self.path)
        else:
            format = Config.paths.getTitleBase(isNsx, self.path)
    except BaseException as e:
        Print.error('calc path exception: ' + str(e))
        return None

    if not format:
        return None

    newName = self.cleanFilename(t.getName() or '')

    format = format.replace('{id}', self.cleanFilename(t.id))
    format = format.replace('{region}', self.cleanFilename(t.getRegion() or bt.getRegion()))
    format = format.replace('{name}', newName)
    format = format.replace('{version}', str(self.getVersion() or 0))
    format = format.replace('{baseId}', self.cleanFilename(bt.id))

    if '{cr}' in format:
        format = format.replace('{cr}', str(self.getCr()))

    if '{icr}' in format:
        format = format.replace('{icr}', str(self.getCr(True)))

    bn = os.path.basename(self.path)

    if (not newName or len(newName) == 0) and not bn.upper().startswith(t.id.upper()):
        Print.error('could not get new name for ' + bn)
        return os.path.join(os.path.dirname(format), os.path.basename(self.path))

    baseName = self.cleanFilename(bt.getName() or '')
    if not baseName or len(baseName) == 0:
        baseName = os.path.basename(self.path)

    result = format.replace('{baseName}', baseName)

    # trim the base name until the file name fits in 240 UTF-8 bytes
    while len(os.path.basename(result).encode('utf-8')) > 240 and len(baseName) > 3:
        baseName = baseName[:-1]
        result = format.replace('{baseName}', baseName)

    return os.path.abspath(result)
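# A minimal sketch of the placeholder substitution above, assuming a
# hypothetical template returned by Config.paths such as
# 'titles/{name} [{id}][v{version}].nsp':
#   {id} -> '0100000000010000', {name} -> 'Example Title', {version} -> '65536'
# yields 'titles/Example Title [0100000000010000][v65536].nsp'.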
def restore(self):
    rightsIds = {}
    ticketCount = 0
    certCount = 0
    lst = [[], []]

    for f in self:
        if type(f).__name__ == 'Nca':
            if f._path.endswith('.cnmt.nca'):
                lst[1].append(f)
            else:
                lst[0].append(f)
        elif type(f).__name__ == 'Ticket':
            ticketCount += 1
        elif f._path.endswith('.cert'):
            certCount += 1

    for l in lst:
        for f in l:
            if type(f).__name__ == 'Nca':
                if f.header.key() == b'\x04' * 16 or f.header.signature1 == b'\x00' * 0x100:
                    raise IOError('junk file')

                if f.restore():
                    oldName = os.path.basename(f._path)

                    if str(f.header.contentType) == 'Content.META':
                        newName = f.sha256()[0:32] + '.cnmt.nca'
                    else:
                        newName = f.sha256()[0:32] + '.nca'

                    if f.header.hasTitleRights():
                        rightsIds[f.header.rightsId] = True

    if len(rightsIds) > ticketCount:
        raise IOError('missing tickets in NSP, expected %d got %d in %s' % (len(rightsIds), ticketCount, self._path))

    if len(rightsIds) > certCount:
        raise IOError('missing certs in NSP')

    for rightsId in rightsIds:
        rightsId = rightsId.decode()
        title = Titles.get(rightsId[0:16].upper())

        if not title.key:
            raise IOError("could not get title key for " + rightsId)

        if ticketCount == 1:
            ticket = self.ticket()
        else:
            ticket = self.ticket(rightsId)

        if ticketCount == 1:
            cert = self.cert()
        else:
            cert = self.cert(rightsId)

        ticket.setRightsId(int(rightsId, 16))
        ticket.setTitleKeyBlock(int(title.key, 16))
        ticket.setMasterKeyRevision(int(rightsId[16:32], 16))

        self.rename(os.path.basename(ticket._path), rightsId.lower() + '.tik')
        self.rename(os.path.basename(cert._path), rightsId.lower() + '.cert')

    return True
if args.cetk:
    cdn.Tigers.cetk(args.cetk)

if args.threads:
    Config.threads = args.threads

if args.shard is not None:
    Config.shardIndex = args.shard

if args.shards is not None:
    Config.shardCount = args.shards

if args.rights_id and args.key:
    nut.initTitles()
    title = Titles.get(args.rights_id[0:16].upper())
    title.key = args.key

if args.extract:
    nut.initTitles()
    for filePath in args.extract:
        # f = Fs.Nsp(filePath, 'rb')
        f = Fs.factory(filePath)
        f.open(filePath, 'rb')
        dir = os.path.splitext(os.path.basename(filePath))[0]
        f.unpack(dir)
        f.close()

if args.create:
    Print.info('creating ' + args.create)
    nsp = Fs.Nsp(None, None)
def getBaseName(self):
    baseId = getBaseId(self.id)
    if Titles.contains(baseId):
        return (Titles.get(baseId).name or '').replace('\n', ' ')
    return ''
def getTitles(request, response):
    o = []
    for k, t in Titles.items():
        o.append(t.__dict__)
    response.headers['Content-Type'] = 'application/json'
    response.write(json.dumps(o))
def open(self, path=None, mode='rb', cryptoType=-1, cryptoKey=-1, cryptoCounter=-1):
    r = super(Pfs0, self).open(path, mode, cryptoType, cryptoKey, cryptoCounter)
    self.rewind()
    # self.setupCrypto()
    # Print.info('cryptoType = ' + hex(self.cryptoType))
    # Print.info('titleKey = ' + (self.cryptoKey.hex()))
    # Print.info('cryptoCounter = ' + (self.cryptoCounter.hex()))

    self.magic = self.read(4)
    if self.magic != b'PFS0':
        raise IOError('Not a valid PFS0 partition ' + str(self.magic))

    fileCount = self.readInt32()
    stringTableSize = self.readInt32()
    self.readInt32()  # junk data

    self.seek(0x10 + fileCount * 0x18)
    stringTable = self.read(stringTableSize)
    stringEndOffset = stringTableSize

    headerSize = 0x10 + 0x18 * fileCount + stringTableSize
    self.files = []

    # walk the entry table backwards so each name's end offset is known
    for i in range(fileCount):
        i = fileCount - i - 1
        self.seek(0x10 + i * 0x18)

        offset = self.readInt64()
        size = self.readInt64()
        nameOffset = self.readInt32()  # just the offset

        name = stringTable[nameOffset:stringEndOffset].decode('utf-8').rstrip(' \t\r\n\0')
        stringEndOffset = nameOffset

        self.readInt32()  # junk data

        f = Fs.factory(name)
        f._path = name
        f.offset = offset
        f.size = size

        self.files.append(self.partition(offset + headerSize, f.size, f, autoOpen=False))

    ticket = None
    try:
        ticket = self.ticket()
        ticket.open(None, None)
        # key = format(ticket.getTitleKeyBlock(), 'X').zfill(32)
        if ticket.titleKey() != ('0' * 32):
            Titles.get(ticket.titleId()).key = ticket.titleKey()
    except BaseException:
        pass

    for i in range(fileCount):
        if self.files[i] != ticket:
            try:
                self.files[i].open(None, None)
            except BaseException:
                pass

    self.files.reverse()
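# The PFS0 layout as parsed above (derived directly from the reads in open();
# sizes are what the code consumes, not quoted from an external spec):
#   0x00  magic 'PFS0' (4 bytes)
#   0x04  fileCount (u32)
#   0x08  stringTableSize (u32)
#   0x0C  reserved (u32, skipped as junk)
#   0x10  fileCount entries, 0x18 bytes each:
#         offset (u64), size (u64), nameOffset (u32), reserved (u32)
#   then the string table; file data begins at
#   headerSize = 0x10 + 0x18 * fileCount + stringTableSize.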
def restore(self):
    rightsIds = {}
    ticketCount = 0
    certCount = 0
    lst = [[], []]

    for f in self:
        if type(f).__name__ == 'Nca':
            if f._path.endswith('.cnmt.nca'):
                lst[1].append(f)
            else:
                lst[0].append(f)
        elif type(f).__name__ == 'Ticket':
            ticketCount += 1
        elif f._path.endswith('.cert'):
            certCount += 1

    renameNcas = {}
    renameNcaHashes = {}

    for l in lst:
        for f in l:
            if type(f).__name__ == 'Nca':
                if f.header.key() == b'\x04' * 16 or f.header.signature1 == b'\x00' * 0x100:
                    raise IOError('junk file')

                if str(f.header.contentType) == 'Content.META':
                    continue

                if f.restore():
                    if f.header.hasTitleRights():
                        rightsIds[f.header.rightsId] = True

                    oldName = os.path.basename(f._path)
                    hash = f.sha256()
                    newName = hash[0:32] + '.nca'

                    renameNcaHashes[oldName] = hash
                    if newName != oldName:
                        renameNcas[oldName] = newName

    if len(rightsIds) > ticketCount:
        Print.error('missing tickets in NSP, expected %d got %d in %s' % (len(rightsIds), ticketCount, self._path))

    if len(rightsIds) > certCount:
        Print.error('missing certs in NSP')

    for rightsId in rightsIds:
        rightsId = rightsId.decode()
        title = Titles.get(rightsId[0:16].upper())

        if not title.key:
            raise IOError("could not get title key for " + rightsId)

        if ticketCount == 1:
            ticket = self.ticket()
        else:
            ticket = self.ticket(rightsId, autoGenerate=True)

        if ticketCount == 1:
            cert = self.cert()
        else:
            cert = self.cert(rightsId, autoGenerate=True)

        ticket.setRightsId(int(rightsId, 16))
        ticket.setTitleKeyBlock(int(title.key, 16))
        ticket.setMasterKeyRevision(int(rightsId[16:32], 16))

        self.rename(os.path.basename(ticket._path), rightsId.lower() + '.tik')
        self.rename(os.path.basename(cert._path), rightsId.lower() + '.cert')

    self.flush()

    for l in lst:
        for f in l:
            if type(f).__name__ == 'Nca':
                if f.header.key() == b'\x04' * 16 or f.header.signature1 == b'\x00' * 0x100:
                    raise IOError('junk file')

                if str(f.header.contentType) != 'Content.META':
                    continue

                flush = False
                for pfs0 in f:
                    for cnmt in pfs0:
                        if type(cnmt).__name__ == 'Cnmt':
                            for oldName, newName in renameNcas.items():
                                if cnmt.renameNca(oldName, newName, renameNcaHashes[oldName]):
                                    self.rename(oldName, newName)
                                    flush = True

                            for contentId, hash in renameNcaHashes.items():
                                cnmt.setHash(contentId, hash)

                            # if flush:
                            cnmt.flush()

                # if flush:
                f.updateFsHashes()

                if f.restore():
                    oldName = os.path.basename(f._path)
                    if str(f.header.contentType) == 'Content.META':
                        newName = f.sha256()[0:32] + '.cnmt.nca'

                    if f.header.hasTitleRights():
                        rightsIds[f.header.rightsId] = True

    return True
Config.download.demo = bool(args.demo)
Config.download.sansTitleKey = bool(args.nsx)
Config.download.update = bool(args.update)

if args.threads:
    Config.threads = args.threads

if args.shard is not None:
    Config.shardIndex = args.shard

if args.shards is not None:
    Config.shardCount = args.shards

if args.rights_id and args.key:
    nut.initTitles()
    title = Titles.get(args.rights_id[0:16].upper())
    title.key = args.key

if args.extract:
    nut.initTitles()
    for filePath in args.extract:
        # f = Fs.Nsp(filePath, 'rb')
        f = Fs.factory(filePath)
        f.open(filePath, 'rb')
        dir = os.path.splitext(os.path.basename(filePath))[0]
        f.unpack(dir)
        f.close()

if args.create:
    Print.info('creating ' + args.create)
    nsp = Fs.Nsp(None, None)
        f.unpack(dir)
        f.close()

if args.create:
    Print.info('creating ' + args.create)
    nsp = Fs.Nsp(None, None)
    nsp.path = args.create
    nsp.pack(args.file)
    # for filePath in args.file:
    #     Print.info(filePath)

if args.update_titles:
    nut.initTitles()
    for url in Config.titleUrls:
        nut.updateDb(url)
    Titles.loadTxtDatabases()
    Titles.save()

if args.update_check:
    nut.initTitles()
    nut.initFiles()
    for _, game in Nsps.files.items():
        title = game.title()
        if title.isUpdate or title.isDLC:
            if game.isUpdateAvailable():
                Print.info(title.getName())
                Print.info(game.isUpdateAvailable())
    exit(0)

if args.submit_keys:
    nut.initTitles()
def compressionStats():
    nut.initTitles()
    nut.initFiles()

    results = {}
    i = 0
    total = 0  # running sum of compression ratios; avoids shadowing sum()

    for k, t in Titles.items():
        try:
            if not t.isActive(skipKeyCheck=True):
                continue

            lastestNsz = t.getLatestNsz()
            if not lastestNsz:
                continue

            lastestNsp = t.getLatestNsp(lastestNsz.version)
            if not lastestNsp:
                continue

            nspSize = lastestNsp.getFileSize()
            nszSize = lastestNsz.getFileSize()

            if nspSize > 0 and nszSize > 0:
                cr = nszSize / nspSize

                if t.isDLC:
                    type = 'DLC'
                elif t.isUpdate:
                    type = 'UPD'
                else:
                    type = 'BASE'

                results[k] = {'id': k, 'name': cleanCsv(t.name), 'publisher': cleanCsv(t.publisher), 'type': type, 'nsp': nspSize, 'nsz': nszSize, 'cr': cr}

                i += 1
                total += cr
        except BaseException as e:
            Print.info(str(e))

    if i == 0:
        Print.info('No data found')
        return

    Print.info('files: %d average compression ratio: %.2f' % (i, total / i))

    path = 'compression.stats.csv'
    with open(path, 'w', encoding='utf8') as f:
        f.write('title id,name,publisher,type,nsp,nsz,cr\n')
        for id, data in results.items():
            f.write('%s,%s,%s,%s,%d,%d,%.2f\n' % (data['id'], data['name'], data['publisher'], data['type'], data['nsp'], data['nsz'], data['cr']))

    Print.info('saved compression stats to %s' % path)
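# Illustrative lines from compression.stats.csv as written above (the sizes
# and ratio are invented for the example):
# title id,name,publisher,type,nsp,nsz,cr
# 0100000000010000,Example Title,Example Publisher,BASE,1073741824,536870912,0.50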
        f.unpack(dir)
        f.close()

if args.create:
    Print.info('creating ' + args.create)
    nsp = Fs.Nsp(None, None)
    nsp.path = args.create
    nsp.pack(args.file)
    # for filePath in args.file:
    #     Print.info(filePath)

if args.update_titles:
    nut.initTitles()
    for url in Config.titleUrls:
        nut.updateDb(url)
    Titles.loadTxtDatabases()
    Titles.save()

if args.submit_keys:
    nut.initTitles()
    nut.initFiles()
    submitKeys()

if args.seteshop:
    # nut.initTitles()
    # nut.initFiles()
    f = Fs.factory(args.seteshop)
    f.open(args.seteshop, 'r+b')
    f.setGameCard(False)
    f.close()
def getName(self):
    baseId = getBaseId(self.id)
    if hasattr(self, 'isUpdate') and self.isUpdate and Titles.contains(baseId):
        return (Titles.get(baseId).name or '').replace('\n', ' ')
    return (self.name or '').replace('\n', ' ')
def parseShogunJson(self, _json, region=None, language=None, canGrabFromShogun=False):
    if not _json:
        return None

    if 'hero_banner_url' in _json:
        self.bannerUrl = _json['hero_banner_url']

    if "release_date_on_eshop" in _json:
        try:
            self.releaseDate = int(_json["release_date_on_eshop"].replace('-', ''))
        except BaseException:
            pass

    '''
    if "id" in _json:
        try:
            self.setNsuId("%s" % _json["id"])
        except BaseException as e:
            Print.info('error setting nsuid: ' + str(e))
            pass
    '''

    if "formal_name" in _json:
        self.name = _json["formal_name"].strip()

    if 'screenshots' in _json:
        self.screenshots = []
        for i, k in enumerate(_json["screenshots"]):
            self.screenshots.append(k["images"][0]["url"])

    if "languages" in _json:
        self.languages = []
        for lang in _json["languages"]:  # named lang to avoid clobbering the language parameter
            self.languages.append(lang['iso_code'])

    if "genre" in _json:
        self.category = _json["genre"].split(' / ')

    if "total_rom_size" in _json:
        self.size = _json["total_rom_size"]

    if "rating_info" in _json:
        if "rating" in _json["rating_info"]:
            if "age" in _json["rating_info"]['rating']:
                self.rating = _json["rating_info"]['rating']['age']

        if "content_descriptors" in _json["rating_info"]:
            content = []
            for descriptor in _json["rating_info"]["content_descriptors"]:
                content.append(descriptor['name'])
            self.ratingContent = content

    if "player_number" in _json:
        if 'local_max' in _json["player_number"]:
            self.numberOfPlayers = _json["player_number"]["local_max"]

        if 'offline_max' in _json["player_number"]:
            self.numberOfPlayers = _json["player_number"]["offline_max"]

    if "publisher" in _json:
        if 'name' in _json["publisher"]:
            self.publisher = _json["publisher"]["name"]
        if 'title' in _json["publisher"]:
            self.publisher = _json["publisher"]["title"]

    if "applications" in _json and isinstance(_json["applications"], list):
        for a in _json["applications"]:
            '''
            if "id" in a:
                self.setId(a['id'])
            '''
            if "image_url" in a:
                self.iconUrl = a['image_url']
                break

    if "applications" in _json and "image_url" in _json["applications"]:
        self.iconUrl = _json["applications"]['image_url']

    if "catch_copy" in _json:
        # self.intro = htmlToText(_json["catch_copy"])
        self.intro = _json["catch_copy"]

    if "description" in _json:
        # self.description = htmlToText(_json["description"])
        self.description = _json["description"]

    try:
        if "target_titles" in _json:
            for nsu in _json["target_titles"]:
                if not Titles.hasNsuid(nsu["id"], region, language):
                    if canGrabFromShogun:
                        baseTitle = Titles.getNsuid(nsu["id"], region, language)
                        baseTitle.parseShogunJson(cdn.Shogun.getTitleByNsuid(nsu["id"], region, language), region, language, True)
                        # Titles.saveRegion(region, language)
                else:
                    baseTitle = Titles.getNsuid(nsu["id"], region, language)
                    if baseTitle.id:
                        pass
                        # self.setId(baseTitle.id)
                        # Print.info("setting appid " + str(baseTitle.id))
                    # else:
                    #     Print.error("Could not find title for " + str(nsu["id"]))
    except BaseException:
        Print.error('target titles error')
        raise
def compressAll(level=19):
    initTitles()
    initFiles()

    global activeDownloads
    global status

    i = 0

    Print.info('Compressing All')

    if Config.reverse:
        q = queue.LifoQueue()
    else:
        q = queue.Queue()

    for k, t in Titles.items():
        try:
            i = i + 1

            if not t.isActive(skipKeyCheck=True):
                continue

            lastestNsp = t.getLatestNsp()
            if not lastestNsp:
                continue

            if lastestNsp.titleId.endswith('000') and lastestNsp.version and int(lastestNsp.version) > 0:
                Print.info('Cannot compress sparse file: ' + str(lastestNsp.path))
                continue

            lastestNsz = t.getLatestNsz()
            if lastestNsz is not None and int(lastestNsz.version) >= int(lastestNsp.version):
                continue

            if Config.download.fileSizeMax is not None and lastestNsp.getFileSize() > Config.download.fileSizeMax:
                continue

            if Config.download.fileSizeMin is not None and lastestNsp.getFileSize() < Config.download.fileSizeMin:
                continue

            q.put(lastestNsp.path)
        except BaseException as e:
            Print.info('COMPRESS ALL EXCEPTION: ' + str(e))

    numThreads = Config.threads
    threads = []

    s = Status.create(q.qsize(), desc="NSPs", unit='B')

    if numThreads > 0:
        Print.info('creating compression threads ' + str(q.qsize()))

        for i in range(numThreads):
            t = threading.Thread(target=compressWorker, args=[q, level, Config.paths.nspOut, s])
            t.daemon = True
            t.start()
            threads.append(t)

        for t in threads:
            t.join()
    else:
        compressWorker(q, level, Config.paths.nspOut, s)

    s.close()
def open(self, file=None, mode='rb', cryptoType=-1, cryptoKey=-1, cryptoCounter=-1):
    super(NcaHeader, self).open(file, mode, cryptoType, cryptoKey, cryptoCounter)
    self.rewind()

    self.signature1 = self.read(0x100)
    self.signature2 = self.read(0x100)
    self.magic = self.read(0x4)
    self.isGameCard = self.readInt8()
    self.contentType = self.readInt8()

    try:
        self.contentType = Fs.Type.Content(self.contentType)
    except BaseException:
        pass

    self.cryptoType = self.readInt8()
    self.keyIndex = self.readInt8()
    self.size = self.readInt64()
    self.titleId = hx(self.read(8)[::-1]).decode('utf-8').upper()
    self.contentIndex = self.readInt32()
    self.sdkVersion = self.readInt32()
    self.cryptoType2 = self.readInt8()
    self.read(0xF)  # padding
    self.rightsId = hx(self.read(0x10))

    if self.magic not in [b'NCA3', b'NCA2']:
        raise Exception('Failed to decrypt NCA header: ' + str(self.magic))

    self.sectionTables = []
    self.sectionHashes = []

    for i in range(4):
        self.sectionTables.append(SectionTableEntry(self.read(0x10)))

    for i in range(4):
        self.sectionHashes.append(self.sectionTables[i])

    self.masterKey = (self.cryptoType if self.cryptoType > self.cryptoType2 else self.cryptoType2) - 1

    if self.masterKey < 0:
        self.masterKey = 0

    self.encKeyBlock = self.getKeyBlock()
    # for i in range(4):
    #     offset = i * 0x10
    #     key = encKeyBlock[offset:offset + 0x10]
    #     Print.info('enc %d: %s' % (i, hx(key)))

    # crypto = aes128.AESECB(Keys.keyAreaKey(self.masterKey, 0))
    self.keyBlock = Keys.unwrapAesWrappedTitlekey(self.encKeyBlock, self.masterKey)
    self.keys = []
    for i in range(4):
        offset = i * 0x10
        key = self.keyBlock[offset:offset + 0x10]
        # Print.info('dec %d: %s' % (i, hx(key)))
        self.keys.append(key)

    if self.hasTitleRights():
        titleRightsTitleId = self.rightsId.decode()[0:16].upper()

        if titleRightsTitleId in Titles.keys() and Titles.get(titleRightsTitleId).key:
            self.titleKeyDec = Keys.decryptTitleKey(uhx(Titles.get(titleRightsTitleId).key), self.masterKey)
        else:
            Print.info('could not find title key!')
    else:
        self.titleKeyDec = self.key()

    return True
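# Field layout as consumed by open() above (offsets follow directly from the
# sequence of reads; this mirrors the code, not an external spec):
#   0x000 signature1 (0x100)   0x100 signature2 (0x100)   0x200 magic 'NCA2'/'NCA3'
#   0x204 isGameCard (u8)      0x205 contentType (u8)     0x206 cryptoType (u8)
#   0x207 keyIndex (u8)        0x208 size (u64)           0x210 titleId (8 bytes, reversed)
#   0x218 contentIndex (u32)   0x21C sdkVersion (u32)     0x220 cryptoType2 (u8)
#   0x221 padding (0xF)        0x230 rightsId (0x10)      0x240 section tables (4 x 0x10)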
def decompressAll():
    initTitles()
    initFiles()

    global activeDownloads
    global status

    i = 0

    Print.info('De-compressing All')

    if Config.reverse:
        q = queue.LifoQueue()
    else:
        q = queue.Queue()

    for k, t in Titles.items():
        try:
            i = i + 1

            if not t.isActive(skipKeyCheck=True):
                continue

            lastestNsz = t.getLatestNsz()
            if not lastestNsz:
                continue

            lastestNsp = t.getLatestNsp()
            if lastestNsp is not None and int(lastestNsp.version) >= int(lastestNsz.version):
                continue

            if Config.dryRun:
                Print.info('nsp ver = %x, nsz ver = %x, %s' % (getVer(lastestNsp), getVer(lastestNsz), t.getName()))

            if Config.download.fileSizeMax is not None and lastestNsz.getFileSize() > Config.download.fileSizeMax:
                continue

            if Config.download.fileSizeMin is not None and lastestNsz.getFileSize() < Config.download.fileSizeMin:
                continue

            q.put(lastestNsz.path)
        except BaseException as e:
            Print.info('DECOMPRESS ALL EXCEPTION: ' + str(e))

    numThreads = Config.threads
    threads = []

    s = Status.create(q.qsize(), desc="NSPs", unit='B')

    if numThreads > 0:
        Print.info('creating decompression threads ' + str(q.qsize()))

        for i in range(numThreads):
            t = threading.Thread(target=decompressWorker, args=[q, Config.paths.nspOut, s])
            t.daemon = True
            t.start()
            threads.append(t)

        for t in threads:
            t.join()
    else:
        decompressWorker(q, Config.paths.nspOut, s)

    s.close()