def getSearch(request, response):
    o = []

    region = request.query.get('region')
    publisher = request.query.get('publisher')

    dlc = request.query.get('dlc')
    if dlc:
        dlc = int(dlc[0])

    update = request.query.get('update')
    if update:
        update = int(update[0])

    demo = request.query.get('demo')
    if demo:
        demo = int(demo[0])

    for k, t in Titles.items():
        f = t.getLatestFile()

        if f and f.hasValidTicket and (region is None or t.region in region) \
                and (dlc is None or t.isDLC == dlc) \
                and (update is None or t.isUpdate == update) \
                and (demo is None or t.isDemo == demo) \
                and (publisher is None or t.publisher in publisher):
            o.append({
                'id': t.getId(),
                'name': t.getName(),
                'version': int(f.version) if f.version else None,
                'region': t.getRegion(),
                'size': f.getFileSize(),
                'mtime': f.getFileModified()
            })

    response.write(json.dumps(o))
def exportKeys(fileName):
    nut.initTitles()

    with open(fileName, 'w') as f:
        f.write('id|key|version\n')

        for tid, title in Titles.items():
            if title and title.rightsId and title.key and title.isActive():
                f.write(str(title.rightsId) + '|' + str(title.key) + '|' + str(title.version) + '\n')
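# Usage sketch (illustrative, not part of nut): reads back the pipe-separated
# dump written by exportKeys() above. The 'keys.txt' filename and the helper
# name readExportedKeys are assumptions for the example.
def readExportedKeys(fileName='keys.txt'):
    entries = []
    with open(fileName, 'r') as f:
        next(f)  # skip the 'id|key|version' header line
        for line in f:
            rightsId, key, version = line.rstrip('\n').split('|')
            entries.append({'rightsId': rightsId, 'key': key, 'version': version})
    return entries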
def updateVersions(force=True):
    initTitles()
    initFiles()

    i = 0
    for k, t in Titles.items():
        if force or t.version is None:
            if (t.isDLC or t.isUpdate or Config.download.base) \
                    and (not t.isDLC or Config.download.DLC) \
                    and (not t.isDemo or Config.download.demo) \
                    and (not t.isUpdate or Config.download.update) \
                    and (t.key or Config.download.sansTitleKey) \
                    and (len(Config.titleWhitelist) == 0 or t.id in Config.titleWhitelist) \
                    and t.id not in Config.titleBlacklist:
                v = t.lastestVersion(True)
                Print.info("%s[%s] v = %s" % (str(t.name), str(t.id), str(v)))

                i = i + 1
                if i % 20 == 0:
                    Titles.save()

    for t in list(Titles.data().values()):
        if not t.isUpdate and not t.isDLC and t.updateId and not Titles.contains(t.updateId):
            u = Title.Title()
            u.setId(t.updateId)

            if u.lastestVersion():
                Titles.set(t.updateId, u)
                Print.info("%s[%s] FOUND" % (str(t.name), str(u.id)))

            i = i + 1
            if i % 20 == 0:
                Titles.save()

    Titles.save()
def getTitles(request, response):
    o = []
    # whitelist of fields to expose; the original defined this list but never
    # applied it and returned the full __dict__ instead
    fields = ['id', 'key', 'isUpdate', 'isDLC', 'isDemo', 'name', 'version', 'region', 'baseId']

    for k, t in Titles.items():
        o.append({field: t.__dict__.get(field) for field in fields})

    response.write(json.dumps(o))
def compressionStats():
    nut.initTitles()
    nut.initFiles()

    results = {}
    i = 0
    total = 0  # running sum of compression ratios (renamed to avoid shadowing builtin `sum`)

    for k, t in Titles.items():
        try:
            if not t.isActive(skipKeyCheck=True):
                continue

            lastestNsz = t.getLatestNsz()
            if not lastestNsz:
                continue

            lastestNsp = t.getLatestNsp(lastestNsz.version)
            if not lastestNsp:
                continue

            nspSize = lastestNsp.getFileSize()
            nszSize = lastestNsz.getFileSize()

            if nspSize > 0 and nszSize > 0:
                cr = nszSize / nspSize

                if t.isDLC:
                    titleType = 'DLC'
                elif t.isUpdate:
                    titleType = 'UPD'
                else:
                    titleType = 'BASE'

                results[k] = {
                    'id': k,
                    'name': cleanCsv(t.name),
                    'publisher': cleanCsv(t.publisher),
                    'type': titleType,
                    'nsp': nspSize,
                    'nsz': nszSize,
                    'cr': cr
                }

                i += 1
                total += cr
        except BaseException as e:
            Print.info(str(e))

    if i == 0:
        Print.info('No data found')
        return

    Print.info('files: %d average compression ratio: %.2f' % (i, total / i))

    path = 'compression.stats.csv'
    with open(path, 'w', encoding='utf8') as f:
        f.write('title id,name,publisher,type,nsp,nsz,cr\n')
        for id, data in results.items():
            f.write('%s,%s,%s,%s,%d,%d,%.2f\n' % (data['id'], data['name'], data['publisher'], data['type'], data['nsp'], data['nsz'], data['cr']))

    Print.info('saved compression stats to %s' % path)
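# Usage sketch (illustrative, not part of nut): summarizes the CSV written by
# compressionStats() above, averaging the compression ratio per title type.
# The summarizeCompressionStats helper name is an assumption for the example.
import csv
from collections import defaultdict

def summarizeCompressionStats(path='compression.stats.csv'):
    totals = defaultdict(lambda: [0.0, 0])
    with open(path, 'r', encoding='utf8') as f:
        for row in csv.DictReader(f):
            totals[row['type']][0] += float(row['cr'])
            totals[row['type']][1] += 1
    # average compression ratio per title type (BASE / UPD / DLC)
    return {titleType: s / n for titleType, (s, n) in totals.items()}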
def downloadAll(wait=True):
    initTitles()
    initFiles()

    global activeDownloads
    global status

    i = 0
    Print.info('Downloading All')

    try:
        for k, t in Titles.items():
            i = i + 1

            if not t.isActive():
                continue

            if t.isUpdateAvailable():
                if not t.id or t.id == '0' * 16:
                    Print.warning('no valid id? id: %s version: %s' % (str(t.id), str(t.lastestVersion())))
                    continue

                Titles.queue.add(t.id)

        Print.info("%d titles scanned, downloading %d" % (i, Titles.queue.size()))

        if Titles.queue.size() > 0:
            Titles.save()
            #status = Status.create(Titles.queue.size(), 'Total Download')

            if Config.threads <= 1:
                activeDownloads.append(1)
                downloadThread(0)
            else:
                startDownloadThreads()

                while wait and (not Titles.queue.empty() or sum(activeDownloads) > 0):
                    time.sleep(1)
                    Print.info('%d downloads, is empty %d' % (sum(activeDownloads), int(Titles.queue.empty())))
    except KeyboardInterrupt:
        pass
    except BaseException as e:
        Print.error(str(e))

    Print.info('Downloads finished')
    #if status:
    #    status.close()
    Print.info('DownloadAll finished')
def getFiles():
    o = []  # result list was missing its initialization in the original

    for k, t in Titles.items():
        f = t.getLatestFile()

        if f and f.hasValidTicket:
            o.append({
                'id': t.id,
                'name': t.name,
                'version': int(f.version) if f.version else None,
                'size': f.getFileSize(),
                'mtime': f.getFileModified()
            })

    return json.dumps(o)
def submitKeys():
    for id, t in Titles.items():
        if t.key and len(t.getFiles()) > 0:
            try:
                #blockchain.blockchain.suggest(t.id, t.key)
                if not blockchain.verifyKey(t.id, t.key):
                    Print.error('Key verification failed for %s / %s' % (str(t.id), str(t.key)))
                    for f in t.getFiles():
                        f.hasValidTicket = False
                        f.move()
            except LookupError as e:
                Print.info(str(e))
            except OSError as e:
                Print.info(str(e))
            except BaseException as e:
                Print.info(str(e))
                raise
def ganymede(config):
    initTitles()
    initFiles()

    with Ganymede(config) as g:
        for k, t in Titles.items():
            try:
                if not t.isActive(skipKeyCheck=True):
                    continue

                lastestNsz = t.getLatestNsz()
                if lastestNsz is None:
                    continue

                g.push(t.id, lastestNsz.version, lastestNsz.path, lastestNsz.size)
            except BaseException:
                raise
def downloadAll(wait=True):
    initTitles()
    initFiles()

    global activeDownloads
    global status

    i = 0
    Print.info('Downloading All')

    try:
        for k, t in Titles.items():
            i = i + 1

            if not t.isActive():
                continue

            if t.isUpdateAvailable():
                if not t.id or t.id == '0' * 16 or (t.isUpdate and t.lastestVersion() in [None]):
                    Print.warning('no valid id? id: %s version: %s' % (str(t.id), str(t.lastestVersion())))
                    continue

                if t.lastestVersion() is None:
                    Print.info('Could not get version for ' + str(t.name) + ' [' + str(t.id) + ']')
                    continue

                Titles.queue.add(t.id)

        Print.info("%d titles scanned, downloading %d" % (i, Titles.queue.size()))

        Titles.save()

        status = Status.create(Titles.queue.size(), 'Total Download')

        startDownloadThreads()

        while wait and (not Titles.queue.empty() or sum(activeDownloads) > 0):
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    except BaseException as e:
        Print.error(str(e))

    if status:
        status.close()
def downloadAll(wait=True):
    nut.initTitles()
    nut.initFiles()

    global activeDownloads
    global status

    try:
        for k, t in Titles.items():
            if t.isUpdateAvailable() \
                    and (t.isDLC or t.isUpdate or Config.download.base) \
                    and (not t.isDLC or Config.download.DLC) \
                    and (not t.isDemo or Config.download.demo) \
                    and (not t.isUpdate or Config.download.update) \
                    and (t.key or Config.download.sansTitleKey) \
                    and (len(Config.titleWhitelist) == 0 or t.id in Config.titleWhitelist) \
                    and t.id not in Config.titleBlacklist:
                if not t.id or t.id == '0' * 16 or (t.isUpdate and t.lastestVersion() in [None, '0']):
                    #Print.warning('no valid id? ' + str(t.path))
                    continue

                if not t.lastestVersion():
                    Print.info('Could not get version for ' + str(t.name) + ' [' + str(t.id) + ']')
                    continue

                Titles.queue.add(t.id)

        Titles.save()

        status = Status.create(Titles.queue.size(), 'Total Download')

        startDownloadThreads()

        while wait and (not Titles.queue.empty() or sum(activeDownloads) > 0):
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    except BaseException as e:
        Print.error(str(e))

    if status:
        status.close()
def logMissingTitles(file):
    nut.initTitles()
    nut.initFiles()

    with open(file, "w", encoding="utf-8-sig") as f:
        for k, t in Titles.items():
            if t.isUpdateAvailable() \
                    and (t.isDLC or t.isUpdate or Config.download.base) \
                    and (not t.isDLC or Config.download.DLC) \
                    and (not t.isDemo or Config.download.demo) \
                    and (not t.isUpdate or Config.download.update) \
                    and (t.key or Config.download.sansTitleKey) \
                    and (len(Config.titleWhitelist) == 0 or t.id in Config.titleWhitelist) \
                    and t.id not in Config.titleBlacklist:
                if not t.id or t.id == '0' * 16 or (t.isUpdate and t.lastestVersion() in [None, '0']):
                    continue

                f.write((t.id or ('0' * 16)) + '|' + (t.key or ('0' * 32)) + '|' + (t.name or '') + "\r\n")
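# Usage sketch (illustrative, not part of nut): parses the id|key|name lines
# written by logMissingTitles() above. The 'missing.txt' filename and the
# readMissingTitles helper name are assumptions for the example.
def readMissingTitles(file='missing.txt'):
    missing = []
    with open(file, 'r', encoding='utf-8-sig') as f:
        for line in f:
            parts = line.rstrip('\r\n').split('|')
            if len(parts) == 3:
                missing.append({'id': parts[0], 'key': parts[1], 'name': parts[2]})
    return missing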
def updateVersions(force=True):
    initTitles()
    initFiles()

    for k, t in tqdm(Titles.items()):
        if force or t.version is None:
            if t.isActive():
                v = t.lastestVersion(True)
                Print.info("%s[%s] v = %s" % (str(t.name), str(t.id), str(v)))

    for t in list(Titles.data().values()):
        if not t.isUpdate and not t.isDLC and t.updateId and not Titles.contains(t.updateId):
            u = Title.Title()
            u.setId(t.updateId)

            if u.lastestVersion():
                Titles.set(t.updateId, u)
                Print.info("%s[%s] FOUND" % (str(t.name), str(u.id)))

    Titles.save()
def exportNcaMap(path):
    nut.initTitles()
    nut.initFiles()

    ncaMap = {}
    i = 0

    for id, title in Titles.items():
        print(id)
        try:
            nsp = title.getLatestFile()

            if not nsp:
                continue

            # open the title's own file; the original passed args.info, which
            # pointed every iteration at the same module-level CLI argument
            nsp.open(nsp.path, 'r+b')

            ncaMap[id] = {}
            ncaMap[id]['version'] = int(title.version)
            ncaMap[id]['files'] = []

            for f in nsp:
                if isinstance(f, Fs.Nca):
                    ncaMap[id]['files'].append(f._path)

            i += 1

            # checkpoint the map to disk every 100 titles
            if i > 100:
                i = 0
                with open(path, 'w') as outfile:
                    json.dump(ncaMap, outfile, indent=4)
        except BaseException as e:
            Print.error(str(e))

    with open(path, 'w') as outfile:
        json.dump(ncaMap, outfile, indent=4)
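# Usage sketch (illustrative, not part of nut): loads the JSON written by
# exportNcaMap() above and counts the NCAs recorded per title. The
# 'ncamap.json' path and the loadNcaMap helper name are assumptions.
import json

def loadNcaMap(path='ncamap.json'):
    with open(path, 'r') as f:
        ncaMap = json.load(f)
    return {titleId: len(entry.get('files', [])) for titleId, entry in ncaMap.items()}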
if len(sys.argv) == 1:
    nut.scan()
    organize()
    downloadAll()
    scanLatestTitleUpdates()
    export('titledb/versions.txt', ['rightsId', 'version'])

if args.scan_dlc is not None:
    nut.initTitles()
    nut.initFiles()

    queue = Titles.Queue()

    if len(args.scan_dlc) > 0:
        for id in args.scan_dlc:
            queue.add(id)
    else:
        for i, k in Titles.items():
            if not k.isDLC and not k.isUpdate and k.id:
                queue.add(k.id)

    startDlcScan(queue)

if args.scan_base is not None:
    nut.initTitles()
    nut.initFiles()
    startBaseScan()

if args.export_verified_keys:
    exportVerifiedKeys(args.export_verified_keys)

if args.export_keys:
    exportKeys(args.export_keys)
def getTitles(request, response):
    o = []
    for k, t in Titles.items():
        o.append(t.__dict__)
    response.write(json.dumps(o))
def decompressAll():
    initTitles()
    initFiles()

    global activeDownloads
    global status

    i = 0
    Print.info('De-compressing All')

    if Config.reverse:
        q = queue.LifoQueue()
    else:
        q = queue.Queue()

    for k, t in Titles.items():
        try:
            i = i + 1

            if not t.isActive(skipKeyCheck=True):
                continue

            lastestNsz = t.getLatestNsz()
            if not lastestNsz:
                continue

            lastestNsp = t.getLatestNsp()
            if lastestNsp is not None and int(lastestNsp.version) >= int(lastestNsz.version):
                continue

            if Config.dryRun:
                Print.info('nsp ver = %x, nsz ver = %x, %s' % (getVer(lastestNsp), getVer(lastestNsz), t.getName()))

            if Config.download.fileSizeMax is not None and lastestNsz.getFileSize() > Config.download.fileSizeMax:
                continue

            if Config.download.fileSizeMin is not None and lastestNsz.getFileSize() < Config.download.fileSizeMin:
                continue

            q.put(lastestNsz.path)
        except BaseException as e:
            Print.info('DECOMPRESS ALL EXCEPTION: ' + str(e))

    numThreads = Config.threads
    threads = []

    s = Status.create(q.qsize(), desc="NSPs", unit='B')

    if numThreads > 0:
        Print.info('creating decompression threads ' + str(q.qsize()))

        for i in range(numThreads):
            t = threading.Thread(target=decompressWorker, args=[q, Config.paths.nspOut, s])
            t.daemon = True
            t.start()
            threads.append(t)

        for t in threads:
            t.join()
    else:
        decompressWorker(q, Config.paths.nspOut, s)

    s.close()
def getTitles(request, response):
    o = []
    for k, t in Titles.items():
        o.append(t.__dict__)

    response.headers['Content-Type'] = 'application/json'
    response.write(json.dumps(o))
def compressAll(level=19):
    initTitles()
    initFiles()

    global activeDownloads
    global status

    i = 0
    Print.info('Compressing All')

    if Config.reverse:
        q = queue.LifoQueue()
    else:
        q = queue.Queue()

    for k, t in Titles.items():
        try:
            i = i + 1

            if not t.isActive(skipKeyCheck=True):
                continue

            lastestNsp = t.getLatestNsp()
            if not lastestNsp:
                continue

            if lastestNsp.titleId.endswith('000') and lastestNsp.version and int(lastestNsp.version) > 0:
                Print.info('Cannot compress sparse file: ' + str(lastestNsp.path))
                continue

            lastestNsz = t.getLatestNsz()
            if lastestNsz is not None and int(lastestNsz.version) >= int(lastestNsp.version):
                continue

            if Config.download.fileSizeMax is not None and lastestNsp.getFileSize() > Config.download.fileSizeMax:
                continue

            if Config.download.fileSizeMin is not None and lastestNsp.getFileSize() < Config.download.fileSizeMin:
                continue

            q.put(lastestNsp.path)
        except BaseException as e:
            Print.info('COMPRESS ALL EXCEPTION: ' + str(e))

    numThreads = Config.threads
    threads = []

    s = Status.create(q.qsize(), desc="NSPs", unit='B')

    if numThreads > 0:
        Print.info('creating compression threads ' + str(q.qsize()))

        for i in range(numThreads):
            t = threading.Thread(target=compressWorker, args=[q, level, Config.paths.nspOut, s])
            t.daemon = True
            t.start()
            threads.append(t)

        for t in threads:
            t.join()
    else:
        compressWorker(q, level, Config.paths.nspOut, s)

    s.close()