def organize():
	"""Move every indexed NSP to its canonical path and demote duplicates.

	After moving, titles with more than one file keep only the latest file in
	place; other copies are moved aside via moveDupe().  Empty directories are
	pruned and the file index is saved.
	"""
	nut.initTitles()
	nut.initFiles()
	Print.info('organizing')
	# Iterate over a snapshot: f.move() renames entries and mutates Nsps.files,
	# which would raise "dictionary changed size during iteration" on the live dict.
	for f in list(Nsps.files.values()):
		f.move()
	for titleId, t in Titles.data().items():
		files = t.getFiles()
		if len(files) > 1:
			latest = t.getLatestFile()
			if not latest:
				continue
			for f in files:
				if f.path != latest.path:
					f.moveDupe()
	Print.info('removing empty directories')
	Nsps.removeEmptyDir('.', False)
	Nsps.save()
def downloadThread(i):
	# Download worker: drains Titles.queue via the CDN until the queue empties
	# or Config.isRunning goes False.  `i` is this worker's slot index.
	if not hasCdn:
		return
	Print.info('starting thread ' + str(i))
	global status
	while Config.isRunning and not Titles.queue.empty():
		try:
			id = Titles.queue.shift()
			if id and Titles.contains(id):
				# Mark this slot busy while a title is in flight.
				activeDownloads[i] = 1
				t = Titles.get(id)
				path = cdn.downloadTitle(t.id.lower(), None, t.key)
				if path and os.path.isfile(path):
					# Wrap the downloaded file and file it into the library.
					nsp = Fs.Nsp(path, None)
					nsp.move()
					Nsps.save()
					if status is not None:
						status.add()
				activeDownloads[i] = 0
			else:
				# Nothing usable dequeued; back off briefly.
				time.sleep(1)
		except KeyboardInterrupt:
			pass
		except BaseException as e:
			Print.error('downloadThread exception: ' + str(e))
			traceback.print_exc(file=sys.stdout)
			# Clear the busy flag so the slot is not reported stuck.
			activeDownloads[i] = 0
	Print.info('ending thread ' + str(i))
def downloadThread(i):
	# CDNSP-based download worker; loops until Config.isRunning is cleared.
	Print.info('starting thread ' + str(i))
	global status
	while Config.isRunning:
		try:
			id = Titles.queue.shift()
			if id and Titles.contains(id):
				activeDownloads[i] = 1
				t = Titles.get(id)
				# NOTE: lastestVersion() is the project's own (misspelled) API name.
				path = CDNSP.download_game(t.id.lower(), t.lastestVersion(), t.key, True, '', True)
				if os.path.isfile(path):
					nsp = Fs.Nsp(path, None)
					nsp.move()
					Nsps.files[nsp.path] = nsp
					Nsps.save()
					status.add()
				activeDownloads[i] = 0
			else:
				# Queue empty or unknown id; back off briefly.
				time.sleep(1)
		except KeyboardInterrupt:
			pass
		except BaseException as e:
			Print.error(str(e))
			activeDownloads[i] = 0
	Print.info('ending thread ' + str(i))
def initFiles():
	"""Load the NSP file index, at most once per process."""
	global isInitFiles
	if not isInitFiles:
		isInitFiles = True
		Nsps.load()
def initFiles(verify=True):
	"""Load the NSP file index once, forwarding *verify* to Nsps.load."""
	global isInitFiles
	if not isInitFiles:
		isInitFiles = True
		Nsps.load(verify=verify)
def test_load_with_empty_filesize_in_files_json(self):
	"""Loading the fixture yields an Nsp with a positive fileSize and correct titleId."""
	tid = '0000000000000000'
	nspName = f"hbl [{tid}].nsp"
	self.__prepare_hbl_title_fixture(nspName)
	Nsps.load()
	loaded = Nsps.get(os.path.sep + nspName)
	self.assertIsNotNone(loaded)
	self.assertGreater(loaded.fileSize, 0)
	self.assertEqual(loaded.titleId, tid)
def initTitles():
	"""One-shot initialisation of the title database, file index and download queue."""
	global isInitTitles
	if not isInitTitles:
		isInitTitles = True
		Titles.load()
		Nsps.load()
		Titles.queue.load()
def scan():
	"""Initialise titles/files, then scan every configured path for NSPs."""
	global hasScanned
	hasScanned = True
	initTitles()
	initFiles()
	for scanPath in Config.paths.scan:
		Nsps.scan(scanPath)
def scan():
	"""Scan all configured paths, save the index, and return the new-file count."""
	global hasScanned
	hasScanned = True
	initFiles()
	found = sum(Nsps.scan(p) for p in Config.paths.scan)
	Nsps.save()
	return found
def getInstall(request, response):
	# Push a download URL for the requested title to a Switch console so it can
	# pull the NSP itself (length-prefixed payload over a raw TCP socket).
	nsp = Nsps.getByTitleId(request.bits[2])
	try:
		# Embed basic-auth credentials so the console can fetch without a session.
		url = ('%s:%s@%s:%d/api/download/%s/title.nsp' % (request.user.id, request.user.password, Config.server.hostname, Config.server.port, request.bits[2]))
		Print.info('Installing ' + url)
		file_list_payloadBytes = url.encode('ascii')
		sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
		#sock.settimeout(1)
		sock.connect((request.user.switchHost, request.user.switchPort))
		#sock.settimeout(99999)
		# Protocol: 4-byte big-endian payload length, then the URL bytes.
		sock.sendall(
			struct.pack('!L', len(file_list_payloadBytes)) + file_list_payloadBytes)
		# Busy-wait until the console acknowledges with at least one byte.
		while len(sock.recv(1)) < 1:
			time.sleep(0.05)
		sock.close()
		response.write(
			json.dumps({
				'success': True,
				'message': 'install successful'
			}))
	except BaseException as e:
		# Report any failure (connect, send, lookup) back to the web client.
		response.write(json.dumps({'success': False, 'message': str(e)}))
def updateinfobox(self):
	"""Refresh the info panel for the currently selected software entry.

	Pulls metadata from the remote API where available and falls back to the
	locally scanned values otherwise.
	"""
	self.updatelistboxcursor()
	if self.softwarelist:
		sc = self.softwarelist[self.currentselection]
		tid = sc["titleid"]
		api_dict = api.getTitle(Nsps.getBaseId(tid))
		self.updateAuthorImage(api_dict)
		typ = sc["type"] or None
		ttl = sc["name"] or "n/a"
		if typ:
			ttl += " [{}]".format(typ)
		# FIX: condition was inverted — it used the local author when the API
		# *did* have a publisher, and indexed api_dict['publisher'] (KeyError)
		# when it did not.  Prefer the API publisher, fall back to local data.
		if 'publisher' in api_dict:
			author = api_dict['publisher']
		else:
			author = sc["author"] or "unknown"
		if 'description' in api_dict:
			desc = api_dict['description']
		else:
			desc = sc["description"] or "no data"
		self.updatetitle(ttl)
		self.updateauthor(author)
		self.updatedescription(desc)
		self.controller.after(10, self.infobox.reset_placement)
def suggest(self, titleId, titleKey):
	# Record a suggested title key on the blockchain, together with enough NCA
	# data (header + partition header) to later verify the key cryptographically.
	# Returns True once a transaction was queued, False if no usable NCA found.
	# Raises IndexError on missing arguments, IOError when the title is unknown.
	if not titleId or not titleKey:
		raise IndexError('Missing values')
	titleId = titleId.upper()
	nsp = Nsps.getByTitleId(titleId)
	if not nsp:
		raise IOError('Title not found: ' + titleId)
	nsp.open()
	# First pass: prefer a PROGRAM NCA with a CTR-encrypted PFS0 section.
	for f in nsp:
		if type(f) == Fs.Nca and f.header.contentType == Type.Content.PROGRAM:
			for fs in f.sectionFilesystems:
				if fs.fsType == Type.Fs.PFS0 and fs.cryptoType == Type.Crypto.CTR:
					f.seek(0)
					ncaHeader = f.read(0x400)  # full NCA header block
					sectionHeaderBlock = fs.buffer
					f.seek(fs.offset)
					pfs0Header = f.read(0x10)  # PFS0 magic + header
					entry = KeyEntry(titleId, titleKey.upper(), ncaHeader, sectionHeaderBlock, pfs0Header, fs.offset)
					index = blockchain.new_transaction(entry)
					blockchain.new_block()
					return True
	# Second pass: fall back to any NCA exposing a CTR-encrypted ROMFS section.
	for f in nsp:
		if type(f) == Fs.Nca:
			for fs in f.sectionFilesystems:
				if fs.fsType == Type.Fs.ROMFS and fs.cryptoType == Type.Crypto.CTR:
					f.seek(0)
					ncaHeader = f.read(0x400)
					sectionHeaderBlock = fs.buffer
					# IVFC level offset/size live at 0x18/0x20 in the section header.
					levelOffset = int.from_bytes(sectionHeaderBlock[0x18:0x20], byteorder='little', signed=False)
					levelSize = int.from_bytes(sectionHeaderBlock[0x20:0x28], byteorder='little', signed=False)
					offset = fs.offset + levelOffset
					f.seek(offset)
					pfs0Header = f.read(levelSize)
					entry = KeyEntry(titleId, titleKey.upper(), ncaHeader, sectionHeaderBlock, pfs0Header, offset, fs = fs)
					index = blockchain.new_transaction(entry)
					blockchain.new_block()
					return True
	return False
def getInfo(request, response):
	"""Write size and mtime for the requested title id as a JSON response."""
	try:
		titleId = request.bits[2]
		nsp = Nsps.getByTitleId(titleId)
		response.write(json.dumps({
			'id': titleId,
			'size': nsp.getFileSize(),
			'mtime': nsp.getFileModified(),
		}))
	except BaseException as e:
		response.write(json.dumps({'success': False, 'message': str(e)}))
def scan(scanTitles=False):
	"""Scan configured paths for NSP files and return the number found.

	When *scanTitles* is truthy, refresh the title database and region data
	before scanning.
	"""
	global hasScanned
	hasScanned = True
	if scanTitles:  # was `scanTitles == True`; plain truthiness is the Python idiom
		initTitles()
		refreshRegions()
		importRegion(Config.region, Config.language)
	initFiles()
	r = 0
	for path in Config.paths.scan:
		r += Nsps.scan(path)
	Nsps.save()
	return r
def compressWorker(q, level, output, totalStatus):
	# Worker: pulls paths off the queue and compresses each to NSZ, registering
	# the result in the file index.  Exits when the queue is drained.
	while not q.empty():
		try:
			# Non-blocking get; raises queue.Empty if another worker beat us to it.
			path = q.get(block=False)
			totalStatus.add(1)
			nszFile = compress(path, level, output)
			if nszFile:
				nsp = Fs.Nsp(nszFile, None)
				# Compressed output presumably keeps the ticket — TODO confirm.
				nsp.hasValidTicket = True
				nsp.move(forceNsp=True)
				Nsps.files[nsp.path] = nsp
				Nsps.save()
		except queue.Empty as e:
			# Queue drained between the empty() check and get(): normal exit.
			return
		except BaseException as e:
			# Keep the worker alive; log and move on to the next file.
			Print.info('COMPRESS WORKER EXCEPTION: ' + str(e))
			traceback.print_exc(file=sys.stdout)
def getInfo(request, response):
	"""Serve the title's metadata dict, augmented with file size and mtime, as JSON."""
	try:
		response.headers['Content-Type'] = 'application/json'
		titleId = request.bits[2]
		nsp = Nsps.getByTitleId(titleId)
		info = Titles.get(titleId).__dict__
		info['size'] = nsp.getFileSize()
		info['mtime'] = nsp.getFileModified()
		response.write(json.dumps(info))
	except BaseException as e:
		response.write(json.dumps({'success': False, 'message': str(e)}))
def pullWorker(q, s):
	# FTP-sync worker: downloads each queued remote NSP into a local temp file,
	# then files it into the library.  `s` is a shared progress/status counter.
	while True:
		if q.empty():
			break
		nsp = q.get()
		if not nsp:
			break
		try:
			# Preserve the ticket flag across re-wrapping the downloaded file.
			hasValidTicket = nsp.hasValidTicket
			tmpFile = getName(nsp.titleId, nsp.version, path=nsp.path)
			Print.info('Downloading ' + nsp.path)
			if Config.dryRun:
				continue
			# Skip files outside the configured size window, when limits are set.
			if Config.download.fileSizeMax is not None and nsp.getFileSize() > Config.download.fileSizeMax:
				continue
			if Config.download.fileSizeMin is not None and nsp.getFileSize() < Config.download.fileSizeMin:
				continue
			with open(tmpFile, 'wb') as f:
				serveFile(f, nsp.downloadPath, os.path.basename(nsp.path))
			# Re-wrap the local copy and register it in the index.
			nsp = Fs.Nsp(tmpFile, None)
			nsp.hasValidTicket = hasValidTicket
			nsp.move(forceNsp=hasValidTicket)
			Nsps.files[nsp.path] = nsp
			Nsps.save()
		except BaseException as e:
			Print.error('FTP SYNC EXCEPTION: ' + str(e))
			#raise #TODO
		s.add()
	Print.info('thread exiting')
def new_suggestion():
	# Flask route: accept a (titleId, titleKey) suggestion and queue it as a
	# blockchain transaction, backed by NCA data extracted from the local NSP.
	# Returns (body, status) tuples: 201 on success, 400 on any failure.
	try:
		titleId = request.args.get('titleId')
		titleKey = request.args.get('titleKey')
		# Check that the required fields are in the POST'ed data
		required = ['titleId', 'titleKey']
		if not titleId or not titleKey:
			return 'Missing values', 400
		titleId = titleId.upper()
		nsp = Nsps.getByTitleId(titleId)
		if not nsp:
			return 'Title not found', 400
		nsp.open()
		# Look for a PROGRAM NCA with a CTR-encrypted PFS0 (title-rights) section.
		for f in nsp:
			if type(
					f
			) == Fs.Nca and f.header.contentType == Type.Content.PROGRAM:
				for fs in f.sectionFilesystems:
					if fs.fsType == Type.Fs.PFS0 and fs.cryptoType == Type.Crypto.CTR:
						f.seek(0)
						ncaHeader = f.read(0x400)  # full NCA header
						sectionHeaderBlock = fs.buffer
						f.seek(fs.offset)
						pfs0Header = f.read(0x10)  # PFS0 magic + header
						entry = KeyEntry(titleId, titleKey.upper(), ncaHeader, sectionHeaderBlock, pfs0Header, fs.offset)
						index = blockchain.new_transaction(entry)
						blockchain.new_block()
						response = {
							'message':
							f'Transaction will be added to Block {index}'
						}
						return jsonify(response), 201
		return 'Verification failed: unable to locate correct title rights partition', 400
	except BaseException as e:
		return str(e), 400
def scan():
	"""Refresh titles/regions, scan all configured paths, save titles, return count."""
	global hasScanned
	hasScanned = True
	initTitles()
	initFiles()
	refreshRegions()
	importRegion(Config.region, Config.language)
	found = Nsps.scan(Config.paths.scan)
	Titles.save()
	return found
nsp.verified = False raise IOError('bad file') nsp.verified = True Print.info('good file: ' + str(path)) bf.write('good file: %s\n' % str(path)) f.close() except: f.close() Print.error('bad file: ' + str(path)) bf.write('bad file: %s\n' % str(path)) s.add() s.close() Nsps.save() if args.verify_title_key: nut.initTitles() nut.initFiles() if blockchain.verifyKey(args.verify[0], args.verify[1]): Print.info('Title key is valid') else: Print.info('Title key is INVALID %s - %s' % (args.verify[0], args.verify[1])) if args.restore: nut.initTitles() nut.initFiles() prev = Config.extractVersion Config.extractVersion = True
def organize():
	# Per-extension organizer: for every active title, group its files by
	# extension and keep only the latest .nsp/.nsz/.nsx/.xci in place,
	# demoting all other copies via moveDupe().
	initTitles()
	initFiles()
	# scan()
	Print.info('organizing')
	# for k, f in Nsps.files.items():
	#	 f.move()
	for id, t in Titles.data().items():
		if not t.isActive(True):
			continue
		# Bucket the title's files by their 4-char extension (".nsp", ".nsz", ...).
		files = {}
		for f in t.getFiles():
			ext = f.path[-4:]
			if ext not in files:
				files[ext] = []
			files[ext].append(f)
		hasNsp = False
		if '.nsp' in files and len(files['.nsp']) > 0:
			latest = t.getLatestNsp()
			if latest:
				for f in files['.nsp']:
					if f.path != latest.path:
						f.moveDupe()
				hasNsp = True
				latest.move()
		if '.nsz' in files and len(files['.nsz']) > 0:
			latest = t.getLatestNsz()
			if latest:
				for f in files['.nsz']:
					if f.path != latest.path:
						f.moveDupe()
				hasNsp = True
				latest.move()
		if '.nsx' in files and len(files['.nsx']) > 0:
			latest = t.getLatestNsx()
			if latest:
				for f in files['.nsx']:
					if f.path != latest.path:
						f.moveDupe()
				# An .nsx is redundant when a full .nsp/.nsz exists, so even the
				# latest .nsx is demoted in that case.
				if hasNsp:
					latest.moveDupe()
				else:
					latest.move()
		if '.xci' in files and len(files['.xci']) > 0:
			latest = t.getLatestXci()
			if latest:
				for f in files['.xci']:
					if f.path != latest.path:
						f.moveDupe()
				latest.move()
	Print.info('removing empty directories')
	Nsps.removeEmptyDir('.', False)
	Nsps.save()
def verifyKey(titleId=None, titleKey=None):
	"""Verify a title key and record it on the blockchain.

	Falls back to the flask request args when *titleId*/*titleKey* are not
	passed.  Returns True when the key is accepted (or already recorded with
	the same value), False otherwise.
	"""
	# FIX: nsp must be pre-bound — the original referenced it in the except
	# handler, raising NameError when the failure happened before the lookup.
	nsp = None
	try:
		if not titleId:
			titleId = request.args.get('titleId')
		if not titleKey:
			titleKey = request.args.get('titleKey')
		if not titleId or not titleKey:
			return False
		titleId = titleId.upper()
		# Already recorded?  Accept only when the stored key matches.
		existingKey = blockchain.hasTitle(titleId)
		if existingKey:
			return existingKey == titleKey
		nsp = Nsps.getByTitleId(titleId)
		if not nsp:
			Print.info('404 ' + titleId)
			return False
		nsp.open()
		# First pass: PROGRAM NCA with a CTR-encrypted PFS0 (title-rights) section.
		for f in nsp:
			if type(f) == Fs.Nca and f.header.contentType == Type.Content.PROGRAM:
				for fs in f.sectionFilesystems:
					if fs.fsType == Type.Fs.PFS0 and fs.cryptoType == Type.Crypto.CTR:
						f.seek(0)
						ncaHeader = f.read(0x400)
						sectionHeaderBlock = fs.buffer
						f.seek(fs.offset)
						pfs0Header = f.read(0x10)
						entry = KeyEntry(titleId, titleKey.upper(), ncaHeader, sectionHeaderBlock, pfs0Header, fs.offset)
						blockchain.new_transaction(entry)
						blockchain.new_block()
						nsp.close()
						return True
		# Second pass: fall back to any NCA with a CTR-encrypted ROMFS section.
		for f in nsp:
			if type(f) == Fs.Nca:
				for fs in f.sectionFilesystems:
					if fs.fsType == Type.Fs.ROMFS and fs.cryptoType == Type.Crypto.CTR:
						f.seek(0)
						ncaHeader = f.read(0x400)
						sectionHeaderBlock = fs.buffer
						levelOffset = int.from_bytes(
							sectionHeaderBlock[0x18:0x20],
							byteorder='little',
							signed=False)
						levelSize = int.from_bytes(
							sectionHeaderBlock[0x20:0x28],
							byteorder='little',
							signed=False)
						offset = fs.offset + levelOffset
						f.seek(offset)
						pfs0Header = f.read(levelSize)
						entry = KeyEntry(titleId, titleKey.upper(), ncaHeader, sectionHeaderBlock, pfs0Header, offset)
						blockchain.new_transaction(entry)
						blockchain.new_block()
						nsp.close()
						return True
		nsp.close()
		return False
	except BaseException as e:
		print('key exception: ' + str(e))
		if nsp is not None:
			nsp.close()
		return False
traceback.print_exc(file=sys.stdout) raise if args.move: nut.initTitles() nut.initFiles() for path in expandFiles(args.file): try: f = Fs.Nsp() f.setPath(str(path)) f.move() except BaseException as e: Print.info('error moving: ' + str(e)) traceback.print_exc(file=sys.stdout) raise Nsps.save() if args.export_nca_map: exportNcaMap(args.export_nca_map) if args.compress_all: nut.initTitles() nut.initFiles() nut.compressAll(19 if args.level is None else args.level) if args.export: nut.initTitles() nut.initFiles() nut.export(args.export) if args.export_versions:
def test_scan_empty_dir(self):
	"""Scanning an empty directory completes without raising."""
	for directory in (_SCAN_PATH, 'titledb'):
		self.fs.makedir(directory)
	Nsps.scan(_SCAN_PATH)
def scan(path):
	"""Scan *path* for NSP files when one is given; no-op on a falsy path."""
	if not path:
		return
	Nsps.scan(path, True)
def on_scan(self):
	# UI handler: clear the table, rescan the configured paths (registering
	# new files), then repopulate the table from the refreshed index.
	self.tableWidget.setRowCount(0)
	Nsps.scan(Config.paths.scan, True)
	self.refreshTable()
def move(self, forceNsp=False):
	# Rename this NSP to its canonical library path, updating Nsps.files.
	# Returns True when a move was attempted, False when skipped (no path,
	# already in place, duplicate exists, or NCA verification failed).
	if not self.path:
		Print.error('no path set')
		return False
	# Auto-compression: if this file already lives in the output dir and is
	# not yet .nsz/.xcz, compress it and file the resulting NSZ separately.
	if os.path.abspath(self.path).startswith(
			os.path.abspath(Config.paths.nspOut)
	) and not self.path.endswith('.nsz') and not self.path.endswith(
			'.xcz') and Config.compression.auto:
		nszFile = nut.compress(self.path, Config.compression.level,
								os.path.abspath(Config.paths.nspOut))
		if nszFile:
			nsp = Fs.Nsp(nszFile, None)
			nsp.hasValidTicket = True
			nsp.move(forceNsp=True)
			Nsps.files[nsp.path] = nsp
			Nsps.save()
	newPath = self.fileName(forceNsp=forceNsp)
	if not newPath:
		Print.error('could not get filename for ' + self.path)
		return False
	# Case/separator-insensitive compare: already at the target location.
	if os.path.abspath(newPath).lower().replace(
			'\\',
			'/') == os.path.abspath(self.path).lower().replace('\\', '/'):
		return False
	if os.path.isfile(newPath):
		# Target exists: leave both copies and report the duplicate.
		Print.info('\nduplicate title: ')
		Print.info(os.path.abspath(self.path))
		Print.info(os.path.abspath(newPath))
		Print.info('\n')
		return False
	if not self.verifyNcaHeaders():
		Print.error('verification failed: could not move title for ' +
					str(self.titleId) + ' or ' +
					str(Title.getBaseId(self.titleId)))
		return False
	try:
		Print.info(self.path + ' -> ' + newPath)
		if not Config.dryRun:
			os.makedirs(os.path.dirname(newPath), exist_ok=True)
		#newPath = self.fileName(forceNsp = forceNsp)
		if not Config.dryRun:
			# Close our own handle before the OS-level move.
			if self.isOpen():
				self.close()
			shutil.move(self.path, newPath)
			# Keep the index consistent with the rename.
			if self.path in Nsps.files:
				del Nsps.files[self.path]
			Nsps.files[newPath] = self
			self.path = newPath
	except BaseException as e:
		Print.error('failed to rename file! %s -> %s : %s' %
					(self.path, newPath, e))
		# Fall back to filing the stubborn file as a duplicate.
		if not Config.dryRun:
			self.moveDupe()
	return True
def extractNcaMeta(files = []):
	# Extract per-NCA metadata (and CNMT content lists) from NSPs and store it
	# via getNca()/getCnmt(), persisting with saveNcaData().  With no *files*
	# argument, every indexed .nsp not yet processed is examined.
	# NOTE(review): mutable default `files=[]` is only read here, never mutated.
	initTitles()
	initFiles()
	loadNcaData()
	global ncaData
	# Build the work queue: path -> Nsp, skipping already-processed entries.
	q = {}
	if not files or len(files) == 0:
		for path, nsp in Nsps.files.items():
			if not nsp.path.endswith('.nsp'):  # and not nsp.path.endswith('.xci'):
				continue
			try:
				# Skip if already extracted, or the special 816 system title.
				if hasattr(nsp, 'extractedNcaMeta') and (nsp.extractedNcaMeta or nsp.extractedNcaMeta == 1) or '0100000000000816' in path:
					# Print.info('skipping')
					continue
				if hasCnmt(nsp.titleId, nsp.version):
					continue
				q[path] = nsp
			except BaseException:
				Print.info('exception: %s' % (path))
				raise
	else:
		for path in files:
			try:
				nsp = Nsps.registerFile(path, registerLUT = False)
				if hasCnmt(nsp.titleId, nsp.version):
					continue
				q[path] = nsp
			except BaseException:
				Print.info('exception: %s' % (path))
				raise
	c = 0
	for path, nsp in tqdm(q.items()):
		if not nsp.path.endswith('.nsp'):  # and not nsp.path.endswith('.xci'):
			continue
		try:
			c += 1
			nsp.open(path, 'rb')
			# Normalise an empty key to None so the check below fires.
			if nsp.title().key == '':
				nsp.title().key = None
			if not nsp.title().key:
				Print.info('could not find title key for %s' % nsp.path)
			# Pass 1: collect header metadata for every NCA into ncaDataMap,
			# keyed by uppercase NCA id, so pass 2 can resolve buildIds.
			ncaDataMap = {}
			for n in nsp:
				if not isinstance(n, Nca):
					continue
				ncaId = n._path.split('.')[0]
				data = getNca(ncaId)
				data.contentType = int(n.header.contentType)
				data.isGameCard = n.header.isGameCard
				data.cryptoType = n.header.cryptoType
				data.keyIndex = n.header.keyIndex
				data.size = n.header.size
				data.titleId = n.header.titleId
				data.contentIndex = n.header.contentIndex
				data.sdkVersion = n.header.sdkVersion
				data.cryptoType2 = n.header.cryptoType2
				data.rightsId = n.header.rightsId
				data.buildId = n.buildId()
				# An all-zero rights id means "no rights id".
				if data.rightsId == b'00000000000000000000000000000000':
					data.rightsId = None
				else:
					data.rightsId = data.rightsId.decode()
				ncaDataMap[ncaId.upper()] = data
			# print(ncaDataMap)
			# Pass 2: re-walk the NCAs; for META NCAs (contentType == 1) parse
			# the contained CNMTs and record their content/meta entries.
			for n in nsp:
				if not isinstance(n, Nca):
					continue
				ncaId = n._path.split('.')[0]
				data = getNca(ncaId)
				data.contentType = int(n.header.contentType)
				data.isGameCard = n.header.isGameCard
				data.cryptoType = n.header.cryptoType
				data.keyIndex = n.header.keyIndex
				data.size = n.header.size
				data.titleId = n.header.titleId
				data.contentIndex = n.header.contentIndex
				data.sdkVersion = n.header.sdkVersion
				data.cryptoType2 = n.header.cryptoType2
				data.rightsId = n.header.rightsId
				if data.rightsId == b'00000000000000000000000000000000':
					data.rightsId = None
				else:
					data.rightsId = data.rightsId.decode()
				if data.contentType == 1:
					for p in n:
						for m in p:
							if not isinstance(m, Cnmt):
								continue
							cnmt = getCnmt(m.titleId, m.version)
							cnmt.contentEntries = []
							cnmt.metaEntries = []
							cnmt.titleType = m.titleType
							for e in m.contentEntries:
								if not e.ncaId.upper() in ncaDataMap:
									Print.info(ncaDataMap)
									Print.info('nca missing: ' + e.ncaId.upper())
									continue
								mapData = ncaDataMap[e.ncaId.upper()]
								# Attach the buildId from pass 1 when available.
								if mapData is not None and (mapData.buildId is not None):
									cnmt.contentEntries.append({'ncaId': e.ncaId, 'type': e.type, 'buildId': mapData.buildId})
								else:
									cnmt.contentEntries.append({'ncaId': e.ncaId, 'type': e.type})
							for e in m.metaEntries:
								cnmt.metaEntries.append({'titleId': e.titleId, 'version': e.version, 'type': e.type, 'install': e.install})
							cnmt.requiredSystemVersion = m.requiredSystemVersion
							cnmt.requiredApplicationVersion = m.requiredApplicationVersion
							cnmt.otherApplicationId = m.otherApplicationId
							# print(str(data.__dict__))
			Print.info('processed %s' % nsp.path)
			nsp.extractedNcaMeta = True
		except BaseException as e:
			Print.info('exception: %s %s' % (path, str(e)))
		finally:
			nsp.close()
	# save remaining files
	saveNcaData()
def getDownload(request, response, start=None, end=None):
	# Stream an NSP to the client in chunks, honouring HTTP Range headers
	# (RFC 7233 style) and an optional start/end pair in the URL path.
	try:
		nsp = Nsps.getByTitleId(request.bits[2])
		response.attachFile(nsp.titleId + '.nsp')
		# URL form: .../<titleId>/<start>/<end> overrides the defaults.
		if len(request.bits) >= 5:
			start = int(request.bits[-2])
			end = int(request.bits[-1])
		#chunkSize = 0x1000000
		chunkSize = 0x400000
		with open(nsp.path, "rb") as f:
			# Seek to the end to learn the file size.
			f.seek(0, 2)
			size = f.tell()
			if 'Range' in request.headers:
				# Parse "bytes=start-end"; either bound may be empty.
				start, end = request.headers.get('Range').strip().strip(
					'bytes=').split('-')
				if end == '':
					end = size - 1
				else:
					end = int(end) + 1  # make `end` exclusive
				if start == '':
					# Suffix range: last `end` bytes of the file.
					start = size - end
				else:
					start = int(start)
				if start >= size or start < 0 or end <= 0:
					return Server.Response400(
						request, response,
						'Invalid range request %d - %d' % (start, end))
				response.setStatus(206)  # Partial Content
			else:
				if start == None:
					start = 0
				if end == None:
					end = size
			# Clamp and reject empty/inverted ranges.
			if end >= size:
				end = size
			if end <= start:
				response.write(b'')
				return
			print('ranged request for %d - %d' % (start, end))
			f.seek(start, 0)
			response.setMime(nsp.path)
			response.setHeader('Accept-Ranges', 'bytes')
			response.setHeader('Content-Range',
								'bytes %s-%s/%s' % (start, end - 1, size))
			response.setHeader('Content-Length', str(end - start))
			#Print.info(response.headers['Content-Range'])
			response.sendHeader()
			# HEAD requests stop after the headers.
			if not response.head:
				size = end - start  # now the number of bytes left to send
				i = 0
				status = Status.create(
					size, 'Downloading ' + os.path.basename(nsp.path))
				while i < size:
					chunk = f.read(min(size - i, chunkSize))
					i += len(chunk)
					status.add(len(chunk))
					if chunk:
						pass
						response.write(chunk)
					else:
						# Short read: file ended early; stop streaming.
						break
				status.close()
	except BaseException as e:
		Print.error('NSP download exception: ' + str(e))
		traceback.print_exc(file=sys.stdout)
	# Ensure the response is never left completely empty.
	if response.bytesSent == 0:
		response.write(b'')
def test_scan_missing_path(self):
	"""Nsps.scan raises FileNotFoundError when the path does not exist."""
	self.assertRaises(FileNotFoundError, Nsps.scan, _SCAN_PATH)