def add(self, name, size, bar=False, isthreaded=False):
    """Register a new entry in this PFS0 stream and return the backing file.

    Logs an '- Appending: ...' message through the most appropriate channel:
    a throwaway tqdm bar when no shared bar was supplied, the caller's
    ``bar`` when running single-threaded, or ``Print.info`` as a last resort.

    Args:
        name: entry file name recorded in ``self.files``.
        size: entry size in bytes (coerced with ``int`` for display).
        bar: ``False`` or a tqdm-like object used for progress output.
        isthreaded: when True and ``bar`` is given, suppress the message
            (threaded workers own their own positioned bars).

    Returns:
        ``self.f`` — the underlying writable stream, positioned at the
        offset recorded for this entry.
    """
    # Build the message once instead of re-formatting it in every branch
    # (the original duplicated this expression three times).
    msg = '- Appending: %s (%s)' % (name, str(sq_tools.getSize(int(size))))
    try:
        if bar is False:
            # No shared bar: emit via a transient zero-total tqdm so the
            # message plays nicely with any other active progress bars.
            t = tqdm(total=0, unit='B', unit_scale=True, leave=False,
                     position=0)
            t.write(msg)
            t.close()
        elif not isthreaded:
            # NOTE(review): the original reached this path by letting
            # `bar == False` raise inside the try; this explicit branch
            # keeps the same observable routing without exception-driven
            # control flow.
            bar.write(msg)
    except Exception:
        # Best-effort logging only — never let a display failure abort
        # the append itself.
        Print.info(msg)
    self.files.append({'name': name, 'size': size, 'offset': self.f.tell()})
    return self.f
def add(self, name, size):
    """Register a new entry in this PFS0 stream and return the backing file.

    Logs an '- Appending: ...' line via ``Print.info`` and records the
    entry's name, size and current stream offset in ``self.files``.

    Args:
        name: entry file name.
        size: entry size in bytes (coerced with ``int`` for display).

    Returns:
        ``self.f`` — the underlying writable stream, positioned at the
        offset recorded for this entry.
    """
    Print.info('- Appending: %s (%s)' % (name, str(sq_tools.getSize(int(size)))))
    self.files.append({'name': name, 'size': size, 'offset': self.f.tell()})
    # The original also built an unused dict `t = {...}` here (with a
    # redundant second self.f.tell() call); it was dead code and is removed.
    return self.f
# Lists files or folders beneath one or more Google Drive paths (optionally
# inside team drives), with an optional interactive picker.
#
# Parameters (as used by the visible code):
#   path     - a path string ('.+'-separated for multiple roots) or a list.
#   TD       - team-drive name, a list of them (paired with paths), or
#              "pick" to choose interactively via TD_picker.
#   ext      - accepted but never read in this body (kept for API compat).
#   filter   - substring injected as "name contains '...'" into the Drive
#              query; shadows the builtin `filter` locally.
#   order    - accepted but never read in this body.
#   mime     - 'files' (default) or 'folders'; selects the mimeType query and
#              the fields retrieved.
#   Pick     - when True, presents results through pick()/Picker.
#   Print    - when True, prints running retrieval counts (shadows the
#              project's Print logger inside this function).
#   pickmode - 'multi' (space/right to mark, ENTER to finish) or 'single'.
#
# Returns: a single joined path (pickmode 'single'), a list of joined paths,
# [[paths, TD], ...] when a TD list was given, or False on error/empty.
#
# NOTE(review): pagination loops call drive_service.files().list() with
# pageToken until nextPageToken is absent; the 'root' special case omits the
# shared-drive flags. The `except:` around the whole listing swallows every
# error and returns False. Custom picker keys 'e'/'E' end folder selection.
# NOTE(review): original formatting was collapsed onto single physical
# lines; the code below is kept byte-identical.
def search_folder(path, TD=None, ext=None, filter=None, order=None, mime='files', Pick=True, Print=True, pickmode='multi'): file_list = list() userfilter = filter isroot = False TDlist = False file_listv2 = list() if isinstance(path, list): paths = path else: paths = path.split('.+') if isinstance(TD, list): TDlist = TD index = 0 for path in paths: # print(path) try: if userfilter == None or filter == "": filter = "" else: filter = " and name contains '{}'".format(userfilter) if TDlist != False: TD = TDlist[index] index += 1 if TD == "pick": TD = TD_picker(path) if TD != None: remote = location(route=path, TD_Name=TD) else: remote = location(route=path) drive_service = remote.drive_service if drive_service == None: if remote.token_name == None: auth = auth() else: auth = auth(token=token_name) drive_service = auth.drive_service tk, fl = get_path_parameters(path) if not fl and TD == None: root = 'root' remote.root = root remote.ID = root elif not fl: root = remote.ID remote.root = root remote.ID = root else: root = remote.root # print(remote.ID) if mime == 'files': page_token = None pagesize = 1000 while True: if root != 'root': results = drive_service.files().list( q="mimeType!='application/vnd.google-apps.folder' and '{}' in parents{}" .format(remote.ID, filter), pageSize=pagesize, pageToken=page_token, fields= "nextPageToken, files(id, name, size, createdTime)", includeItemsFromAllDrives=True, supportsAllDrives=True).execute() else: results = drive_service.files().list( q="mimeType!='application/vnd.google-apps.folder' and '{}' in parents{}" .format(remote.ID, filter), pageSize=pagesize, pageToken=page_token, fields= "nextPageToken, files(id, name, size, createdTime)" ).execute() items = results.get('files', []) try: page_token = results.get('nextPageToken', None) except: pass for file in items: try: file_list.append([ file['name'], file['size'], path, file['createdTime'] ]) except: pass if Print == True: print(f'- {path}: Total Retrieved ' + str(len(file_list))) 
if page_token == None: break elif mime == 'folders': page_token = None pagesize = 100 while True: if root != 'root': results = drive_service.files().list( q="mimeType='application/vnd.google-apps.folder' and '{}' in parents{}" .format(remote.ID, filter), pageSize=pagesize, pageToken=page_token, fields="nextPageToken, files(id, name)", includeItemsFromAllDrives=True, supportsAllDrives=True).execute() else: results = drive_service.files().list( q="mimeType='application/vnd.google-apps.folder' and '{}' in parents{}" .format(remote.ID, filter), pageSize=pagesize, pageToken=page_token, fields="nextPageToken, files(id, name)").execute() items = results.get('files', []) try: page_token = results.get('nextPageToken', None) except: pass for file in items: try: file_list.append([file['name'], path]) except: pass if Print == True: print(f'- {path}: Total Retrieved ' + str(len(file_list))) if page_token == None: break except: print(f'- {path}: Retrieved 0') return False if not file_list: return False file_list.sort(key=lambda x: x[0]) if Pick == True: if pickmode != 'single': title = 'Select results (press SPACE\RIGHT to mark\\unmark, ENTER to continue): ' elif mime == "files": title = 'Select result:' else: title = 'Select result:\n + Press space or right to select content \n + Press E to finish selection' oplist = list() cleanlist = list() if mime == 'folders': for item in file_list: oplist.append(item[0]) cleanlist.append(clean_name(item[0])) else: for item in file_list: sz = str(sq_tools.getSize(int(item[1]))) oplist.append(item[0] + ' | ' + sz) cleanlist.append(clean_name(item[0]) + ' | ' + sz) options = cleanlist if pickmode != 'single': selected = pick(options, title, multi_select=True, min_selection_count=0) elif mime == "files": selected = pick(options, title, min_selection_count=1) if selected[0] == False: return False else: picker = Picker(options, title, min_selection_count=1) def end_selection(picker): return False, -1 picker.register_custom_handler(ord('e'), 
end_selection) picker.register_custom_handler(ord('E'), end_selection) selected = picker.start() if selected[0] == False: return False # print (selected) oplist = file_list file_list = list() if pickmode == 'single': if mime == 'folders': basepath = oplist[selected[1]][1] if basepath[-1] != "/" and basepath[-1] != "\\": basepath = basepath + '/' pth = basepath + oplist[selected[1]][0] else: basepath = oplist[selected[1]][2] if basepath[-1] != "/" and basepath[-1] != "\\": basepath = basepath + '/' pth = basepath + oplist[selected[1]][0] return pth if mime == 'folders': for file in selected: basepath = oplist[file[1]][1] if basepath[-1] != "/" and basepath[-1] != "\\": basepath = basepath + '/' pth = basepath + oplist[file[1]][0] file_list.append(pth) else: for file in selected: basepath = oplist[file[1]][2] if basepath[-1] != "/" and basepath[-1] != "\\": basepath = basepath + '/' pth = basepath + oplist[file[1]][0] file_list.append(pth) if not file_list: return False if Print == True: print("\n- User selected the following results: ") for file in file_list: print(file) else: print("- User selected {} files".format(str(len(file_list)))) if TDlist != False and file_list: file_listv2.append([file_list, TD]) if TDlist != False: return file_listv2 return file_list
# Compresses an NSP container into NSZ format using zstandard.
#
# Flow visible in the code below:
#   1. Derives `isthreaded`/`pos` from the `pos` argument; when threaded,
#      pre-creates `nthreads` positioned tqdm bars.
#   2. Reads the .cnmt.nca to build an ordered file list (content NCAs
#      first, then xml / .tik / .cert entries) and generates the PFS0
#      header to obtain `properheadsize`.
#   3. Opens the container and a Pfs0Stream at `nszPath` (input path with
#      trailing letter swapped to 'z' — NOTE(review): the computed
#      ofolder/ofile destination is later overwritten by the
#      `nszPath = Path(filePath)` recomputation, so ofolder/ofile appear to
#      be ignored — looks like a bug upstream; verify against callers).
#   4. For PROGRAM/PUBLIC_DATA NCAs that pass isNcaPacked(), writes the
#      0x4000-byte NCA header verbatim, then an 'NCZSECTN' section table
#      (offset/size/cryptoType/key/counter per section), then streams each
#      section through ZstdCompressor.stream_writer and resizes the entry
#      to the compressed length. DATA-type NCAs are skipped unless
#      `delta=True`; everything else is copied uncompressed.
#
# NOTE(review): `buffer` is both the chunk-size parameter and reused as the
# read-buffer variable inside the loops. Original formatting was collapsed
# onto single physical lines; code kept byte-identical.
def compress(filePath, ofolder=None, level=17, threads=0, delta=False, ofile=None, buffer=65536, pos=False, nthreads=False): isthreaded = False if pos != False: isthreaded = True elif str(pos) == '0': isthreaded = True else: pos = 0 pos = int(pos) files_list = sq_tools.ret_nsp_offsets(filePath) files = list() filesizes = list() if isthreaded == True and nthreads != False: tqlist = list() for i in range(nthreads): tq = tqdm(total=0, unit='B', unit_scale=True, leave=False, position=i) tqlist.append(tq) fplist = list() for k in range(len(files_list)): entry = files_list[k] fplist.append(entry[0]) for i in range(len(files_list)): entry = files_list[i] cnmtfile = entry[0] if cnmtfile.endswith('.cnmt.nca'): f = squirrelNSP(filePath, 'rb') titleid, titleversion, base_ID, keygeneration, rightsId, RSV, RGV, ctype, metasdkversion, exesdkversion, hasHtmlManual, Installedsize, DeltaSize, ncadata = f.get_data_from_cnmt( cnmtfile) f.flush() f.close() for j in range(len(ncadata)): row = ncadata[j] # print(row) if row['NCAtype'] != 'Meta': test1 = str(row['NcaId']) + '.nca' test2 = str(row['NcaId']) + '.ncz' if test1 in fplist or test2 in fplist: # print(str(row['NcaId'])+'.nca') files.append(str(row['NcaId']) + '.nca') filesizes.append(int(row['Size'])) else: # print(str(row['NcaId'])+'.cnmt.nca') files.append(str(row['NcaId']) + '.cnmt.nca') filesizes.append(int(row['Size'])) for k in range(len(files_list)): entry = files_list[k] fp = entry[0] sz = int(entry[3]) if fp.endswith('xml'): files.append(fp) filesizes.append(sz) for k in range(len(files_list)): entry = files_list[k] fp = entry[0] sz = int(entry[3]) if fp.endswith('.tik'): files.append(fp) filesizes.append(sz) for k in range(len(files_list)): entry = files_list[k] fp = entry[0] sz = int(entry[3]) if fp.endswith('.cert'): files.append(fp) filesizes.append(sz) nspheader = sq_tools.gen_nsp_header(files, filesizes) properheadsize = len(nspheader) compressionLevel = int(level) container = nutFs.factory(filePath) 
container.open(filePath, 'rb') CHUNK_SZ = buffer if ofolder is None and ofile is None: nszPath = filePath[0:-1] + 'z' elif ofolder is not None: nszPath = os.path.join(ofolder, os.path.basename(filePath[0:-1] + 'z')) elif ofile is not None: nszPath = ofile tsize = properheadsize for sz in filesizes: tsize += sz if isthreaded == True: from colorama import Fore colors = Fore.__dict__ k = 0 l = pos for col in colors: if l > len(colors): l = l - len(colors) color = colors[col] if k == (l + 1): break else: k += 1 t = tqdm(total=tsize, unit='B', unit_scale=True, leave=False, position=pos, bar_format="{l_bar}%s{bar}%s{r_bar}" % (color, Fore.RESET)) else: t = tqdm(total=tsize, unit='B', unit_scale=True, leave=False, position=0) # nszPath = os.path.abspath(nszPath) nszPath = Path(filePath) nszPath = os.path.join(nszPath.parents[0], os.path.basename(filePath[0:-1] + 'z')) if isthreaded == False: t.write('\n Compressing with level %d and %d threads' % (compressionLevel, threads)) t.write('%s -> %s \n' % (filePath, nszPath)) newNsp = nutFs.Pfs0.Pfs0Stream(nszPath, headsize=properheadsize) for file in files: for nspf in container: if nspf._path == file: if isinstance( nspf, nutFs.Nca.Nca ) and nspf.header.contentType == nutFs.Type.Content.DATA and delta == False: if isthreaded == False: t.write('-> Skipping delta fragment') continue if isinstance(nspf, nutFs.Nca.Nca) and ( nspf.header.contentType == nutFs.Type.Content.PROGRAM or nspf.header.contentType == nutFs.Type.Content.PUBLIC_DATA): if isNcaPacked(nspf): cctx = zstandard.ZstdCompressor(level=compressionLevel, threads=threads) newFileName = nspf._path[0:-1] + 'z' f = newNsp.add(newFileName, nspf.size, t, isthreaded) start = f.tell() nspf.seek(0) f.write(nspf.read(ncaHeaderSize)) written = ncaHeaderSize compressor = cctx.stream_writer(f) sections = get_sections(nspf) header = b'NCZSECTN' header += len(sections).to_bytes(8, 'little') i = 0 for fs in sections: i += 1 header += fs.offset.to_bytes(8, 'little') header += 
fs.size.to_bytes(8, 'little') header += fs.cryptoType.to_bytes(8, 'little') header += b'\x00' * 8 header += fs.cryptoKey header += fs.cryptoCounter f.write(header) written += len(header) timestamp = time.time() decompressedBytes = ncaHeaderSize totsize = 0 for fs in sections: totsize += fs.size for section in sections: #print('offset: %x\t\tsize: %x\t\ttype: %d\t\tiv%s' % (section.offset, section.size, section.cryptoType, str(hx(section.cryptoCounter)))) o = nspf.partition(offset=section.offset, size=section.size, n=None, cryptoType=section.cryptoType, cryptoKey=section.cryptoKey, cryptoCounter=bytearray( section.cryptoCounter), autoOpen=True) while not o.eof(): buffer = o.read(CHUNK_SZ) t.update(len(buffer)) if len(buffer) == 0: raise IOError('read failed') written += compressor.write(buffer) decompressedBytes += len(buffer) compressor.flush(zstandard.FLUSH_FRAME) elapsed = time.time() - timestamp minutes = elapsed / 60 seconds = elapsed % 60 speed = 0 if elapsed == 0 else (nspf.size / elapsed) written = f.tell() - start if isthreaded == False: t.write( '\n * Compressed at %d%% from %s to %s - %s' % (int(written * 100 / nspf.size), str(sq_tools.getSize(decompressedBytes)), str(sq_tools.getSize(written)), nspf._path)) t.write( ' * Compressed in %02d:%02d at speed: %.1f MB/s\n' % (minutes, seconds, speed / 1000000.0)) newNsp.resize(newFileName, written) continue else: print('not packed!') f = newNsp.add(nspf._path, nspf.size, t, isthreaded) nspf.seek(0) while not nspf.eof(): buffer = nspf.read(CHUNK_SZ) t.update(len(buffer)) f.write(buffer) t.close() newNsp.close() if isthreaded == True and nthreads != False: for i in range(nthreads): tqlist[i].close()
# Converts an XCI gamecard image into a "supertrimmed", compressed XCZ.
#
# Flow visible in the code below:
#   1. Derives `isthreaded`/`pos` from `pos`; in the single-threaded case
#      clears `exchangefile`, then walks the secure HFS0 partition to
#      extract .tik rights IDs and title keys (zero-padding 30/31-char
#      keys) into `exchangefile`.
#   2. Builds the ordered file list from the .cnmt.nca (content NCAs, then
#      xml / .tik / .cert), hashes the files via `file_hash` to obtain
#      `sec_hashlist`, and generates a provisional XCI header to size
#      `properheadsize`.
#   3. Streams each PROGRAM/PUBLIC_DATA NCA from the secure partition
#      through zstandard into a Pfs0Stream at `nszPath` using the same
#      NCZSECTN layout as `compress()`; unpacked or other files are copied
#      verbatim. Collects `files2`/`filesizes2` with post-compression sizes.
#   4. Regenerates the XCI header from the final sizes and rewrites it at
#      offset 0 of the output file; deletes `exchangefile` and returns
#      `nszPath`.
#
# NOTE(review): `outfile`-derived `ofile`/`outheader` are computed twice;
# the second computation wins. Original formatting was collapsed onto
# single physical lines; code kept byte-identical.
def supertrim_xci(filepath, buffer=65536, outfile=None, keepupd=False, level=17, threads=0, pos=False, nthreads=False): isthreaded = False if pos != False: isthreaded = True elif str(pos) == '0': isthreaded = True else: pos = 0 pos = int(pos) if isthreaded == False: try: exchangefile.deletefile() except: pass f = squirrelXCI(filepath) t = tqdm(total=0, unit='B', unit_scale=True, leave=False, position=0) for nspF in f.hfs0: if str(nspF._path) == "secure": for ticket in nspF: if str(ticket._path).endswith('.tik'): if isthreaded == False: t.write('- Titlerights: ' + ticket.rightsId) tk = (str(hex(ticket.getTitleKeyBlock()))[2:]).upper() if isthreaded == False: if len(tk) == 30: tk = '00' + str(tk).upper() if len(tk) == 31: tk = '0' + str(tk).upper() t.write('- Titlekey: ' + tk) exchangefile.add(ticket.rightsId, tk) f.flush() f.close() t.close() files_list = sq_tools.ret_xci_offsets(filepath) files = list() filesizes = list() if isthreaded == True and nthreads != False: tqlist = list() for i in range(nthreads): tq = tqdm(total=0, unit='B', unit_scale=True, leave=False, position=i) tqlist.append(tq) fplist = list() for k in range(len(files_list)): entry = files_list[k] fplist.append(entry[0]) for i in range(len(files_list)): entry = files_list[i] cnmtfile = entry[0] if cnmtfile.endswith('.cnmt.nca'): f = squirrelXCI(filepath) titleid, titleversion, base_ID, keygeneration, rightsId, RSV, RGV, ctype, metasdkversion, exesdkversion, hasHtmlManual, Installedsize, DeltaSize, ncadata = f.get_data_from_cnmt( cnmtfile) f.flush() f.close() for j in range(len(ncadata)): row = ncadata[j] # print(row) if row['NCAtype'] != 'Meta': test1 = str(row['NcaId']) + '.nca' test2 = str(row['NcaId']) + '.ncz' if test1 in fplist or test2 in fplist: # print(str(row['NcaId'])+'.nca') files.append(str(row['NcaId']) + '.nca') filesizes.append(int(row['Size'])) else: # print(str(row['NcaId'])+'.cnmt.nca') files.append(str(row['NcaId']) + '.cnmt.nca') filesizes.append(int(row['Size'])) for k in 
range(len(files_list)): entry = files_list[k] fp = entry[0] sz = int(entry[3]) if fp.endswith('xml'): files.append(fp) filesizes.append(sz) for k in range(len(files_list)): entry = files_list[k] fp = entry[0] sz = int(entry[3]) if fp.endswith('.tik'): files.append(fp) filesizes.append(sz) for k in range(len(files_list)): entry = files_list[k] fp = entry[0] sz = int(entry[3]) if fp.endswith('.cert'): files.append(fp) filesizes.append(sz) sec_hashlist = list() f = squirrelXCI(filepath) try: for file in files: sha, size, gamecard = f.file_hash(file) # print(sha) if sha != False: sec_hashlist.append(sha) except BaseException as e: Print.error('Exception: ' + str(e)) f.flush() f.close() xci_header, game_info, sig_padding, xci_certificate, root_header, upd_header, norm_header, sec_header, rootSize, upd_multiplier, norm_multiplier, sec_multiplier = sq_tools.get_xciheader( files, filesizes, sec_hashlist) compressionLevel = int(level) CHUNK_SZ = buffer if outfile == None: ofile = filepath[0:-1] + 'z' else: ofile = outfile ofile = os.path.abspath(ofile) outheader = xci_header outheader += game_info outheader += sig_padding outheader += xci_certificate outheader += root_header outheader += upd_header outheader += norm_header outheader += sec_header properheadsize = len(outheader) compressionLevel = int(level) CHUNK_SZ = buffer if outfile == None: nszPath = filepath[0:-1] + 'z' else: nszPath = outfile nszPath = os.path.abspath(nszPath) tsize = properheadsize for sz in filesizes: tsize += sz if isthreaded == True: from colorama import Fore colors = Fore.__dict__ k = 0 l = pos for col in colors: if l > len(colors): l = l - len(colors) color = colors[col] if k == (l + 1): break else: k += 1 t = tqdm(total=tsize, unit='B', unit_scale=True, leave=False, position=pos, bar_format="{l_bar}%s{bar}%s{r_bar}" % (color, Fore.RESET)) else: t = tqdm(total=tsize, unit='B', unit_scale=True, leave=False, position=0) if isthreaded == False: t.write('Compressing with level %d and %d threads' % 
(compressionLevel, threads)) t.write('\n %s -> %s \n' % (filepath, nszPath)) newNsp = nutFs.Pfs0.Pfs0Stream(nszPath, headsize=properheadsize, mode='wb+') xcicontainer = Xci(filepath) # f.compressed_supertrim(buffer,outfile,keepupd,level,threads) files2 = list() filesizes2 = list() for file in files: for nspF in xcicontainer.hfs0: if str(nspF._path) == "secure": for nca in nspF: if nca._path == file: if isinstance(nca, nutFs.Nca.Nca) and ( nca.header.contentType == nutFs.Type.Content.PROGRAM or nca.header.contentType == nutFs.Type.Content.PUBLIC_DATA): if isNcaPacked(nca): cctx = zstandard.ZstdCompressor( level=compressionLevel, threads=threads) newFileName = nca._path[0:-1] + 'z' f = newNsp.add(newFileName, nca.size, t, isthreaded) start = f.tell() nca.seek(0) data = nca.read(ncaHeaderSize) f.write(data) nca.seek(ncaHeaderSize) written = ncaHeaderSize compressor = cctx.stream_writer(f) sections = get_sections(nca) header = b'NCZSECTN' header += len(sections).to_bytes(8, 'little') i = 0 for fs in sections: i += 1 header += fs.offset.to_bytes(8, 'little') header += fs.size.to_bytes(8, 'little') header += fs.cryptoType.to_bytes( 8, 'little') header += b'\x00' * 8 header += fs.cryptoKey header += fs.cryptoCounter f.write(header) t.update(len(header)) written += len(header) timestamp = time.time() decompressedBytes = ncaHeaderSize totsize = 0 for fs in sections: totsize += fs.size for section in sections: #print('offset: %x\t\tsize: %x\t\ttype: %d\t\tiv%s' % (section.offset, section.size, section.cryptoType, str(hx(section.cryptoCounter)))) o = nca.partition( offset=section.offset, size=section.size, n=None, cryptoType=section.cryptoType, cryptoKey=section.cryptoKey, cryptoCounter=bytearray( section.cryptoCounter), autoOpen=True) while not o.eof(): buffer = o.read(CHUNK_SZ) t.update(len(buffer)) if len(buffer) == 0: raise IOError('read failed') written += compressor.write(buffer) decompressedBytes += len(buffer) compressor.flush(zstandard.FLUSH_FRAME) elapsed = 
time.time() - timestamp minutes = elapsed / 60 seconds = elapsed % 60 speed = 0 if elapsed == 0 else (nca.size / elapsed) written = f.tell() - start if isthreaded == False: t.write( '\n * Compressed at %d%% from %s to %s - %s' % (int(written * 100 / nca.size), str( sq_tools.getSize( decompressedBytes)), str(sq_tools.getSize(written)), nca._path)) t.write( ' * Compressed in %02d:%02d at speed: %.1f MB/s\n' % (minutes, seconds, speed / 1000000.0)) newNsp.resize(newFileName, written) files2.append(newFileName) filesizes2.append(written) continue else: if isthreaded == False: t.write('not packed!') f = newNsp.add(nca._path, nca.size, t, isthreaded) files2.append(nca._path) filesizes2.append(nca.size) nca.seek(0) while not nca.eof(): buffer = nca.read(CHUNK_SZ) t.update(len(buffer)) f.write(buffer) t.close() newNsp.close() xci_header, game_info, sig_padding, xci_certificate, root_header, upd_header, norm_header, sec_header, rootSize, upd_multiplier, norm_multiplier, sec_multiplier = sq_tools.get_xciheader( files2, filesizes2, sec_hashlist) outheader = xci_header outheader += game_info outheader += sig_padding outheader += xci_certificate outheader += root_header outheader += upd_header outheader += norm_header outheader += sec_header with open(nszPath, 'rb+') as o: o.seek(0) o.write(outheader) try: exchangefile.deletefile() except: pass if isthreaded == True and nthreads != False: for i in range(nthreads): tqlist[i].close() return nszPath
# Extracts a positional list of game-metadata fields from an NSP/NSX/NSZ or
# XCI/XCZ file.
#
# Opens the container with Fs.ChromeNsp / Fs.ChromeXci, pulls the metadata
# dict via return_DBdict(), and appends each field to `send_` in a fixed
# order; every lookup is wrapped in try/except so a missing key yields '-'
# (or "Not available" / "No" where the code says so). Returns [] for
# unrecognised extensions.
#
# NOTE(review): the OUTPUT IS ORDER-SENSITIVE — callers index into the
# returned list positionally, so the append sequence must not change.
# NOTE(review): a second, extended `getinfo` appears later in this file; if
# both live in the same module the later definition shadows this one.
# NOTE(review): `dict` shadows the builtin locally. Original formatting was
# collapsed onto single physical lines; code kept byte-identical.
def getinfo(filename): print('* Retrieving Game Information') if filename.endswith('.nsp') or filename.endswith( '.nsx') or filename.endswith('.nsz'): f = Fs.ChromeNsp(filename, 'rb') elif filename.endswith('.xci') or filename.endswith('.xcz'): f = Fs.ChromeXci(filename) else: return [] dict = f.return_DBdict() try: ModuleId, BuildID8, BuildID16 = f.read_buildid() ModuleId = sq_tools.trimm_module_id(ModuleId) except: ModuleId = "-" BuildID8 = "-" BuildID16 = "-" try: MinRSV = sq_tools.getMinRSV(dict['keygeneration'], dict['RSV']) RSV_rq_min = sq_tools.getFWRangeRSV(MinRSV) FW_rq = sq_tools.getFWRangeKG(dict['keygeneration']) except: MinRSV = "-" RSV_rq_min = "-" FW_rq = "-" try: RGV = dict['RGV'] RS_number = int(int(RGV) / 65536) except: RGV = "0" RS_number = "0" send_ = list() try: if str(dict['Type']).upper() == 'DLC': send_.append(dict['contentname']) else: send_.append(dict['baseName']) except: send_.append('-') try: send_.append(dict['editor']) except: send_.append('-') try: send_.append(dict['id']) except: send_.append('-') try: send_.append(dict['version']) except: send_.append('-') try: send_.append(dict['Type']) except: send_.append('-') try: send_.append(dict['dispversion']) except: send_.append('-') try: send_.append(dict['metasdkversion']) except: send_.append('-') try: send_.append(dict['exesdkversion']) except: send_.append('-') try: lang = str((', '.join(dict['languages']))) send_.append(lang) except: send_.append('-') try: send_.append(dict['RSV']) except: send_.append('-') try: send_.append(str(dict['keygeneration']) + " -> " + FW_rq) except: send_.append('-') try: send_.append(dict['nsuId']) except: send_.append('-') try: genres = str((', '.join(dict['genretags']))) send_.append(genres) except: send_.append('-') try: ratags = str((', '.join(dict['ratingtags']))) send_.append(ratags) except: send_.append('-') try: send_.append(dict['worldreleasedate']) except: send_.append('-') try: send_.append(dict['numberOfPlayers']) except: send_.append('-') 
try: send_.append(str(dict['eshoprating'])) except: send_.append('-') try: send_.append(sq_tools.getSize(dict['InstalledSize'])) except: send_.append('-') try: send_.append(BuildID8) except: send_.append('-') try: send_.append(ModuleId) except: send_.append('-') try: send_.append(dict['key']) except: send_.append('-') try: send_.append(RSV_rq_min[1:-1]) except: send_.append('-') if 'regions' in dict: reg = str((', '.join(dict['regions']))) send_.append(reg) else: send_.append('-') try: if dict["intro"] != '-' and dict["intro"] != None and dict[ "intro"] != '': if str(dict['Type']).upper() != 'DLC': send_.append(dict['baseName'] + ". " + dict["intro"]) else: send_.append(dict['contentname'] + ". " + dict["intro"]) else: if str(dict['Type']).upper() != 'DLC': send_.append(dict['baseName']) else: send_.append(dict['contentname']) except: if str(dict['Type']).upper() != 'DLC': try: send_.append(dict['baseName']) except: send_.append('-') else: try: send_.append(dict['contentname']) except: send_.append('-') try: if dict["description"] != '-': send_.append(dict["description"]) else: send_.append("Not available") except: send_.append("Not available") try: if str(dict['HtmlManual']).lower() == "true": send_.append("Yes") else: send_.append("No") except: send_.append('-') try: # print(str(dict['linkedAccRequired'])) if str(dict['linkedAccRequired']).lower() == "true": send_.append("Yes") else: send_.append("No") except: send_.append('-') try: if dict["ContentNumber"] != '-': if int(dict["ContentNumber"]) > 1: if 'ContentString' in dict: send_.append(dict["ContentString"]) else: send_.append("Yes ({})".format(dict["ContentNumber"])) else: send_.append("No") else: send_.append("-") except: send_.append("-") try: if filename.endswith('.nsp') or filename.endswith( '.nsx') or filename.endswith('.nsz'): send_.append("Eshop") elif filename.endswith('.xci') or filename.endswith('.xcz'): send_.append("Gamecard") else: send_.append("-") except: send_.append("-") try: 
send_.append(sq_tools.getSize(dict['GCSize'])) except: send_.append('-') try: x = get_screen_gallery(dict["bannerUrl"], dict["screenshots"]) send_.append(x) except: send_.append("Not available") try: RQversion = 0 if str(dict['Type']).upper() == 'DLC': if int(RGV) > 0: RQversion = str(RGV) + " -> Patch ({})".format(str(RS_number)) else: RQversion = str(RGV) + " -> Application ({})".format( str(RS_number)) send_.append(RQversion) except: send_.append('-') f.flush() f.close() return send_
# Extended variant of getinfo(): same positional metadata extraction as the
# earlier definition, plus the "NEW JSON STUFF" fields — data[32] developer,
# data[33] productCode, data[34] OnlinePlay, data[35] SaveDataCloud,
# data[36] playmodes, data[37] metascore, data[38] userscore, data[39]
# FWoncard, data[40] video (gated on the module-level `enablevideoplayback`
# flag), data[41] openscore (suppressed when equal to metascore).
#
# NOTE(review): the OUTPUT IS ORDER-SENSITIVE — callers index the returned
# list positionally (see the #data[NN] markers), so the append sequence
# must not change.
# NOTE(review): this re-definition shadows the earlier `getinfo` if both
# are in the same module; `dict` shadows the builtin locally; `video` uses
# ast.literal_eval on stored data. Original formatting was collapsed onto
# single physical lines; code kept byte-identical.
def getinfo(filename): print('* Retrieving Game Information') if filename.endswith('.nsp')or filename.endswith('.nsx') or filename.endswith('.nsz'): f = Fs.ChromeNsp(filename, 'rb') elif filename.endswith('.xci') or filename.endswith('.xcz'): f = Fs.ChromeXci(filename) else: return [] dict=f.return_DBdict() try: ModuleId,BuildID8,BuildID16=f.read_buildid() ModuleId=sq_tools.trimm_module_id(ModuleId) except: ModuleId="-";BuildID8="-";BuildID16="-"; try: MinRSV=sq_tools.getMinRSV(dict['keygeneration'],dict['RSV']) RSV_rq_min=sq_tools.getFWRangeRSV(MinRSV) FW_rq=sq_tools.getFWRangeKG(dict['keygeneration']) except: MinRSV="-";RSV_rq_min="-";FW_rq="-" try: RGV=dict['RGV'] RS_number=int(int(RGV)/65536) except: RGV="0";RS_number="0"; send_=list() try: if str(dict['Type']).upper()=='DLC': send_.append(dict['contentname']) else: send_.append(dict['baseName']) except:send_.append('-') try: send_.append(dict['editor']) except:send_.append('-') try: send_.append(dict['id']) except:send_.append('-') try: send_.append(dict['version']) except:send_.append('-') try: send_.append(dict['Type']) except:send_.append('-') try: send_.append(dict['dispversion']) except:send_.append('-') try: send_.append(dict['metasdkversion']) except:send_.append('-') try: send_.append(dict['exesdkversion']) except:send_.append('-') try: lang=str((', '.join(dict['languages']))) send_.append(lang) except:send_.append('-') try: send_.append(dict['RSV']) except:send_.append('-') try: send_.append(str(dict['keygeneration'])+" -> " +FW_rq) except:send_.append('-') try: send_.append(dict['nsuId']) except:send_.append('-') try: genres=str((', '.join(dict['genretags']))) send_.append(genres) except:send_.append('-') try: ratags=str((', '.join(dict['ratingtags']))) send_.append(ratags) except:send_.append('-') try: send_.append(dict['worldreleasedate']) except:send_.append('-') try: send_.append(dict['numberOfPlayers']) except:send_.append('-') try: send_.append(str(dict['eshoprating'])) except:send_.append('-') 
try: send_.append(sq_tools.getSize(dict['InstalledSize'])) except:send_.append('-') try: send_.append(BuildID8) except:send_.append('-') try: send_.append(ModuleId) except:send_.append('-') try: send_.append(dict['key']) except:send_.append('-') try: send_.append(RSV_rq_min[1:-1]) except:send_.append('-') if 'regions' in dict: reg=str((', '.join(dict['regions']))) send_.append(reg) else: send_.append('-') try: if dict["intro"] !='-' and dict["intro"] !=None and dict["intro"] !='': if str(dict['Type']).upper()!='DLC': send_.append(dict['baseName']+". "+dict["intro"]) else: send_.append(dict['contentname']+". "+dict["intro"]) else: if str(dict['Type']).upper()!='DLC': send_.append(dict['baseName']) else: send_.append(dict['contentname']) except: if str(dict['Type']).upper()!='DLC': try: send_.append(dict['baseName']) except:send_.append('-') else: try: send_.append(dict['contentname']) except:send_.append('-') try: if dict["description"] !='-': send_.append(dict["description"]) else: send_.append("Not available") except:send_.append("Not available") try: if str(dict['HtmlManual']).lower()=="true": send_.append("Yes") else: send_.append("No") except:send_.append('-') try: # print(str(dict['linkedAccRequired'])) if str(dict['linkedAccRequired']).lower()=="true": send_.append("Yes") else: send_.append("No") except:send_.append('-') try: if dict["ContentNumber"] !='-': if int(dict["ContentNumber"])>1: if 'ContentString' in dict: send_.append(dict["ContentString"]) else: send_.append("Yes ({})".format(dict["ContentNumber"])) else: send_.append("No") else: send_.append("-") except:send_.append("-") try: if filename.endswith('.nsp')or filename.endswith('.nsx') or filename.endswith('.nsz'): send_.append("Eshop") elif filename.endswith('.xci') or filename.endswith('.xcz'): send_.append("Gamecard") else: send_.append("-") except:send_.append("-") try: send_.append(sq_tools.getSize(dict['GCSize'])) except:send_.append('-') try:#data[30] 
x=get_screen_gallery(dict["bannerUrl"],dict["screenshots"]) send_.append(x) except:send_.append("Not available") try:#data[31] RQversion=0 if str(dict['Type']).upper()=='DLC': if int(RGV)>0: RQversion=str(RGV)+" -> Patch ({})".format(str(RS_number)) else: RQversion=str(RGV)+" -> Application ({})".format(str(RS_number)) send_.append(RQversion) except:send_.append('-') ###NEW JSON STUFF### try:#data[32] send_.append(dict['developer']) except:send_.append('-') try:#data[33] send_.append(dict['productCode']) except:send_.append('-') try:#data[34] if str(dict['OnlinePlay']).lower()=="true": send_.append("Yes") else: send_.append("No") except:send_.append('No') try:#data[35] if str(dict['SaveDataCloud']).lower()=="true": send_.append("Yes") else: send_.append("No") except:send_.append('No') try:#data[36] playmodes=str((', '.join(dict['playmodes']))) send_.append(playmodes) except:send_.append('-') try:#data[37] if str(dict['metascore']).lower()=='false': send_.append('-') else: send_.append(dict['metascore']) except:send_.append('-') try:#data[38] if str(dict['userscore']).lower()=='false': send_.append('-') else: send_.append(dict['userscore']) except:send_.append('-') try:#data[39] FWoncard=dict['FWoncard'] FWoncard=str(FWoncard).strip("'") send_.append(FWoncard) except:send_.append('-') try:#data[40] if str(enablevideoplayback).lower() == 'true': video=dict['video'] video=ast.literal_eval(str(video)) video=video[0] send_.append(str(video)) else: send_.append('-') except:send_.append('-') try:#data[41] if str(dict['openscore']).lower()=='false': send_.append('-') else: if dict['openscore'] != dict['metascore']: send_.append(dict['openscore']) else: send_.append('-') except:send_.append('-') f.flush() f.close() return send_
# Converts an XCI gamecard image into a compressed NSZ (PFS0) container —
# the single-threaded, non-positioned sibling of supertrim_xci/compress.
#
# Flow visible in the code below:
#   1. Clears `exchangefile`, extracts .tik rights IDs / title keys from the
#      secure HFS0 partition into `exchangefile` (no zero-padding of short
#      keys here, unlike supertrim_xci — TODO confirm that is intentional).
#   2. Builds the ordered file list from the .cnmt.nca (content NCAs, then
#      xml / .tik / .cert) and generates the PFS0 header for
#      `properheadsize`.
#   3. For PROGRAM/PUBLIC_DATA NCAs in the secure partition that pass
#      isNcaPacked(): writes the raw NCA header, an 'NCZSECTN' section
#      table built from sortedFs(nca).getEncryptionSections() (not
#      get_sections() as elsewhere), streams each section through
#      zstandard, then resizes the entry to the compressed length. Other
#      NCAs are copied verbatim with a per-file tqdm bar.
#   4. Deletes `exchangefile` and returns `nszPath`.
#
# NOTE(review): unlike compress(), this loops over the container only (not
# over `files`), so non-NCA entries (xml/tik/cert) are not written —
# presumably intentional for this conversion; verify against callers.
# NOTE(review): original formatting was collapsed onto single physical
# lines; code kept byte-identical.
def xci_to_nsz(filepath, buffer=65536, outfile=None, keepupd=False, level=17, threads=0): try: exchangefile.deletefile() except: pass f = squirrelXCI(filepath) for nspF in f.hfs0: if str(nspF._path) == "secure": for ticket in nspF: if str(ticket._path).endswith('.tik'): print('- Titlerights: ' + ticket.rightsId) tk = (str(hex(ticket.getTitleKeyBlock()))[2:]).upper() print('- Titlekey: ' + tk) exchangefile.add(ticket.rightsId, tk) f.flush() f.close() files_list = sq_tools.ret_xci_offsets(filepath) files = list() filesizes = list() fplist = list() for k in range(len(files_list)): entry = files_list[k] fplist.append(entry[0]) for i in range(len(files_list)): entry = files_list[i] cnmtfile = entry[0] if cnmtfile.endswith('.cnmt.nca'): f = squirrelXCI(filepath) titleid, titleversion, base_ID, keygeneration, rightsId, RSV, RGV, ctype, metasdkversion, exesdkversion, hasHtmlManual, Installedsize, DeltaSize, ncadata = f.get_data_from_cnmt( cnmtfile) f.flush() f.close() for j in range(len(ncadata)): row = ncadata[j] # print(row) if row['NCAtype'] != 'Meta': test1 = str(row['NcaId']) + '.nca' test2 = str(row['NcaId']) + '.ncz' if test1 in fplist or test2 in fplist: # print(str(row['NcaId'])+'.nca') files.append(str(row['NcaId']) + '.nca') filesizes.append(int(row['Size'])) else: # print(str(row['NcaId'])+'.cnmt.nca') files.append(str(row['NcaId']) + '.cnmt.nca') filesizes.append(int(row['Size'])) for k in range(len(files_list)): entry = files_list[k] fp = entry[0] sz = int(entry[3]) if fp.endswith('xml'): files.append(fp) filesizes.append(sz) for k in range(len(files_list)): entry = files_list[k] fp = entry[0] sz = int(entry[3]) if fp.endswith('.tik'): files.append(fp) filesizes.append(sz) for k in range(len(files_list)): entry = files_list[k] fp = entry[0] sz = int(entry[3]) if fp.endswith('.cert'): files.append(fp) filesizes.append(sz) nspheader = sq_tools.gen_nsp_header(files, filesizes) properheadsize = len(nspheader) compressionLevel = int(level) CHUNK_SZ = buffer if 
outfile == None: nszPath = filepath[0:-1] + 'z' else: nszPath = outfile nszPath = os.path.abspath(nszPath) Print.info('Compressing with level %d and %d threads' % (compressionLevel, threads)) Print.info('\n %s -> %s \n' % (filepath, nszPath)) newNsp = nutFs.Pfs0.Pfs0Stream(nszPath, headsize=properheadsize, mode='wb+') xcicontainer = Xci(filepath) # f.compressed_supertrim(buffer,outfile,keepupd,level,threads) for nspF in xcicontainer.hfs0: if str(nspF._path) == "secure": for nca in nspF: if isinstance(nca, nutFs.Nca.Nca) and ( nca.header.contentType == nutFs.Type.Content.PROGRAM or nca.header.contentType == nutFs.Type.Content.PUBLIC_DATA): if isNcaPacked(nca): cctx = zstandard.ZstdCompressor(level=compressionLevel, threads=threads) newFileName = nca._path[0:-1] + 'z' f = newNsp.add(newFileName, nca.size) start = f.tell() nca.seek(0) data = nca.read(ncaHeaderSize) f.write(data) nca.seek(ncaHeaderSize) written = ncaHeaderSize compressor = cctx.stream_writer(f) sections = [] for fs in sortedFs(nca): sections += fs.getEncryptionSections() header = b'NCZSECTN' header += len(sections).to_bytes(8, 'little') i = 0 for fs in sections: i += 1 header += fs.offset.to_bytes(8, 'little') header += fs.size.to_bytes(8, 'little') header += fs.cryptoType.to_bytes(8, 'little') header += b'\x00' * 8 header += fs.cryptoKey header += fs.cryptoCounter f.write(header) written += len(header) timestamp = time.time() decompressedBytes = ncaHeaderSize totsize = 0 for fs in sortedFs(nca): totsize += fs.size t = tqdm(total=totsize, unit='B', unit_scale=True, leave=False) for section in sections: #print('offset: %x\t\tsize: %x\t\ttype: %d\t\tiv%s' % (section.offset, section.size, section.cryptoType, str(hx(section.cryptoCounter)))) o = nca.partition(offset=section.offset, size=section.size, n=None, cryptoType=section.cryptoType, cryptoKey=section.cryptoKey, cryptoCounter=bytearray( section.cryptoCounter), autoOpen=True) while not o.eof(): buffer = o.read(CHUNK_SZ) t.update(len(buffer)) if 
len(buffer) == 0: raise IOError('read failed') written += compressor.write(buffer) decompressedBytes += len(buffer) t.close() compressor.flush(zstandard.FLUSH_FRAME) elapsed = time.time() - timestamp minutes = elapsed / 60 seconds = elapsed % 60 speed = 0 if elapsed == 0 else (nca.size / elapsed) written = f.tell() - start print('\n * Compressed at %d%% from %s to %s - %s' % (int(written * 100 / nca.size), str(sq_tools.getSize(decompressedBytes)), str(sq_tools.getSize(written)), nca._path)) print( ' * Compressed in %02d:%02d at speed: %.1f MB/s\n' % (minutes, seconds, speed / 1000000.0)) newNsp.resize(newFileName, written) continue else: print('not packed!') f = newNsp.add(nca._path, nca.size) nca.seek(0) t = tqdm(total=nca.size, unit='B', unit_scale=True, leave=False) while not nca.eof(): buffer = nca.read(CHUNK_SZ) t.update(len(buffer)) f.write(buffer) t.close() newNsp.close() try: exchangefile.deletefile() except: pass return nszPath