def folderpicker(path, ofolder=None, TD=None, filter=None, mode='download'):
    pathlist = search_folder(path, TD, pick=True, Print=False, filter=filter)
    counter = len(pathlist)
    for path in pathlist:
        if mode == 'download':
            download(path, ofolder, TD)
        elif mode == 'get_cnmt_data':
            DriveTools.get_cnmt_data(path, TD)
        elif mode == 'decompress':
            from Drive.Decompress import decompress
            decompress(path, ofolder, TD)
        elif mode == 'supertrimm':
            from Drive.XciTools import supertrimm
            supertrimm(path, ofolder, TD)
        elif mode == 'read_cnmt':
            from Drive.Info import read_cnmt
            read_cnmt(path, TD)
        counter -= 1
        if counter > 0:
            print("Still {} files to download".format(str(counter)))
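# Usage sketch (not called anywhere): the remote path, team-drive value and output
# folder below are hypothetical placeholders, but the modes are the ones dispatched
# by folderpicker() above.
def _folderpicker_example():
    # Interactively pick files under a remote folder and download them locally.
    folderpicker('mylibrary:/switch/games', ofolder='./_downloads_', TD=None, filter='.nsp', mode='download')
    # Same picker, but only print the cnmt metadata of the selected files.
    folderpicker('mylibrary:/switch/games', TD=None, filter='.nsp', mode='read_cnmt')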
def gen_xci_parts_spec1(filepath=None,remote=None,target_cnmt=None,cachefolder=None,keypatch=False,files_list=None):
    if filepath=="":
        filepath=None
    if remote=="":
        remote=None
    # Resolve the remote file from the path (and optional team-drive tag) when no remote object is given
    if remote==None:
        test=filepath.split('|');TD=None
        if len(test)<2:
            filepath=test[0]
            lib,TD,libpath=get_library_from_path(remote_lib_file,filepath)
        else:
            filepath=test[0]
            TD=test[1]
            if str(TD).upper()=="NONE":
                TD=None
        ID,name,type,size,md5,remote=DrivePrivate.get_Data(filepath,TD=TD,Print=False)
    if keypatch!=False:
        try:
            keypatch=int(keypatch)
        except:
            keypatch=False
    # Create the cache folder for the generated parts, or empty it if it already exists
    if cachefolder==None:
        cachefolder=os.path.join(ztools_dir, '_mtp_cache_')
    if not os.path.exists(cachefolder):
        os.makedirs(cachefolder)
    else:
        for f in os.listdir(cachefolder):
            fp=os.path.join(cachefolder, f)
            try:
                shutil.rmtree(fp)
            except OSError:
                os.remove(fp)
    if files_list==None:
        files_list=DriveTools.get_files_from_head(remote,remote.name)
    files=list();filesizes=list()
    fplist=list()
    for k in range(len(files_list)):
        entry=files_list[k]
        fplist.append(entry[0])
    # Default to the first cnmt found when no target content is specified
    if target_cnmt==None:
        for i in range(len(files_list)):
            entry=files_list[i]
            cnmtfile=entry[0]
            if cnmtfile.endswith('.cnmt.nca'):
                target_cnmt=cnmtfile
                break
    # Collect the NCAs referenced by the target cnmt in NSP order: data, then meta, then program
    for i in range(len(files_list)):
        entry=files_list[i]
        cnmtfile=entry[0]
        if cnmtfile.endswith('.cnmt.nca') and target_cnmt==cnmtfile:
            metadict,d1,d2=DriveTools.get_cnmt_data(target=cnmtfile,file=remote)
            ncadata=metadict['ncadata']
            for j in range(len(ncadata)):
                row=ncadata[j]
                if row['NCAtype']!='Meta' and row['NCAtype']!='Program':
                    test1=str(row['NcaId'])+'.nca';test2=str(row['NcaId'])+'.ncz'
                    if test1 in fplist:
                        files.append(str(row['NcaId'])+'.nca')
                        filesizes.append(int(row['Size']))
                    elif test2 in fplist:
                        files.append(str(row['NcaId'])+'.ncz')
                        for k in range(len(files_list)):
                            entry=files_list[k]
                            if entry[0]==test2:
                                filesizes.append(int(entry[3]))
                                break
            for j in range(len(ncadata)):
                row=ncadata[j]
                if row['NCAtype']=='Meta':
                    # print(str(row['NcaId'])+'.cnmt.nca')
                    files.append(str(row['NcaId'])+'.cnmt.nca')
                    filesizes.append(int(row['Size']))
            for j in range(len(ncadata)):
                row=ncadata[j]
                # print(row)
                if row['NCAtype']=='Program':
                    test1=str(row['NcaId'])+'.nca';test2=str(row['NcaId'])+'.ncz'
                    if test1 in fplist:
                        files.append(str(row['NcaId'])+'.nca')
                        filesizes.append(int(row['Size']))
                    elif test2 in fplist:
                        files.append(str(row['NcaId'])+'.ncz')
                        for k in range(len(files_list)):
                            entry=files_list[k]
                            if entry[0]==test2:
                                filesizes.append(int(entry[3]))
                                break
            break
    remote.rewind()
    # Build the NSP header, then write it plus re-encrypted 0xC00 NCA headers to the cache file "0"
    outheader=sq_tools.gen_nsp_header(files,filesizes)
    properheadsize=len(outheader)
    # print(properheadsize)
    # print(bucketsize)
    i=0;sum=properheadsize
    outfile=os.path.join(cachefolder, "0")
    outf=open(outfile, 'w+b')
    outf.write(outheader)
    written=0
    for fi in files:
        if fi.endswith('nca') or fi.endswith('ncz'):
            for i in range(len(files_list)):
                if str(files_list[i][0]).lower()==str(fi).lower():
                    nca_name=files_list[i][0]
                    off1=files_list[i][1]
                    off2=files_list[i][2]
                    nca_size=files_list[i][3]
                    break
            data=remote.read_at(off1,nca_size)
            ncaHeader=NcaHeader()
            ncaHeader.open(MemoryFile(remote.read_at(off1,0x400), FsType.Crypto.XTS, uhx(Keys.get('header_key'))))
            crypto1=ncaHeader.getCryptoType()
            crypto2=ncaHeader.getCryptoType2()
            if crypto2>crypto1:
                masterKeyRev=crypto2
            if crypto2<=crypto1:
                masterKeyRev=crypto1
            crypto=aes128.AESECB(Keys.keyAreaKey(Keys.getMasterKeyIndex(masterKeyRev), ncaHeader.keyIndex))
            hcrypto=aes128.AESXTS(uhx(Keys.get('header_key')))
            gc_flag='00'*0x01
            crypto1=ncaHeader.getCryptoType()
            crypto2=ncaHeader.getCryptoType2()
            # Titlekey crypto: re-encrypt the decrypted titlekey into the key block
            if ncaHeader.getRightsId() != 0:
                ncaHeader.rewind()
                if crypto2>crypto1:
                    masterKeyRev=crypto2
                if crypto2<=crypto1:
                    masterKeyRev=crypto1
                titleKeyDec=Keys.decryptTitleKey(titleKey, Keys.getMasterKeyIndex(int(masterKeyRev)))
                encKeyBlock=crypto.encrypt(titleKeyDec * 4)
                if str(keypatch) != "False":
                    t=tqdm(total=False, unit='B', unit_scale=False, leave=False)
                    if keypatch < ncaHeader.getCryptoType2():
                        encKeyBlock,crypto1,crypto2=get_new_cryptoblock(ncaHeader,keypatch,encKeyBlock,t)
                    t.close()
            # Standard crypto: reuse the key block stored in the NCA header
            if ncaHeader.getRightsId() == 0:
                ncaHeader.rewind()
                encKeyBlock=ncaHeader.getKeyBlock()
                if str(keypatch) != "False":
                    t=tqdm(total=False, unit='B', unit_scale=False, leave=False)
                    if keypatch < ncaHeader.getCryptoType2():
                        encKeyBlock,crypto1,crypto2=get_new_cryptoblock(ncaHeader,keypatch,encKeyBlock,t)
                    t.close()
            ncaHeader.rewind()
            i=0
            newheader=get_newheader(MemoryFile(remote.read_at(off1,0xC00)),encKeyBlock,crypto1,crypto2,hcrypto,gc_flag)
            outf.write(newheader)
            written+=len(newheader)
            break
        else:
            pass
    outf.flush()
    outf.close()
    # Write the download plan consumed by the installer: step|filepath|size|targetsize|off1|off2|token
    tfile=os.path.join(cachefolder, "remote_files.csv")
    with open(tfile,'w') as csvfile:
        csvfile.write("{}|{}|{}|{}|{}|{}|{}\n".format("step","filepath","size","targetsize","off1","off2","token"))
        csvfile.write("{}|{}|{}|{}|{}|{}|{}\n".format(0,outfile,properheadsize+written,properheadsize,0,properheadsize,"False"))
        k=0;l=0
        for fi in files:
            for j in files_list:
                if j[0]==fi:
                    csvfile.write("{}|{}|{}|{}|{}|{}|{}\n".format(k+1,outfile,properheadsize+written,0xC00,(properheadsize+l*0xC00),(properheadsize+(l*0xC00)+0xC00),"False"))
                    off1=j[1]+0xC00
                    off2=j[2]
                    targetsize=j[3]-0xC00
                    URL='https://www.googleapis.com/drive/v3/files/'+remote.ID+'?alt=media'
                    token=remote.access_token
                    csvfile.write("{}|{}|{}|{}|{}|{}|{}\n".format(k+2,URL,remote.size,targetsize,off1,off2,token))
                    break
            k+=2;l+=1
    # Derive the output nsp name from the remote file name and the cnmt metadata
    nspname="test.nsp"
    try:
        g=remote.name
        g0=[pos for pos, char in enumerate(g) if char == '[']
        g0=(g[0:g0[0]]).strip()
        titleid=metadict['titleid']
        titleversion=metadict['version']
        ctype=metadict['ctype']
        nspname=f"{g0} [{titleid}] [v{titleversion}] [{ctype}].nsp"
    except:
        pass
    return nspname
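# gen_xci_parts_spec1() writes its download plan to "remote_files.csv" as pipe-delimited
# rows: step|filepath|size|targetsize|off1|off2|token (step 0 is the generated header file,
# odd steps the patched NCA headers inside it, even steps the Drive URL byte ranges).
# A minimal reader sketch for that file; the helper name is ours, not part of the original tool.
def _read_remote_files_csv(tfile):
    import csv
    plan = []
    with open(tfile, newline='') as csvfile:
        reader = csv.reader(csvfile, delimiter='|')
        header = next(reader)  # skip the column-name row
        for row in reader:
            plan.append(dict(zip(header, row)))
    return plan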
def public_gdrive_install(filepath,destiny="SD",truecopy=True,outfolder=None,ch_medium=True,check_fw=True,patch_keygen=False,ch_base=False,ch_other=False,installed_list=False):
    check_connection()
    lib,TD,libpath=get_cache_lib()
    if lib==None:
        sys.exit("Google Drive Public Links are only supported via cache folder")
    filename=addtodrive(filepath,truecopy=truecopy)
    ID,name,type,size,md5,remote=DrivePrivate.get_Data(filename,TD=TD,Print=False)
    token=remote.access_token
    name=remote.name
    sz=remote.size
    URL='https://www.googleapis.com/drive/v3/files/'+remote.ID+'?alt=media'
    ext=name.split('.')
    ext=ext[-1]
    if not name.endswith('nsp') and not name.endswith('nsz') and not name.endswith('xci') and not name.endswith('xcz'):
        print(f"Extension not supported for direct installation: {ext} in {name}")
        return False
    # Check free space on the chosen target and fall back to the other medium when allowed
    print("- Retrieving Space on device")
    SD_ds,SD_fs,NAND_ds,NAND_fs,FW,device=get_storage_info()
    print("- Calculating Installed size")
    filesize=int(sz)
    if destiny=="SD":
        print(f" * SD free space: {SD_fs} ({sq_tools.getSize(SD_fs)})")
        print(f" * File size: {filesize} ({sq_tools.getSize(filesize)})")
        if filesize>SD_fs:
            if filesize<NAND_fs and ch_medium==True:
                print(" Not enough space on SD. Changing target to EMMC")
                print(f" * EMMC free space: {NAND_fs} ({sq_tools.getSize(NAND_fs)})")
                destiny="NAND"
            elif ch_medium==False:
                sys.exit(" NOT ENOUGH SPACE ON SD STORAGE")
            else:
                sys.exit(" NOT ENOUGH SPACE ON DEVICE")
    else:
        print(f" * EMMC free space: {NAND_fs} ({sq_tools.getSize(NAND_fs)})")
        print(f" * File size: {filesize} ({sq_tools.getSize(filesize)})")
        if filesize>NAND_fs:
            if filesize<SD_fs and ch_medium==True:
                print(" Not enough space on EMMC. Changing target to SD")
                print(f" * SD free space: {SD_fs} ({sq_tools.getSize(SD_fs)})")
                destiny="SD"
            elif ch_medium==False:
                sys.exit(" NOT ENOUGH SPACE ON EMMC STORAGE")
            else:
                sys.exit(" NOT ENOUGH SPACE ON DEVICE")
    # Compare the file's keygeneration against the console firmware and skip files that need a higher FW
    kgwarning=False
    if check_fw==True:
        try:
            cnmtdata,files_list,remote=DriveTools.get_cnmt_data(file=remote)
            keygeneration=int(cnmtdata['keygeneration'])
            if FW!='unknown':
                try:
                    FW_RSV,RRSV=sq_tools.transform_fw_string(FW)
                    FW_kg=sq_tools.kg_by_RSV(FW_RSV)
                except BaseException as e:
                    Print.error('Exception: ' + str(e))
                    FW='unknown'
                    FW_kg='unknown'
                    pass
            if FW!='unknown' and FW_kg!='unknown':
                if int(keygeneration)>int(FW_kg):
                    kgwarning=True
                    tgkg=int(FW_kg)
                else:
                    tgkg=keygeneration
            else:
                tgkg=keygeneration
            print(f"- Console Firmware: {FW} ({FW_RSV}) - keygen {FW_kg}")
            print(f"- File keygeneration: {keygeneration}")
            if kgwarning==True:
                print("File requires a higher firmware. Skipping...")
                return False
        except:
            print("Error getting cnmtdata from file")
    # Skip or replace content that is already installed, according to the ch_base/ch_other flags
    if installed_list!=False:
        try:
            fileid,fileversion,cctag,nG,nU,nD,baseid=listmanager.parsetags(name)
            fileversion=int(fileversion)
            if fileid.endswith('000') and fileversion==0 and fileid in installed_list.keys() and ch_base==True:
                print("Base game already installed. Skipping...")
                return False
            elif fileid.endswith('000') and fileid in installed_list.keys() and ch_other==True:
                updid=fileid[:-3]+'800'
                if fileversion>((installed_list[fileid])[2]):
                    print("Asking DBI to delete previous content")
                    process=subprocess.Popen([nscb_mtp,"DeleteID","-ID",fileid])
                    while process.poll()==None:
                        if process.poll()!=None:
                            process.terminate()
                    process=subprocess.Popen([nscb_mtp,"DeleteID","-ID",updid])
                    while process.poll()==None:
                        if process.poll()!=None:
                            process.terminate()
                else:
                    print("The update is older than the version installed on the device. Skipping...")
                    listmanager.striplines(tfile,counter=True)
                    return False
            elif ch_other==True and fileid in installed_list.keys():
                if fileversion>((installed_list[fileid])[2]):
                    print("Asking DBI to delete previous update")
                    process=subprocess.Popen([nscb_mtp,"DeleteID","-ID",fileid])
                    while process.poll()==None:
                        if process.poll()!=None:
                            process.terminate()
                else:
                    print("The update is older than the version installed on the device. Skipping...")
                    listmanager.striplines(tfile,counter=True)
                    return False
        except:
            pass
    # Hand the transfer to the XCI csv installer or to the nscb_mtp helper
    if name.endswith('xci') or name.endswith('xcz'):
        from mtpxci_remote import install_xci_csv
        install_xci_csv(remote=remote,destiny=destiny,cachefolder=outfolder)
    else:
        process=subprocess.Popen([nscb_mtp,"DriveInstall","-ori",URL,"-dst",destiny,"-name",name,"-size",str(sz),"-tk",token])
        while process.poll()==None:
            if process.poll()!=None:
                process.terminate()
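# The helper invocations above spin on process.poll() until the child exits. A minimal
# equivalent sketch that simply blocks with subprocess.run; the title id below is a
# hypothetical placeholder, while nscb_mtp and the "DeleteID" verb come from the calls
# in public_gdrive_install():
def _delete_title_example(title_id='0100000000000000'):
    import subprocess
    subprocess.run([nscb_mtp, "DeleteID", "-ID", title_id])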
def decompress_xcz(remote, ofolder):
    buf = 64 * 1024
    buffer = buf
    endname = remote.name[:-1] + 'i'
    output = os.path.join(ofolder, endname)
    files_list = DriveTools.get_files_from_head(remote, remote.name)
    remote.rewind()
    # print(files_list)
    print('Decompressing {}'.format(remote.name))
    print('- Parsing headers...')
    # Rebuild the XCI file list in order: content NCAs, meta NCA, then xml/tik/cert
    files = list()
    filesizes = list()
    fplist = list()
    for k in range(len(files_list)):
        entry = files_list[k]
        fplist.append(entry[0])
    for i in range(len(files_list)):
        entry = files_list[i]
        cnmtfile = entry[0]
        metadict = {}
        if cnmtfile.endswith('.cnmt.nca'):
            metadict, d1, d2 = DriveTools.get_cnmt_data(target=cnmtfile, file=remote)
            ncadata = metadict['ncadata']
            for j in range(len(ncadata)):
                row = ncadata[j]
                # print(row)
                if row['NCAtype'] != 'Meta':
                    test1 = str(row['NcaId']) + '.nca'
                    test2 = str(row['NcaId']) + '.ncz'
                    if test1 in fplist or test2 in fplist:
                        # print(str(row['NcaId'])+'.nca')
                        files.append(str(row['NcaId']) + '.nca')
                        filesizes.append(int(row['Size']))
                else:
                    # print(str(row['NcaId'])+'.cnmt.nca')
                    files.append(str(row['NcaId']) + '.cnmt.nca')
                    filesizes.append(int(row['Size']))
    for k in range(len(files_list)):
        entry = files_list[k]
        fp = entry[0]
        sz = int(entry[3])
        if fp.endswith('xml'):
            files.append(fp)
            filesizes.append(sz)
    for k in range(len(files_list)):
        entry = files_list[k]
        fp = entry[0]
        sz = int(entry[3])
        if fp.endswith('.tik'):
            files.append(fp)
            filesizes.append(sz)
    for k in range(len(files_list)):
        entry = files_list[k]
        fp = entry[0]
        sz = int(entry[3])
        if fp.endswith('.cert'):
            files.append(fp)
            filesizes.append(sz)
    # Recreate the XCI header from the file list and the security-partition hashes
    sec_hashlist = list()
    try:
        for target in files:
            sha, size, gamecard = DriveTools.file_hash(remote, target, files_list)
            # print(sha)
            if sha != False:
                sec_hashlist.append(sha)
    except BaseException as e:
        Print.error('Exception: ' + str(e))
    # print(sec_hashlist)
    xci_header, game_info, sig_padding, xci_certificate, root_header, upd_header, norm_header, sec_header, rootSize, upd_multiplier, norm_multiplier, sec_multiplier = sq_tools.get_xciheader(files, filesizes, sec_hashlist)
    outheader = xci_header
    outheader += game_info
    outheader += sig_padding
    outheader += xci_certificate
    outheader += root_header
    outheader += upd_header
    outheader += norm_header
    outheader += sec_header
    properheadsize = len(outheader)
    totsize = properheadsize
    for s in filesizes:
        totsize += s
    # Hex.dump(outheader)
    # print(totsize)
    print(files)
    t = tqdm(total=totsize, unit='B', unit_scale=True, leave=False)
    with open(output, 'wb') as o:
        o.write(outheader)
        t.update(len(outheader))
    # Append each entry; NCZ bodies are decompressed on the fly and re-encrypted section by section
    for file in files:
        if file.endswith('cnmt.nca'):
            for i in range(len(files_list)):
                if str(files_list[i][0]).lower() == str(file).lower():
                    nca_name = files_list[i][0]
                    off1 = files_list[i][1]
                    off2 = files_list[i][2]
                    nca_size = files_list[i][3]
                    break
            t.write('- Appending {}'.format(nca_name))
            data = remote.read_at(off1, nca_size)
            with open(output, 'ab') as o:
                o.write(data)
                t.update(len(data))
        elif file.endswith('.nca'):
            for i in range(len(files_list)):
                if str(files_list[i][0]).lower() == str(file).lower():
                    nca_name = files_list[i][0]
                    off1 = files_list[i][1]
                    off2 = files_list[i][2]
                    nca_size = files_list[i][3]
                    t.write('- Appending {}'.format(nca_name))
                    o = open(output, 'ab+')
                    remote.seek(off1, off2)
                    for data in remote.response.iter_content(chunk_size=buf):
                        o.write(data)
                        t.update(len(data))
                        o.flush()
                        if not data:
                            o.close()
                            break
                elif (str(files_list[i][0]).lower())[:-1] == (str(file).lower())[:-1] and str(files_list[i][0]).endswith('ncz'):
                    ncz_name = files_list[i][0]
                    off1 = files_list[i][1]
                    off2 = files_list[i][2]
                    sz = files_list[i][3]
                    t.write('- Calculating compressed sections for: ' + ncz_name)
                    remote.seek(off1, off2)
                    header = remote.read(0x4000)
                    magic = readInt64(remote)
                    sectionCount = readInt64(remote)
                    sections = []
                    for i in range(sectionCount):
                        sections.append(Section(remote))
                    t.write('  Detected {} sections'.format(str(sectionCount)))
                    with open(output, 'rb+') as o:
                        o.seek(0, os.SEEK_END)
                        curr_off = o.tell()
                        t.write('- Appending decompressed {}'.format(ncz_name))
                        t.write('  Writing nca header')
                        o.write(header)
                        t.update(len(header))
                        timestamp = time.time()
                        t.write('  Writing decompressed body in plaintext')
                        count = 0
                        checkstarter = 0
                        dctx = zstandard.ZstdDecompressor()
                        hd = Private.get_html_header(remote.access_token, remote.position, off2)
                        remote.get_session(hd)
                        reader = dctx.stream_reader(remote.response.raw, read_size=buffer)
                        c = 0
                        spsize = 0
                        for s in sections:
                            end = s.offset + s.size
                            if s.cryptoType == 1:  # plain text
                                t.write('  * Section {} is plaintext'.format(str(c)))
                                t.write('    %x - %d bytes, Crypto type %d' % ((s.offset), s.size, s.cryptoType))
                                spsize += s.size
                                end = s.offset + s.size
                                i = s.offset
                                while i < end:
                                    chunkSz = buffer if end - i > buffer else end - i
                                    chunk = reader.read(chunkSz)
                                    if not len(chunk):
                                        break
                                    o.write(chunk)
                                    t.update(len(chunk))
                                    i += chunkSz
                            elif s.cryptoType not in (3, 4):
                                raise IOError('Unknown crypto type: %d' % s.cryptoType)
                            else:
                                t.write('  * Section {} needs decompression'.format(str(c)))
                                t.write('    %x - %d bytes, Crypto type %d' % ((s.offset), s.size, s.cryptoType))
                                t.write('    Key: %s, IV: %s' % (str(hx(s.cryptoKey)), str(hx(s.cryptoCounter))))
                                crypto = AESCTR(s.cryptoKey, s.cryptoCounter)
                                spsize += s.size
                                test = int(spsize / (buffer))
                                i = s.offset
                                while i < end:
                                    crypto.seek(i)
                                    chunkSz = buffer if end - i > buffer else end - i
                                    chunk = reader.read(chunkSz)
                                    if not len(chunk):
                                        break
                                    o.write(crypto.encrypt(chunk))
                                    t.update(len(chunk))
                                    i += chunkSz
                            c += 1
                        elapsed = time.time() - timestamp
                        minutes = elapsed / 60
                        seconds = elapsed % 60
                        speed = 0 if elapsed == 0 else (spsize / elapsed)
                        t.write('\n  Decompressed in %02d:%02d at speed: %.1f MB/s\n' % (minutes, seconds, speed / 1000000.0))
        else:
            for i in range(len(files_list)):
                if str(files_list[i][0]).lower() == str(file).lower():
                    file_name = files_list[i][0]
                    off1 = files_list[i][1]
                    off2 = files_list[i][2]
                    file_size = files_list[i][3]
                    break
            t.write('- Appending {}'.format(file_name))
            data = remote.read_at(off1, file_size)
            with open(output, 'ab') as o:
                o.write(data)
                t.update(len(data))
    t.close()
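# decompress_xcz() above inflates NCZ sections with zstandard's streaming reader wrapped
# around the Drive HTTP response. A self-contained sketch of the same read-loop pattern
# on local files (the file names below are hypothetical placeholders):
def _zstd_stream_copy_example(src='section.zst', dst='section.bin', bufsize=64 * 1024):
    import zstandard
    dctx = zstandard.ZstdDecompressor()
    with open(src, 'rb') as i, open(dst, 'wb') as o:
        reader = dctx.stream_reader(i, read_size=bufsize)
        while True:
            chunk = reader.read(bufsize)
            if not chunk:
                break
            o.write(chunk)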