def scanDLC(id, showErr=True, dlcStatus=None):
    """Probe the CDN for DLC belonging to the base title *id*.

    Walks the 0x1FF DLC id slots that follow the base DLC id; every slot
    the CDN reports a version for is registered as a new DLC title and
    the title DB is saved immediately so an interrupted scan loses nothing.

    Args:
        id: base title id (hex string); upper-cased before use.
        showErr: when True, log every slot that yielded nothing.
        dlcStatus: optional progress object; its add() is called per probe.
    """
    id = id.upper()
    title = Titles.get(id)
    baseDlc = Title.baseDlcId(id)
    for i in range(0x1FF):
        scanId = format(baseDlc + i, 'X').zfill(16)
        # Skip ids we already know about.
        if Titles.contains(scanId):
            continue
        ver = CDNSP.get_version(scanId.lower())
        if ver is not None:  # identity comparison; CDN knows this id
            t = Title()
            t.setId(scanId)
            Titles.set(scanId, t)
            Titles.save()
            Print.info('Found new DLC ' + str(title.name) + ' : ' + scanId)
        elif showErr:
            Print.info('nothing found at ' + scanId + ', ' + str(ver))
        if dlcStatus:
            dlcStatus.add()
def unpack(self, path):
    """Extract every contained file into *path* (created if missing).

    Each entry is rewound and streamed out in 64 KiB pages; the written
    path is logged.
    """
    os.makedirs(path, exist_ok=True)
    for nspF in self:
        filePath = os.path.abspath(path + '/' + nspF._path)
        nspF.rewind()
        pageSize = 0x10000
        # "with" guarantees the handle is closed even if a read/write
        # raises (the original leaked it on error); the unused byte
        # counter was dropped.
        with open(filePath, 'wb') as f:
            while True:
                buf = nspF.read(pageSize)
                if len(buf) == 0:
                    break
                f.write(buf)
        Print.info(filePath)
def __init__(self, path=None, mode='rb'):
    """Construct an Nsp, optionally parsing *path* via setPath().

    NOTE(review): the base class is initialized with a None parent before
    setPath() runs, so base-class setup must not depend on the path — the
    statement order here is deliberate.
    """
    # All metadata starts unknown; setPath()/later scanning fills it in.
    self.path = None
    self.titleId = None
    self.hasValidTicket = None
    self.timestamp = None
    self.version = None
    self.fileSize = None
    self.fileModified = None
    self.extractedNcaMeta = False
    super(Nsp, self).__init__(None, path, mode)
    if path:
        self.setPath(path)
        # if files:
        # self.pack(files)
    # Announce containers that can be converted to an unlocked copy.
    if self.titleId and self.isUnlockable():
        Print.info('unlockable title found ' + self.path)
def scan(base):
    """Walk *base* for container files (.nsp/.nsx/.xci/.nsz) and register
    any not already known.

    Files under the configured output and duplicates folders are ignored.
    Progress is persisted every 20 new files so an interrupted scan loses
    little work.

    Returns:
        The number of newly registered files.
    """
    i = 0
    fileList = {}
    nspOut = os.path.abspath(Config.paths.nspOut)
    duplicatesFolder = os.path.abspath(Config.paths.duplicates)
    Print.info('scanning %s' % base)
    for root, _, _files in os.walk(base, topdown=False):
        for name in _files:
            suffix = pathlib.Path(name).suffix
            if suffix in ('.nsp', '.nsx', '.xci', '.nsz'):
                path = os.path.abspath(os.path.join(root, name))
                if not path.startswith(nspOut) and not path.startswith(duplicatesFolder):
                    fileList[path] = name
    if len(fileList) == 0:
        save()
        return 0
    status = Status.create(len(fileList), desc='Scanning files...')
    try:
        for path, name in fileList.items():
            try:
                status.add(1)
                if path not in files:
                    Print.info('scanning ' + name)
                    nsp = Fs.Nsp(path, None)
                    nsp.timestamp = time.time()
                    nsp.getFileSize()  # cache file size
                    files[nsp.path] = nsp
                    i += 1
                    if i % 20 == 0:
                        save()
            except KeyboardInterrupt:
                status.close()
                raise
            except BaseException as e:  # pylint: disable=broad-except
                Print.info('An error occurred processing file: ' + str(e))
        save()
        status.close()
    except BaseException as e:  # pylint: disable=broad-except
        Print.info('An error occurred scanning files: ' + str(e))
    return i
def scan(base, force=False):
    """Walk *base* (following symlinks) for .nsp/.nsx/.nsz files and
    register any not already known.

    Progress is persisted every 20 new files. Per-file and whole-scan
    errors are logged and then re-raised.

    Args:
        base: root directory to walk.
        force: accepted for API compatibility; the has-scanned
            short-circuit it once controlled remains commented out.

    Returns:
        The number of newly registered files.
    """
    global hasScanned
    #if hasScanned and not force:
    #    return
    hasScanned = True
    i = 0
    fileList = {}
    Print.info(base)
    for root, dirs, _files in os.walk(base, topdown=False, followlinks=True):
        for name in _files:
            suffix = pathlib.Path(name).suffix
            # tuple membership instead of a chain of "or" comparisons
            if suffix in ('.nsp', '.nsx', '.nsz'):
                path = os.path.abspath(os.path.join(root, name))
                fileList[path] = name
    if len(fileList) == 0:
        save()
        return 0
    status = Status.create(len(fileList), desc='Scanning files...')
    try:
        for path, name in fileList.items():
            try:
                status.add(1)
                if path not in files:
                    Print.info('scanning ' + name)
                    nsp = Fs.Nsp(path, None)
                    nsp.getFileSize()
                    files[nsp.path] = nsp
                    i += 1
                    if i % 20 == 0:
                        save()
            except KeyboardInterrupt:
                status.close()
                raise
            except BaseException as e:
                Print.info('An error occurred processing file: ' + str(e))
                raise
        save()
        status.close()
    except BaseException as e:
        Print.info('An error occurred scanning files: ' + str(e))
        raise
    return i
def scan(base):
    """Walk *base* for container files (.nsp/.nsx/.xci/.nsz/.xcz),
    skipping hidden files and anything under the output/duplicates
    folders, and register new paths via registerFile().

    Returns:
        The number of newly registered files.
    """
    i = 0
    fileList = {}
    nspOut = os.path.abspath(Config.paths.nspOut)
    duplicatesFolder = os.path.abspath(Config.paths.duplicates)
    Print.info(f"scanning {base}")
    for root, _, _files in os.walk(base, topdown=False):
        for name in _files:
            if _is_file_hidden(name):
                continue
            suffix = pathlib.Path(name).suffix
            if suffix in ('.nsp', '.nsx', '.xci', '.nsz', '.xcz'):
                path = os.path.abspath(os.path.join(root, name))
                if not path.startswith(nspOut) and not path.startswith(duplicatesFolder):
                    fileList[path] = name
    if len(fileList) == 0:
        save()
        return 0
    status = Status.create(len(fileList), desc='Scanning files...')
    try:
        for path, name in fileList.items():
            try:
                status.add(1)
                # fileList keys are already absolute; the original
                # re-ran os.path.abspath here for no effect.
                if path not in files:
                    Print.info('scanning ' + name)
                    registerFile(path)
                    i += 1
                #if i % 20 == 0:
                #    save()
            except KeyboardInterrupt:
                status.close()
                raise
            except BaseException as e:  # pylint: disable=broad-except
                Print.info('An error occurred processing file: ' + str(e))
        save()
        status.close()
    except BaseException as e:  # pylint: disable=broad-except
        Print.info('An error occurred scanning files: ' + str(e))
    return i
def loadTitleBuffer(buffer, silent=False):
    """Parse a pipe-delimited title-key dump from *buffer* into the DB.

    The first non-comment line may be a header row naming the columns;
    legacy column names (RightsID/TitleKey/Name) are mapped onto the
    internal ones (id/key/name). Each data row is merged into the
    matching Title, logging any newly added key unless *silent*.
    """
    global nsuIdMap
    firstLine = True
    map = ['id', 'key', 'name']  # default column layout when no header row
    for line in buffer.split('\n'):
        line = line.strip()
        if len(line) == 0 or line[0] == '#':
            continue
        if firstLine:
            firstLine = False
            # Header row (raw string: the original's "\|"/"\s" were
            # invalid escape sequences in a plain string literal).
            if re.match(r'[A-Za-z\|\s]+', line, re.I):
                map = line.split('|')
                # Translate legacy column names to the internal ones.
                for idx, col in enumerate(map):
                    if col == 'RightsID':
                        map[idx] = 'id'
                    elif col == 'TitleKey':
                        map[idx] = 'key'
                    elif col == 'Name':
                        map[idx] = 'name'
                continue
        t = Title.Title()
        t.loadCsv(line, map)
        if not isinstance(t.id, str):
            continue
        if 'nsuId' in map:
            nsuIdMap[t.nsuId] = t.id
        title = get(t.id, None, None)
        titleKey = title.key  # remember the old key to detect changes
        title.loadCsv(line, map)
        if not silent and titleKey != titles[t.id].key:
            Print.info('Added new title key for ' + str(titles[t.id].name) + '[' + str(t.id) + ']')
def load(fileName='titledb/files.json', verify=True):
    """Populate the in-memory file list from the JSON cache (runs once).

    Args:
        fileName: cache file produced by save().
        verify: when True (and a scan is active) drop cache entries whose
            file no longer exists on disk.
    """
    global hasLoaded  # pylint: disable=global-statement
    if hasLoaded:
        return
    hasLoaded = True
    timestamp = time.perf_counter()
    if os.path.isfile(fileName):
        with open(fileName, encoding="utf-8-sig") as f:
            for k in json.loads(f.read()):
                t = Fs.Nsp(k['path'], None)
                t.timestamp = k['timestamp']
                t.titleId = k['titleId']
                t.version = k['version']
                # .get() collapses the original's verbose key-presence
                # branches; semantics are unchanged.
                t.extractedNcaMeta = k.get('extractedNcaMeta') == 1
                if 'fileSize' in k:
                    t.fileSize = k['fileSize']
                t.cr = k.get('cr')
                if not t.path:
                    continue
                path = os.path.abspath(t.path)
                if verify and Config.isScanning:
                    # isfile() already implies existence; the original's
                    # extra os.path.exists() was redundant.
                    if os.path.isfile(path):
                        files[path] = t
                else:
                    files[path] = t
    Print.info('loaded file list in ' + str(time.perf_counter() - timestamp) + ' seconds')
def downloadAll(wait=True):
    """Queue every eligible title for download and start the workers.

    A title is eligible when an update is available and it passes the
    configured base/DLC/demo/update/title-key filters plus the
    white/blacklists. When *wait* is True, blocks until the queue drains
    and all active downloads finish.
    """
    nut.initTitles()
    nut.initFiles()
    global activeDownloads
    global status
    try:
        for k, t in Titles.items():
            # One compound filter: update available AND the title class
            # (base/DLC/demo/update) is enabled in Config AND a key is
            # present (or keyless downloads allowed) AND the white/black
            # lists permit it.
            if t.isUpdateAvailable() and (
                    t.isDLC or t.isUpdate or Config.download.base) and (
                    not t.isDLC or Config.download.DLC) and (
                    not t.isDemo or Config.download.demo) and (
                    not t.isUpdate or Config.download.update) and (
                    t.key or Config.download.sansTitleKey) and (
                    len(Config.titleWhitelist) == 0 or t.id in Config.titleWhitelist) and t.id not in Config.titleBlacklist:
                # Reject placeholder ids and updates with no usable version.
                if not t.id or t.id == '0' * 16 or (
                        t.isUpdate and t.lastestVersion() in [None, '0']):
                    #Print.warning('no valid id? ' + str(t.path))
                    continue
                if not t.lastestVersion():
                    Print.info('Could not get version for ' + str(t.name) + ' [' + str(t.id) + ']')
                    continue
                Titles.queue.add(t.id)
        Titles.save()
        status = Status.create(Titles.queue.size(), 'Total Download')
        startDownloadThreads()
        # Poll until the queue is empty and no worker is mid-download.
        while wait and (not Titles.queue.empty() or sum(activeDownloads) > 0):
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    except BaseException as e:
        Print.error(str(e))
    if status:
        status.close()
def getInstall(request, response):
    """Push an install URL for the requested title to a Switch console.

    Builds a credentialled download URL for title ``request.bits[2]``,
    sends it (4-byte big-endian length prefix, then the URL) to the
    console's install listener, and waits for a one-byte acknowledgement.
    Writes a JSON success/failure payload to *response*; never raises.
    """
    nsp = Nsps.getByTitleId(request.bits[2])
    try:
        url = ('%s:%s@%s:%d/api/download/%s/title.nsp' % (
            request.user.id, request.user.password, Config.server.hostname,
            Config.server.port, request.bits[2]))
        Print.info('Installing ' + url)
        file_list_payloadBytes = url.encode('ascii')
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            #sock.settimeout(1)
            sock.connect((request.user.switchHost, request.user.switchPort))
            #sock.settimeout(99999)
            sock.sendall(struct.pack('!L', len(file_list_payloadBytes)) + file_list_payloadBytes)
            # NOTE(review): recv() returning b'' means the peer closed;
            # this loop then spins — original behavior, kept as-is.
            while len(sock.recv(1)) < 1:
                time.sleep(0.05)
        finally:
            # The original leaked the socket on any error after creation.
            sock.close()
        response.write(json.dumps({'success': True, 'message': 'install successful'}))
    except BaseException as e:  # pylint: disable=broad-except
        response.write(json.dumps({'success': False, 'message': str(e)}))
def scanLatestTitleUpdates():
    """Pull the CDN's version-update feed and record new title versions."""
    initTitles()
    initFiles()
    for rawId, rawVersion in CDNSP.get_versionUpdates().items():
        id = str(rawId).upper()
        version = str(rawVersion)
        if not Titles.contains(id):
            # Unknown id: only proceed when it is well-formed.
            if len(id) != 16:
                Print.info('invalid title id: ' + id)
                continue
        t = Titles.get(id)
        if str(t.version) != str(version):
            Print.info('new version detected for %s[%s] v%s' % (
                t.name or '', t.id or ('0' * 16), str(version)))
            t.setVersion(version, True)
    Titles.save()
def init():
    """Load every enabled plugin package exactly once.

    A plugin is a subdirectory of ../../plugins containing __init__.py
    and no 'disabled' marker file. Returns False when already
    initialized, True otherwise; loader errors are logged per plugin.
    """
    global initialized
    if initialized:
        return False
    initialized = True
    pluginsRoot = os.path.abspath(os.path.join(__file__, '../../plugins'))
    for entry in os.listdir(pluginsRoot):
        try:
            pluginDir = os.path.join(pluginsRoot, entry)
            # Require an __init__.py and no 'disabled' marker.
            if not os.path.isfile(os.path.join(pluginDir, '__init__.py')) or os.path.isfile(os.path.join(pluginDir, 'disabled')):
                continue
            importlib.import_module('plugins.%s' % entry)
        except BaseException as e:
            Print.error("plugin loader exception: %s" % str(e))
    return True
def call(*argv):
    """Invoke every hook registered under the first argument's name.

    Remaining arguments are forwarded to each hook; a hook that raises
    is logged and the rest still run. Returns False when no name was
    given or nothing is registered under it, True otherwise.
    """
    global _hooks
    if not argv:
        return False
    name, *hookArgs = argv
    if name not in _hooks:
        return False
    for hook in _hooks[name]:
        try:
            hook(*hookArgs)
        except BaseException as e:
            Print.error('plugin exception: %s' % str(e))
    return True
def get(self):
    """Return a cached edge token, refreshing it when missing or expired.

    The token is first read from the cache file (expiry = the file's
    mtime + 1 hour); if still absent or stale, a fresh token is fetched
    via cdn.Auth and the expiry recomputed from the file's new mtime.

    Raises:
        IOError: if no token could be obtained.
    """
    if not self.token:
        try:
            with open(self.fileName(), encoding='utf8') as f:
                self.token = f.read().strip()
                self.expires = os.path.getmtime(self.fileName()) + (60 * 60)
        except BaseException as e:
            # Best-effort: a missing/unreadable cache file just forces a refresh.
            Print.error(str(e))
            pass
    if not self.token or not self.expires or time.time() > self.expires:
        # Imported lazily — presumably to avoid a circular import; confirm.
        import cdn.Auth
        self.token = cdn.Auth.getEdgeToken(self.clientId)
        # NOTE(review): assumes getEdgeToken refreshed the cache file;
        # getmtime raises if the file does not exist — confirm against cdn.Auth.
        self.expires = os.path.getmtime(self.fileName()) + (60 * 60)
    if not self.token:
        raise IOError('No edge token')
    return self.token
def moveDupe(self):
    """Move this file into the configured duplicates folder.

    A numeric suffix is appended while a file of the same name already
    exists there. Honours Config.dryRun. Returns True on success (or dry
    run), False on failure.
    """
    if Config.dryRun:
        return True
    try:
        newPath = self.fileName()
        os.makedirs(Config.paths.duplicates, exist_ok=True)
        baseName = os.path.basename(newPath)
        # os.path.join is correct whether or not the configured duplicates
        # path carries a trailing separator; the original's plain string
        # concatenation silently produced a wrong path without one.
        dupePath = os.path.join(Config.paths.duplicates, baseName)
        Print.info('moving duplicate ' + baseName)
        c = 0
        while os.path.isfile(dupePath):
            dupePath = os.path.join(Config.paths.duplicates, baseName + '.' + str(c))
            c += 1
        shutil.move(self.path, dupePath)
        return True
    except BaseException as e:  # pylint: disable=broad-except
        Print.error('failed to move to duplicates! ' + str(e))
    return False
def isUpdateAvailable(self, localOnly=False):
    """Report whether a newer version than the newest local file exists.

    Returns True when no local file is present (unless this is an update
    title whose version is unknown/zero), or when the latest known
    version exceeds the local file's version. Errors are logged and
    treated as "no update".
    """
    nsp = self.getLatestFile()
    # The original nested this test twice ("if not nsp: if not nsp:");
    # a single check is equivalent.
    if not nsp:
        if not self.isUpdate or (self.version and int(self.version) > 0):
            return True
        return False
    try:
        latest = self.lastestVersion(localOnly=localOnly)
        if latest is None:
            return False
        if int(nsp.version) < int(latest):
            return True
    except BaseException as e:  # pylint: disable=broad-except
        Print.error('isUpdateAvailable exception %s: %s' % (self.id, str(e)))
    return False
def run():
    """Start the TCP server socket and block until all workers finish."""
    global httpd
    global sock
    global addr
    Print.info(time.asctime() + ' Server Starts - %s:%s' % (Config.server.hostname, Config.server.port))
    try:
        addr = (Config.server.hostname, Config.server.port)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind(addr)
        sock.listen(5)
        # Spin up 16 worker threads (constructor side effect; the
        # original built and discarded a list comprehension).
        for workerId in range(16):
            Thread(workerId)
        for thread in threads:
            thread.join()
    except KeyboardInterrupt:
        pass
    Print.info(time.asctime() + ' Server Stops - %s:%s' % (Config.server.hostname, Config.server.port))
def printInfo(self, maxDepth=3, indent=0):
    """Dump a human-readable summary of this XCI archive and its HFS0."""
    tabs = '\t' * indent
    Print.info('\n%sXCI Archive\n' % (tabs))
    super(Xci, self).printInfo(maxDepth, indent)
    Print.info(tabs + 'magic = ' + str(self.magic))
    Print.info(tabs + 'titleKekIndex = ' + str(self.titleKekIndex))
    certBlob = (self.gamecardCert.magic + self.gamecardCert.unknown1
                + self.gamecardCert.unknown2 + self.gamecardCert.data)
    Print.info(tabs + 'gamecardCert = ' + str(hx(certBlob)))
    self.hfs0.printInfo(maxDepth, indent)
def CreateTargetDict(targetFolder, extension):
    """Index *targetFolder*: collect lower-cased names of files with
    *extension* and map titleID -> {version -> set(paths)}.

    Returns:
        (filesAtTarget, alreadyExists): the name set and the
        titleID/version index.
    """
    filesAtTarget = set()
    alreadyExists = {}
    for file in os.scandir(targetFolder):
        # DirEntry.path is already the full path; re-joining it with the
        # folder (as the original did) duplicates the prefix for
        # relative target folders.
        filePath = file.path
        if file.name.endswith(extension):
            Print.infoNoNewline('Extract TitleID/Version: {0} '.format(file.name))
            filesAtTarget.add(file.name.lower())
            (titleID, version) = ExtractTitleIDAndVersion(file)
            titleIDEntry = alreadyExists.get(titleID)
            if titleIDEntry is None:
                titleIDEntry = {version: {filePath}}
            elif version not in titleIDEntry:
                # dicts have no .add(); the original raised
                # AttributeError on this branch.
                titleIDEntry[version] = {filePath}
            else:
                titleIDEntry[version].add(filePath)
            alreadyExists[titleID] = titleIDEntry
            Print.info('=> {0} {1}'.format(titleID, version))
    return (filesAtTarget, alreadyExists)
def scanBaseThread(baseStatus):
    """Worker loop: probe random title ids against the CDN and register
    any base title the CDN reports a version for.

    Runs until Config.isRunning is cleared; *baseStatus* is bumped per
    probe.
    """
    while Config.isRunning:
        try:
            id = getRandomTitleId()
            if Titles.contains(id):
                continue
            ver = CDNSP.get_version(id.lower())
            if ver is not None:  # identity comparison instead of != None
                Print.info('Found new base ' + id)
                t = Title()
                t.setId(id)
                Titles.set(id, t)
                Titles.save()
            baseStatus.add()
        except BaseException as e:  # pylint: disable=broad-except
            # Route through the project logger instead of the bare
            # print() the original used (consistency with the rest of
            # the file).
            Print.error('exception: ' + str(e))
def importRegion(region='US', language='en', save=True):
    """Merge region/language title metadata into the canonical title DB.

    Every known region/language database is imported in priority order
    (preferring *region*/*language*), then the txt databases are
    reloaded. Returns False when the requested pair is unknown.

    Args:
        region: preferred region code.
        language: preferred language code.
        save: persist the merged DB when True.
    """
    if region not in Config.regionLanguages() or language not in Config.regionLanguages()[region]:
        Print.info('Could not locate %s/%s !' % (region, language))
        return False
    Hook.call("import.pre", region, language)
    regionLanguages = []
    for region2 in Config.regionLanguages():
        for language2 in Config.regionLanguages()[region2]:
            regionLanguages.append(RegionLanguage(region2, language2, region, language))
    for rl in sorted(regionLanguages):
        data = Titles.data(rl.region, rl.language)
        for nsuId in sorted(data.keys(), reverse=True):
            regionTitle = data[nsuId]
            if not regionTitle.id:
                continue
            try:
                for tid in regionTitle.ids:
                    title = Titles.get(tid, None, None)
                    title.importFrom(regionTitle, rl.region, rl.language, preferredRegion=region, preferredLanguage=language)
            # The original used a bare "except:"; this is the explicit
            # equivalent. Presumably triggered when regionTitle has no
            # usable .ids collection — confirm.
            except BaseException:  # pylint: disable=broad-except
                title = Titles.get(regionTitle.id, None, None)
                title.importFrom(regionTitle, rl.region, rl.language, preferredRegion=region, preferredLanguage=language)
    Titles.loadTxtDatabases()
    Hook.call("import.post", region, language)
    if save:
        Titles.save()
def AllowedToWriteOutfile(filePath, targetFileExtension, targetDict, removeOld, overwrite, parseCnmt):
    """Decide whether *filePath* may be written into the target folder.

    Consults the (filesAtTarget, alreadyExists) index produced by
    CreateTargetDict. With *removeOld*, outdated versions of the same
    title are deleted first; with *overwrite*, same-version duplicates
    are deleted. Returns False when an equal-or-newer copy should be
    kept, otherwise defers to fileNameCheck().
    """
    (filesAtTarget, alreadyExists) = targetDict
    extractedIdVersion = ExtractTitleIDAndVersion(filePath, parseCnmt)
    if extractedIdVersion is None:
        Print.error("Failed to extract TitleID/Version from filename {0}. Use -p to extract from Cnmt.".format(Path(filePath).name))
        return fileNameCheck(filePath, targetFileExtension, filesAtTarget, removeOld, overwrite)
    (titleIDExtracted, versionExtracted) = extractedIdVersion
    titleIDEntry = alreadyExists.get(titleIDExtracted)
    if removeOld:
        if titleIDEntry is not None:
            exitFlag = False
            # Iterate a snapshot: entries may be deleted below.
            for versionEntry in list(titleIDEntry.keys()):
                if versionEntry < versionExtracted:
                    for delFilePath in titleIDEntry[versionEntry]:
                        Print.info('Delete outdated version: {0}'.format(delFilePath))
                        remove(delFilePath)
                        # filesAtTarget is a set; the original's
                        # "del filesAtTarget[...]" raised TypeError.
                        filesAtTarget.discard(Path(delFilePath).name.lower())
                else:
                    exitFlag = True
            if exitFlag:
                Print.info('{0} with a the same ID and newer version already exists in the output directory.\n'\
                           'If you want to process it do not use --rm-old-version!'.format(Path(filePath).name))
                return False
    if titleIDEntry is not None:
        for versionEntry in titleIDEntry:
            # The original compared versionEntry against the dict itself
            # ("versionEntry == titleIDEntry"), which is never true; the
            # intended comparison is against the extracted version.
            if versionEntry == versionExtracted:
                if overwrite:
                    # Delete each recorded duplicate path (the original's
                    # two-tuple unpack over the name set was broken).
                    for dupPath in titleIDEntry[versionEntry]:
                        Print.info('Delete duplicate: {0}'.format(dupPath))
                        filesAtTarget.discard(Path(dupPath).name.lower())
                        remove(dupPath)
                else:
                    Print.info('{0} with the same ID and version already exists in the output directory.\n'\
                               'If you want to overwrite it use the -w parameter!'.format(Path(filePath).name))
                    return False
    return fileNameCheck(filePath, targetFileExtension, filesAtTarget, removeOld, overwrite)
def tr(str_):
    """Translate *str_* via the loaded language DB, falling back to the
    English DB and finally to the key itself (with a warning)."""
    global _initialized
    global _lang_db
    global _lang  # the original declared this global twice
    global _locale
    global _file_name
    if not _initialized:
        if not reload():
            return str_
    try:
        translated = _lang_db[str_]
    except Exception:  # pylint: disable=broad-except
        try:
            translated = _en_db[str_]
        except Exception:  # pylint: disable=broad-except
            Print.warning(f"missing translation for '{str_}' key")
            translated = str_
    return translated
def extractCnmt(nsp):
    """Return the first Cnmt found inside *nsp*'s meta NCA, or None.

    Opens the container if needed and restores its open/closed state
    before returning; errors are logged and yield None.
    """
    wasOpen = nsp.isOpen()
    try:
        if not wasOpen:
            nsp.open(nsp.path, 'rb')
        for entry in nsp:
            if not isinstance(entry, Nca):
                continue
            if int(entry.header.contentType) != 1:
                continue
            for section in entry:
                for item in section:
                    if isinstance(item, Cnmt):
                        return item
    except BaseException as e:
        Print.info('exception: %s %s' % (nsp.path, str(e)))
    finally:
        if not wasOpen:
            nsp.close()
    return None
def loadTxtDatabases():
    """Load titles.txt plus every .txt database in the configured folder,
    routing personal_keys.txt files through the key parser.

    Runs entirely under the config lock.
    """
    # "with" releases the lock even if loadTitleFile raises; the
    # original's bare acquire()/release() pair could leave it held
    # forever after an error outside its try block.
    with confLock:
        if os.path.isfile("titles.txt"):
            loadTitleFile('titles.txt', True)
        try:
            # Renamed from "files" to avoid shadowing the module-level name.
            txtFiles = sorted([
                f for f in os.listdir(Config.paths.titleDatabase)
                if f.endswith('.txt')
            ])
            for file in txtFiles:
                if file.endswith('personal_keys.txt'):
                    parsePersonalKeys(Config.paths.titleDatabase + '/' + file)
                else:
                    loadTitleFile(Config.paths.titleDatabase + '/' + file, False)
        except BaseException as e:  # pylint: disable=broad-except
            Print.error('title load error: ' + str(e))
def getAddOns(titleId, shop_id=3):
    """Query the shop API for DLC belonging to *titleId*.

    Newly seen DLC ids are announced; each DLC's version is recorded on
    its Title entry. Returns the list of DLC title ids (empty when the
    request failed).
    """
    url = 'https://superfly.hac.%s.d4c.nintendo.net/v1/a/%s/dv' % (Config.cdn.environment, titleId)
    response = makeJsonRequest('GET', url, {}, '%d/a/%s/dv.json' % (shop_id, titleId), force=False)
    result = []
    if not response:
        return result
    for entry in response:
        dlcId = entry['title_id'].upper()
        if not Titles.contains(dlcId):
            Print.info('New DLC found: ' + dlcId)
        title = Titles.get(dlcId, None, None)
        title.setVersion(int(entry['version']))
        result.append(dlcId)
    return result
def organizeNcas(dir):
    """Sort loose .nca files in *dir* into per-title subfolders.

    Meta NCAs are renamed to '<name>.cnmt.nca'; others keep plain
    '.nca'. Per-file errors are logged and the loop continues.
    """
    # Renamed from "files" to avoid shadowing; the original's unused
    # titleId/header locals were dropped.
    ncaFiles = [f for f in os.listdir(dir) if f.endswith('.nca')]
    for file in ncaFiles:
        try:
            path = os.path.join(dir, file)
            f = Fs.Nca()
            f.open(path, 'r+b')
            # NOTE(review): header appears to be parsed on open and stay
            # readable after close (the original also read it post-close).
            f.close()
            titleDir = os.path.join(dir, f.header.titleId)
            os.makedirs(titleDir, exist_ok=True)
            baseName = file.split('.')[0]
            if f.header.contentType == Fs.Type.Content.META:
                dest = os.path.join(titleDir, baseName + '.cnmt.nca')
            else:
                dest = os.path.join(titleDir, baseName + '.nca')
            os.rename(path, dest)
            Print.info(dest)
        except BaseException as e:  # pylint: disable=broad-except
            Print.info(str(e))
def unpack(self, path, extractregex=".*"):
    """Extract contained files whose destination path matches
    *extractregex* into *path*.

    Args:
        path: destination directory (a pathlib.Path); created if missing.
        extractregex: regular expression applied with re.match() to each
            file's full destination path. The original default of "*" is
            not a valid regex — every no-argument call raised re.error —
            so the default is now ".*" (match everything), which is what
            the old default clearly intended.
    """
    os.makedirs(str(path), exist_ok=True)
    for nspf in self:
        filePath_str = str(path.joinpath(nspf._path))
        if not re.match(extractregex, filePath_str):
            continue
        nspf.rewind()
        pageSize = 0x100000
        # "with" closes the handle even if a read/write raises; the
        # unused byte counter was dropped.
        with open(filePath_str, 'wb') as f:
            while True:
                buf = nspf.read(pageSize)
                if len(buf) == 0:
                    break
                f.write(buf)
        Print.info(filePath_str)
def unlockAll(copy=False):
    """Unlock every unlockable container whose title is active.

    Files that already have an unlocked .nsp/.nsz counterpart are
    re-opened just to verify the title key, then skipped; the rest are
    unlocked in place. Per-file errors are logged and processing
    continues.
    """
    nut.initTitles()
    nut.initFiles()
    files = []
    for k, f in Nsps.files.items():
        files.append(f)
    for f in files:
        try:
            if f.isUnlockable() and f.title().isActive():
                if f.title().getLatestNsp() is not None or f.title().getLatestNsz() is not None:
                    Print.info('unlocked file arleady exists, skipping ' + str(f.path))
                    # Open the existing unlocked copy and sanity-check its key
                    # before skipping.
                    f.open(getUnlockPath(f.path, copy), 'r+b')
                    if not f.verifyKey(f.titleId, f.title().key):
                        raise IOError('Could not verify title key! %s / %s - %s' % (f.titleId, f.title().key, f.title().name))
                    continue
                Print.info('unlocking ' + str(f.path))
                f.unlock()
                f.close()
        except BaseException as e:
            Print.info('error unlocking: ' + str(e))
            traceback.print_exc(file=sys.stdout)
def downloadThread(i):
    """CDN download worker: drain the title queue until stopped.

    activeDownloads[i] is set while a download is in flight; a saved
    file is wrapped as an Nsp, moved into place and the file DB
    persisted.
    """
    if not hasCdn:
        return
    Print.info('starting thread ' + str(i))
    global status
    while Config.isRunning and not Titles.queue.empty():
        try:
            queuedId = Titles.queue.shift()
            if not (queuedId and Titles.contains(queuedId)):
                time.sleep(1)
                continue
            activeDownloads[i] = 1
            title = Titles.get(queuedId)
            downloadedPath = cdn.downloadTitle(title.id.lower(), None, title.key)
            if downloadedPath and os.path.isfile(downloadedPath):
                nsp = Fs.Nsp(downloadedPath, None)
                nsp.move()
                Nsps.save()
            if status is not None:
                status.add()
            activeDownloads[i] = 0
        except KeyboardInterrupt:
            pass
        except BaseException as e:
            Print.error('downloadThread exception: ' + str(e))
            traceback.print_exc(file=sys.stdout)
            activeDownloads[i] = 0
    Print.info('ending thread ' + str(i))