def scrape(delta):
    initTitles()
    initFiles()

    global scrapeQueue

    if Config.reverse:
        scrapeQueue = queue.LifoQueue()
    else:
        scrapeQueue = queue.Queue()

    for titleId in Titles.titles.keys():
        scrapeQueue.put(titleId)

    st = Status.create(scrapeQueue.qsize(), 'eShop meta scrape')

    threads = []
    for i in range(scrapeThreads):
        t = threading.Thread(target=scrapeThread, args=[st, delta])
        t.start()
        threads.append(t)

    for t in threads:
        t.join()

    Titles.save()
    st.close()
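# A minimal sketch of the scrapeThread worker that scrape() spawns above; it
# is not defined in this excerpt. Assumptions: each Title object exposes a
# scrape(delta) method and st is the shared Status progress tracker.
def scrapeThread(st, delta):
    while not scrapeQueue.empty():
        try:
            titleId = scrapeQueue.get(block=False)
        except queue.Empty:
            return
        try:
            Titles.get(titleId).scrape(delta)  # assumed per-title scrape entry point
        except BaseException as e:
            Print.error(str(e))
        st.add()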
def scanLatestTitleUpdates():
    nut.initTitles()
    nut.initFiles()
    for k, i in CDNSP.get_versionUpdates().items():
        id = str(k).upper()
        version = str(i)

        if not Titles.contains(id):
            if len(id) != 16:
                Print.info('invalid title id: ' + id)
                continue
            # register the previously unseen title
            t = Title()
            t.setId(id)
            Titles.set(id, t)
            Print.info('Found new title id: ' + str(id))

        t = Titles.get(id)
        if str(t.version) != str(version):
            Print.info('new version detected for %s[%s] v%s' % (t.name or '', t.id or ('0' * 16), str(version)))
            t.setVersion(version, True)

    Titles.save()
def importRegion(region='US', language='en'):
    if region not in Config.regionLanguages() or language not in Config.regionLanguages()[region]:
        Print.error('Could not locate %s/%s !' % (region, language))
        return False

    # first pass: merge every known region/language pair
    for region2 in Config.regionLanguages():
        for language2 in Config.regionLanguages()[region2]:
            for nsuId, regionTitle in Titles.data(region2, language2).items():
                if not regionTitle.id:
                    continue
                title = Titles.get(regionTitle.id, None, None)
                title.importFrom(regionTitle, region2, language2)

    # second pass: re-apply pairs in the requested language so they take precedence
    for region2 in Config.regionLanguages():
        for language2 in Config.regionLanguages()[region2]:
            if language2 != language:
                continue
            for nsuId, regionTitle in Titles.data(region2, language2).items():
                if not regionTitle.id:
                    continue
                title = Titles.get(regionTitle.id, None, None)
                title.importFrom(regionTitle, region2, language2)

    # final pass: the requested region/language wins outright
    for nsuId, regionTitle in Titles.data(region, language).items():
        if not regionTitle.id:
            continue
        title = Titles.get(regionTitle.id, None, None)
        title.importFrom(regionTitle, region, language)

    Titles.loadTxtDatabases()
    Titles.save()
def importRegion(region='US', language='en', save=True):
    if region not in Config.regionLanguages() or language not in Config.regionLanguages()[region]:
        Print.info('Could not locate %s/%s !' % (region, language))
        return False

    Hook.call("import.pre", region, language)

    regionLanguages = []
    for region2 in Config.regionLanguages():
        for language2 in Config.regionLanguages()[region2]:
            regionLanguages.append(RegionLanguage(region2, language2, region, language))

    for rl in sorted(regionLanguages):
        data = Titles.data(rl.region, rl.language)
        for nsuId in sorted(data.keys(), reverse=True):
            regionTitle = data[nsuId]
            if not regionTitle.id:
                continue
            try:
                # newer entries may map one nsuId to several title ids
                for tid in regionTitle.ids:
                    title = Titles.get(tid, None, None)
                    title.importFrom(regionTitle, rl.region, rl.language, preferredRegion=region, preferredLanguage=language)
            except BaseException:
                # older entries expose only a single id
                title = Titles.get(regionTitle.id, None, None)
                title.importFrom(regionTitle, rl.region, rl.language, preferredRegion=region, preferredLanguage=language)

    Titles.loadTxtDatabases()
    Hook.call("import.post", region, language)

    if save:
        Titles.save()
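# A minimal sketch of the RegionLanguage helper sorted above; it is not shown
# in this excerpt. Assumption: the sort order places the preferred
# region/language pair last, so its metadata is imported last and wins.
class RegionLanguage:
    def __init__(self, region, language, preferredRegion, preferredLanguage):
        self.region = region
        self.language = language
        self.preferredRegion = preferredRegion
        self.preferredLanguage = preferredLanguage

    def sortKey(self):
        # False sorts before True, so the preferred pair comes last
        return (
            self.region == self.preferredRegion,
            self.language == self.preferredLanguage,
            self.region,
            self.language,
        )

    def __lt__(self, other):
        return self.sortKey() < other.sortKey()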
def post(args):
    if not args.import_title_keys:
        return

    nut.initTitles()
    nut.initFiles()
    with open(args.import_title_keys, 'r') as f:
        for line in f.read().split('\n'):
            if '=' not in line:
                continue
            rightsId, key = line.split('=')
            rightsId = rightsId.strip()
            titleId = rightsId[0:16]
            key = key.strip()

            title = Titles.get(titleId)
            nsp = title.getLatestNsp()
            nsz = title.getLatestNsz()

            if not nsp and not nsz:
                Print.info('title import: new title detected: %s - %s' % (title.id, title.name))
            elif not title.key:
                Print.info('title import: new title key detected: %s - %s' % (title.id, title.name))

            title.rightsId = rightsId
            title.key = key
    Titles.save()
def genTinfoilTitles():
    nut.initTitles()
    nut.initFiles()

    for region, languages in Config.regionLanguages().items():
        for language in languages:
            nut.importRegion(region, language)
            Titles.save('titledb/titles.%s.%s.json' % (region, language), False)
            #Print.info('%s - %s' % (region, language))

    nut.scanLatestTitleUpdates()
    nut.export('titledb/versions.txt', ['id', 'rightsId', 'version'])
def downloadAll(wait=True):
    initTitles()
    initFiles()

    global activeDownloads
    global status

    i = 0
    Print.info('Downloading All')
    try:
        for k, t in Titles.items():
            i = i + 1
            if not t.isActive():
                continue

            if t.isUpdateAvailable():
                if not t.id or t.id == '0' * 16:
                    Print.warning('no valid id? id: %s version: %s' % (str(t.id), str(t.lastestVersion())))
                    continue
                Titles.queue.add(t.id)

        Print.info("%d titles scanned, downloading %d" % (i, Titles.queue.size()))

        if Titles.queue.size() > 0:
            Titles.save()
            #status = Status.create(Titles.queue.size(), 'Total Download')

            if Config.threads <= 1:
                activeDownloads.append(1)
                downloadThread(0)
            else:
                startDownloadThreads()

            while wait and (not Titles.queue.empty() or sum(activeDownloads) > 0):
                time.sleep(1)
                Print.info('%d downloads, is empty %d' % (sum(activeDownloads), int(Titles.queue.empty())))
    except KeyboardInterrupt:
        pass
    except BaseException as e:
        Print.error(str(e))

    Print.info('Downloads finished')
    #if status:
    #    status.close()
    Print.info('DownloadAll finished')
def genTinfoilTitles():
    nut.initTitles(verify=False)
    nut.initFiles(verify=False)
    nut.refreshRegions(False)

    for region, languages in Config.regionLanguages().items():
        for language in languages:
            nut.importRegion(region, language, save=False)
            Titles.save('titledb/titles.%s.%s.json' % (region, language), False)
            #Print.info('%s - %s' % (region, language))

    nut.importRegion()
    exit(0)
def scan():
    global hasScanned

    #if hasScanned:
    #    return
    hasScanned = True

    initTitles()
    initFiles()
    refreshRegions()
    importRegion(Config.region, Config.language)

    r = Nsps.scan(Config.paths.scan)
    Titles.save()
    return r
def scanLatestTitleUpdates():
    global versionHistory
    initTitles()
    initFiles()

    now = datetime.datetime.now()
    today = now.strftime("%Y-%m-%d")

    # seed the in-memory history from any existing versions.json
    try:
        with open('titledb/versions.json', 'r') as f:
            for titleId, vers in json.loads(f.read()).items():
                for ver, date in vers.items():
                    setVersionHistory(titleId, ver, date)
    except BaseException:
        pass

    if not hasCdn:
        return

    for k, i in cdn.hacVersionList().items():
        id = str(k).upper()
        version = str(i)

        if not Titles.contains(id):
            if len(id) != 16:
                Print.info('invalid title id: ' + id)
            continue

        t = Titles.get(id)

        if t.isUpdate:
            setVersionHistory(Title.getBaseId(id), version, today)
        else:
            setVersionHistory(id, version, today)

        if str(t.version) != str(version):
            Print.info('new version detected for %s[%s] v%s' % (t.name or '', t.id or ('0' * 16), str(version)))
            t.setVersion(version, True)

    Titles.save()

    try:
        with open('titledb/versions.json', 'w') as outfile:
            json.dump(versionHistory, outfile, indent=4, sort_keys=True)
    except BaseException as e:
        Print.info(str(e))
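# A minimal sketch of the setVersionHistory helper used above; it is not
# shown in this excerpt. Assumption: versionHistory maps
# titleId -> {version: first-seen date} and keeps the earliest date, since
# versions.json is re-read and merged on every run.
def setVersionHistory(titleId, version, date):
    global versionHistory
    titleId = titleId.upper()

    versions = versionHistory.setdefault(titleId, {})
    if version not in versions or date < versions[version]:
        versions[version] = date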
def scanDLC(id, showErr=True, dlcStatus=None):
    id = id.upper()
    title = Titles.get(id)
    baseDlc = Title.baseDlcId(id)

    for i in range(0x1FF):
        scanId = format(baseDlc + i, 'X').zfill(16)
        if Titles.contains(scanId):
            continue
        ver = CDNSP.get_version(scanId.lower())

        if ver is not None:
            t = Title()
            t.setId(scanId)
            Titles.set(scanId, t)
            Titles.save()
            Print.info('Found new DLC ' + str(title.name) + ' : ' + scanId)
        elif showErr:
            Print.info('nothing found at ' + scanId + ', ' + str(ver))

        if dlcStatus:
            dlcStatus.add()
def downloadAll(wait=True):
    initTitles()
    initFiles()

    global activeDownloads
    global status

    i = 0
    Print.info('Downloading All')
    try:
        for k, t in Titles.items():
            i = i + 1
            if not t.isActive():
                continue

            if t.isUpdateAvailable():
                if not t.id or t.id == '0' * 16 or (t.isUpdate and t.lastestVersion() is None):
                    Print.warning('no valid id? id: %s version: %s' % (str(t.id), str(t.lastestVersion())))
                    continue

                if t.lastestVersion() is None:
                    Print.info('Could not get version for ' + str(t.name) + ' [' + str(t.id) + ']')
                    continue

                Titles.queue.add(t.id)

        Print.info("%d titles scanned, downloading %d" % (i, Titles.queue.size()))

        Titles.save()
        status = Status.create(Titles.queue.size(), 'Total Download')
        startDownloadThreads()
        while wait and (not Titles.queue.empty() or sum(activeDownloads) > 0):
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    except BaseException as e:
        Print.error(str(e))

    if status:
        status.close()
def downloadAll(wait=True):
    nut.initTitles()
    nut.initFiles()

    global activeDownloads
    global status

    try:
        for k, t in Titles.items():
            # honour the download filters and white/blacklists from config
            if t.isUpdateAvailable() and (t.isDLC or t.isUpdate or Config.download.base) \
                    and (not t.isDLC or Config.download.DLC) \
                    and (not t.isDemo or Config.download.demo) \
                    and (not t.isUpdate or Config.download.update) \
                    and (t.key or Config.download.sansTitleKey) \
                    and (len(Config.titleWhitelist) == 0 or t.id in Config.titleWhitelist) \
                    and t.id not in Config.titleBlacklist:
                if not t.id or t.id == '0' * 16 or (t.isUpdate and t.lastestVersion() in [None, '0']):
                    #Print.warning('no valid id? ' + str(t.path))
                    continue

                if not t.lastestVersion():
                    Print.info('Could not get version for ' + str(t.name) + ' [' + str(t.id) + ']')
                    continue

                Titles.queue.add(t.id)

        Titles.save()
        status = Status.create(Titles.queue.size(), 'Total Download')
        startDownloadThreads()
        while wait and (not Titles.queue.empty() or sum(activeDownloads) > 0):
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    except BaseException as e:
        Print.error(str(e))

    if status:
        status.close()
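# A minimal sketch of the worker pair referenced by the downloadAll variants
# above; neither is shown in this excerpt. Assumptions: Titles.queue.shift()
# pops the next queued title id (or returns None when empty), and a
# module-level download(id) helper performs the actual transfer — both names
# are assumptions, not confirmed by this excerpt.
def downloadThread(i):
    Print.info('started download thread ' + str(i))
    while Config.isRunning:
        id = Titles.queue.shift()
        if id is None:
            time.sleep(1)
            continue
        activeDownloads[i] = 1  # mark this slot busy for downloadAll's wait loop
        try:
            download(id)  # hypothetical transfer entry point
        except BaseException as e:
            Print.error(str(e))
        activeDownloads[i] = 0

def startDownloadThreads():
    global activeDownloads
    for i in range(Config.threads):
        activeDownloads.append(0)
        t = threading.Thread(target=downloadThread, args=[i])
        t.daemon = True
        t.start()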
def scanBaseThread(baseStatus):
    while Config.isRunning:
        try:
            id = getRandomTitleId()

            if Titles.contains(id):
                continue

            ver = CDNSP.get_version(id.lower())

            if ver is not None:
                Print.info('Found new base ' + id)
                t = Title()
                t.setId(id)
                Titles.set(id, t)
                Titles.save()

            baseStatus.add()
        except BaseException as e:
            print('exception: ' + str(e))
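# A minimal sketch of getRandomTitleId used by scanBaseThread above; it is
# not shown in this excerpt. Assumption: base title ids are 16 hex digits
# beginning with '0100' and ending in '000' — the exact layout here is
# illustrative, not confirmed.
import random

def getRandomTitleId():
    # 4-digit prefix + 9 random hex digits + 3-digit suffix = 16 hex chars
    return '0100%09X000' % random.randrange(0x1000000000)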
def refreshRegions():
    for region in Config.regionLanguages():
        for language in Config.regionLanguages()[region]:
            for nsuId, regionTitle in Titles.data(region, language).items():
                if regionTitle.id:
                    title = Titles.get(regionTitle.id, None, None)

                    if not hasattr(title, 'regions') or not title.regions:
                        title.regions = []

                    if not hasattr(title, 'languages') or not title.languages:
                        title.languages = []

                    if region not in title.regions:
                        title.regions.append(region)

                    if language not in title.languages:
                        title.languages.append(language)
    Titles.save()
def updateVersions(force=True):
    initTitles()
    initFiles()

    i = 0
    for k, t in Titles.items():
        if force or t.version is None:
            if (t.isDLC or t.isUpdate or Config.download.base) \
                    and (not t.isDLC or Config.download.DLC) \
                    and (not t.isDemo or Config.download.demo) \
                    and (not t.isUpdate or Config.download.update) \
                    and (t.key or Config.download.sansTitleKey) \
                    and (len(Config.titleWhitelist) == 0 or t.id in Config.titleWhitelist) \
                    and t.id not in Config.titleBlacklist:
                v = t.lastestVersion(True)
                Print.info("%s[%s] v = %s" % (str(t.name), str(t.id), str(v)))

                i = i + 1
                if i % 20 == 0:
                    Titles.save()

    for t in list(Titles.data().values()):
        if not t.isUpdate and not t.isDLC and t.updateId and not Titles.contains(t.updateId):
            u = Title.Title()
            u.setId(t.updateId)

            if u.lastestVersion():
                Titles.set(t.updateId, u)
                Print.info("%s[%s] FOUND" % (str(t.name), str(u.id)))

            i = i + 1
            if i % 20 == 0:
                Titles.save()

    Titles.save()
def refresh(titleRightsOnly=False):
    nut.initTitles()
    nut.initFiles()

    i = 0
    for k, f in Nsps.files.items():
        try:
            if titleRightsOnly:
                title = Titles.get(f.titleId)
                if title and title.rightsId and (title.key or f.path.endswith('.nsx')):
                    continue
            i = i + 1
            print(f.path)
            f.open()
            f.readMeta()
            f.close()

            # save periodically so a crash doesn't lose everything
            if i > 20:
                i = 0
                Titles.save()
        except BaseException as e:
            print('exception: ' + str(e))
    Titles.save()
def updateVersions(force=True):
    initTitles()
    initFiles()

    for k, t in tqdm(Titles.items()):
        if force or t.version is None:
            if t.isActive():
                v = t.lastestVersion(True)
                Print.info("%s[%s] v = %s" % (str(t.name), str(t.id), str(v)))

    for t in list(Titles.data().values()):
        if not t.isUpdate and not t.isDLC and t.updateId and not Titles.contains(t.updateId):
            u = Title.Title()
            u.setId(t.updateId)

            if u.lastestVersion():
                Titles.set(t.updateId, u)
                Print.info("%s[%s] FOUND" % (str(t.name), str(u.id)))

    Titles.save()
def scrapeShogunThreaded(force=False, refresh=False):
    initTitles()
    initFiles()

    scrapeThreads = []
    numThreads = 4

    if Config.reverse:
        q = queue.LifoQueue()
    else:
        q = queue.Queue()

    for region in cdn.regions():
        q.put(region)

    for i in range(numThreads):
        t = threading.Thread(target=scrapeShogunWorker, args=[q, force, refresh])
        t.daemon = True
        t.start()
        scrapeThreads.append(t)

    Print.info('joining shogun queue')
    q.join()

    for i in range(numThreads):
        q.put(None)

    i = 0
    for t in scrapeThreads:
        i += 1
        t.join()
        Print.info('joined thread %d of %d' % (i, len(scrapeThreads)))

    Print.info('saving titles')
    Titles.save()
    Print.info('titles saved')
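# A minimal sketch of the scrapeShogunWorker consumed above; it is not shown
# in this excerpt. Assumptions: cdn.Shogun.scrapeRegion (name assumed) does
# the per-region scrape, the None sentinel shuts the worker down, and every
# get() is paired with task_done() so q.join() can return.
def scrapeShogunWorker(q, force, refresh):
    while True:
        region = q.get()
        if region is None:
            q.task_done()
            return
        try:
            cdn.Shogun.scrapeRegion(region, force=force, refresh=refresh)  # assumed entry point
        except BaseException as e:
            Print.error(str(e))
        finally:
            q.task_done()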
def scrapeShogunUnnamed():
    initTitles()
    initFiles()

    result = {}

    # collect base ids of DLC that still have no name
    for k, t in Titles.data().items():
        if not t.isDLC:
            continue
        if not t.name and t.baseId != '0100069000078000':
            result[t.baseId] = True

    if not Config.dryRun:
        for id, j in tqdm(result.items()):
            try:
                for region, languages in Config.regionLanguages().items():
                    for language in languages:
                        t = Titles.getTitleId(id, region, language)
                        if t:
                            s = cdn.Shogun.scrapeTitle(int(t.nsuId), region=region, language=language, force=False)
                            #print(json.dumps(s.__dict__))
            except BaseException:
                pass

        for region, languages in Config.regionLanguages().items():
            for language in languages:
                Titles.saveRegion(region, language)
        Titles.save()
    else:
        print(result)
def getUpdateDb(request, response):
    for url in Config.titleUrls:
        nut.updateDb(url)
    Titles.loadTxtDatabases()
    Titles.save()
    return success(request, response, "Fin")
        raise  # re-raised from the surrounding except block (not shown in this excerpt)

    if args.decompress:
        for f in expandFiles(args.file):
            path = nut.NszDecompressor.decompress(str(f), Config.paths.nspOut)

            if path:
                i = Nsp(path)
                i.move()

    if args.update_titles:
        nut.initTitles()
        for url in Config.titleUrls:
            nut.updateDb(url)
        Titles.loadTxtDatabases()
        Titles.save()

    if args.update_check:
        nut.initTitles()
        nut.initFiles()
        for _, game in Nsps.files.items():
            title = game.title()
            if title.isUpdate or title.isDLC:
                if game.isUpdateAvailable():
                    Print.info(title.getName())
                    Print.info(game.isUpdateAvailable())
        exit(0)

    if args.submit_keys:
        nut.initTitles()
        nut.initFiles()
def scrapeTitles(region='US', shop_id=4):
    for language in countryLanguages(region):
        scrapeLangTitles(region, language, shop_id)
    Titles.save()
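# A minimal sketch of the countryLanguages helper used above; it is not shown
# in this excerpt. Assumption: it simply looks the region up in the
# region -> languages map from config.
def countryLanguages(region):
    return Config.regionLanguages().get(region, [])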