Example #1
def download_file(url, fPath, titleId=None):
    fName = os.path.basename(fPath).split()[0]

    if os.path.exists(fPath):
        dlded = os.path.getsize(fPath)
        r = make_request('GET', url, hdArgs={'Range': 'bytes=%s-' % dlded})

        if r.headers.get('Server') != 'openresty/1.9.7.4':
            Print.info('Download is already complete, skipping!')
            return fPath
        elif r.headers.get(
                'Content-Range'
        ) == None:  # CDN doesn't return a range if request >= filesize
            fSize = int(r.headers.get('Content-Length'))
        else:
            fSize = dlded + int(r.headers.get('Content-Length'))

        if dlded == fSize:
            Print.info('Download is already complete, skipping!')
            return fPath
        elif dlded < fSize:
            Print.info('Resuming download...')
            f = open(fPath, 'ab')
        else:
            Print.error(
                'Existing file is bigger than expected (%s/%s), restarting download...'
                % (dlded, fSize))
            dlded = 0
            f = open(fPath, "wb")
    else:
        dlded = 0
        r = make_request('GET', url)
        fSize = int(r.headers.get('Content-Length'))
        f = open(fPath, 'wb')

    chunkSize = 0x100000

    if fSize >= 10000:
        s = Status.create(fSize, desc=fName, unit='B')
        if titleId is not None:
            s.id = titleId.upper()
        s.add(dlded)
        for chunk in r.iter_content(chunkSize):
            f.write(chunk)
            s.add(len(chunk))
            dlded += len(chunk)

            if not Config.isRunning:
                break
        s.close()
    else:
        f.write(r.content)
        dlded += len(r.content)

    if fSize != 0 and dlded != fSize:
        raise ValueError('Downloaded data is not as big as expected (%s/%s)!' %
                         (dlded, fSize))

    f.close()
    Print.debug('\r\nSaved to %s!' % f.name)
    return fPath
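The resume logic in download_file boils down to a single HTTP Range request. Below is a minimal sketch of the same idea using the requests library directly; make_request is assumed to wrap something similar with streaming enabled, and the helper name is hypothetical.

import os
import requests

def resume_download(url, path, chunk=0x100000):
    # Illustrative only: byte-range resume as in download_file() above.
    done = os.path.getsize(path) if os.path.exists(path) else 0
    headers = {'Range': 'bytes=%d-' % done} if done else {}
    with requests.get(url, headers=headers, stream=True, timeout=30) as r:
        # 206 means the server honoured the range; anything else restarts from zero.
        mode = 'ab' if r.status_code == 206 else 'wb'
        with open(path, mode) as f:
            for buf in r.iter_content(chunk):
                f.write(buf)
    return path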
Example #2
    def __init__(self, response, f, size=None):
        self.response = response
        self.f = f
        self.status = Status.create(size or f.size,
                                    'Downloading ' + os.path.basename(f.url))
        self.buffer = b''
        self.chunk_size = 0x100000 * 16
Example #3
def download(url, f, titleId=None, name=None, checkSize=True):
	bytes = 0
	r = makeRequest('GET', url)

	if r.status_code == 404:
		Print.error('could not download: ' + str(url))
		return 0
	size = int(r.headers.get('Content-Length'))

	chunkSize = 0x100000

	if size >= 10000:
		s = Status.create(size, desc=name, unit='B')
		if titleId is not None:
			s.id = titleId.upper()

		for chunk in r.iter_content(chunkSize):
			f.write(chunk)
			s.add(len(chunk))
			bytes += len(chunk)

			if not Config.isRunning:
				break
		s.close()
	else:
		f.write(r.content)
		bytes += len(r.content)

	if checkSize and size != 0 and bytes != size:
		raise ValueError('Downloaded data is not as big as expected (%s/%s)!' % (bytes, size))

	return bytes
Example #4
def scrape(delta):
	initTitles()
	initFiles()

	global scrapeQueue

	if Config.reverse:
		scrapeQueue = queue.LifoQueue()
	else:
		scrapeQueue = queue.Queue()

	for titleId in Titles.titles.keys():
		scrapeQueue.put(titleId)

	st = Status.create(scrapeQueue.qsize(), 'eShop meta scrape')
	threads = []
	for i in range(scrapeThreads):
		t = threading.Thread(target=scrapeThread, args=[st, delta])
		t.start()
		threads.append(t)

	for t in threads:
		t.join()

	Titles.save()
	st.close()
Example #5
def _ftpsync(url):
    if Config.reverse:
        q = queue.LifoQueue()
    else:
        q = queue.Queue()

    fileList = []

    for f in Fs.driver.openDir(url).ls():
        if f.isFile():
            fileList.append(f.url)

    for path in fileList:
        try:
            if path.split('.')[-1].lower() not in ('nsx', 'nsz', 'nsp', 'xci'):
                continue

            unq = urllib.parse.unquote(path)
            nsp = Fs.factory(unq, unq, None)
            nsp.downloadPath = path

            if not nsp.titleId:
                continue

            title = Titles.get(nsp.titleId)

            if not title.isActive(skipKeyCheck=True):
                continue

            files = title.getFiles(path[-3:])
            files = [x for x in files if int(x.version) >= int(nsp.version)]

            if not len(files):
                if path[-3:] == 'nsx':
                    if len(Titles.get(nsp.titleId).getFiles('nsp')) or len(
                            Titles.get(nsp.titleId).getFiles('nsz')):
                        continue
                q.put(nsp)
        except BaseException as e:
            Print.error(str(e))
            # raise #TODO

    numThreads = Config.threads
    threads = []

    s = Status.create(q.qsize(), 'Total File Pulls')
    if numThreads > 0:
        Print.info('creating pull threads, items: ' + str(q.qsize()))

        for i in range(numThreads):
            t = threading.Thread(target=pullWorker, args=[q, s])
            t.daemon = True
            t.start()
            threads.append(t)

        for t in threads:
            t.join()
    else:
        pullWorker(q, s)
    s.close()
Example #6
    def repack(self):
        Print.debug('\tRepacking to NSP...')

        hd = self.gen_header()

        totSize = len(hd) + sum(os.path.getsize(file) for file in self.files)
        if os.path.exists(self.path) and os.path.getsize(self.path) == totSize:
            Print.info('\t\tRepack %s is already complete!' % self.path)
            return

        t = Status.create(totSize, unit='B', desc=os.path.basename(self.path))

        Print.debug('\t\tWriting header...')
        outf = open(self.path, 'wb')
        outf.write(hd)
        t.update(len(hd))

        done = 0
        for file in self.files:
            Print.debug('\t\tAppending %s...' % os.path.basename(file))
            with open(file, 'rb') as inf:
                while True:
                    buf = inf.read(4096)
                    if not buf:
                        break
                    outf.write(buf)
                    t.update(len(buf))
        t.close()

        Print.debug('\t\tRepacked to %s!' % outf.name)
        outf.close()
Example #7
def getQueue(request, response):
    r = Status.data().copy()
    q = Titles.queue.get().copy()
    i = Titles.queue.i
    while i < len(q):
        r.append({'id': q[i], 'i': 0, 'size': 0, 'elapsed': 0, 'speed': 0})
        i += 1
    response.write(json.dumps(r))
Example #8
def scan(base):
    i = 0

    fileList = {}

    nspOut = os.path.abspath(Config.paths.nspOut)
    duplicatesFolder = os.path.abspath(Config.paths.duplicates)

    Print.info('scanning %s' % base)
    for root, _, _files in os.walk(base, topdown=False):
        for name in _files:
            if _is_file_hidden(name):
                continue
            suffix = pathlib.Path(name).suffix

            if suffix in ('.nsp', '.nsx', '.xci', '.nsz'):
                path = os.path.abspath(root + '/' + name)
                if not path.startswith(nspOut) and not path.startswith(
                        duplicatesFolder):
                    fileList[path] = name

    if len(fileList) == 0:
        save()
        return 0

    status = Status.create(len(fileList), desc='Scanning files...')

    try:
        for path, name in fileList.items():
            try:
                status.add(1)

                if path not in files:
                    Print.info('scanning ' + name)

                    nsp = Fs.Nsp(path, None)
                    nsp.timestamp = time.time()
                    nsp.getFileSize()  # cache file size

                    files[nsp.path] = nsp

                    i = i + 1
                    if i % 20 == 0:
                        save()
            except KeyboardInterrupt:
                status.close()
                raise
            except BaseException as e:  # pylint: disable=broad-except
                Print.info('An error occurred processing file: ' + str(e))

        save()
        status.close()
    except BaseException as e:  # pylint: disable=broad-except
        Print.info('An error occurred scanning files: ' + str(e))
    return i
Example #9
def scrapeThread(id, delta=True):
    size = len(Titles.titles) // scrapeThreads
    st = Status.create(size, 'Thread ' + str(id))
    for i, titleId in enumerate(Titles.titles.keys()):
        try:
            if (i - id) % scrapeThreads == 0:
                Titles.get(titleId).scrape(delta)
                st.add()
        except BaseException as e:
            Print.error(str(e))
    st.close()
Example #10
def startDlcScan(queue):
    dlcStatus = Status.create(queue.size() * 0x200, 'DLC Scan')
    #scanDLC(id)
    threads = []
    for i in range(scrapeThreads):
        t = threading.Thread(target=scanDLCThread, args=[queue, dlcStatus])
        t.start()
        threads.append(t)

    for t in threads:
        t.join()
    dlcStatus.close()
Example #11
def scan(base, force=False):
    global hasScanned
    #if hasScanned and not force:
    #	return

    hasScanned = True
    i = 0

    fileList = {}

    Print.info(base)
    for root, dirs, _files in os.walk(base, topdown=False, followlinks=True):
        for name in _files:
            suffix = pathlib.Path(name).suffix

            if suffix == '.nsp' or suffix == '.nsx' or suffix == '.nsz':
                path = os.path.abspath(root + '/' + name)
                fileList[path] = name

    if len(fileList) == 0:
        save()
        return 0

    status = Status.create(len(fileList), desc='Scanning files...')

    try:
        for path, name in fileList.items():
            try:
                status.add(1)

                if path not in files:
                    Print.info('scanning ' + name)
                    nsp = Fs.Nsp(path, None)
                    nsp.getFileSize()

                    files[nsp.path] = nsp

                    i = i + 1
                    if i % 20 == 0:
                        save()
            except KeyboardInterrupt:
                status.close()
                raise
            except BaseException as e:
                Print.info('An error occurred processing file: ' + str(e))
                raise

        save()
        status.close()
    except BaseException as e:
        Print.info('An error occurred scanning files: ' + str(e))
        raise
    return i
Example #12
def startBaseScan():
    baseStatus = Status.create(pow(2, 28), 'Base Scan')

    threads = []
    for i in range(scrapeThreads):
        t = threading.Thread(target=scanBaseThread, args=[baseStatus])
        t.start()
        threads.append(t)

    for t in threads:
        t.join()

    baseStatus.close()
Example #13
def _ftpsync(url):
	if Config.reverse:
		q = queue.LifoQueue()
	else:
		q = queue.Queue()

	fileList = []

	for f in Fs.driver.openDir(url).ls():
		if f.isFile():
			fileList.append(f.url)

	for path in fileList:
		try:
			#print('checking ' + path)
			nsp = Fs.Nsp()
			nsp.setPath(urllib.parse.unquote(path))
			nsp.downloadPath = path

			if not nsp.titleId:
				continue

			if not Titles.contains(nsp.titleId) or (not len(Titles.get(nsp.titleId).getFiles(
					path[-3:])) and Titles.get(nsp.titleId).isActive(skipKeyCheck=True)):
				if path[-3:] == 'nsx':
					if len(Titles.get(nsp.titleId).getFiles('nsp')) or len(Titles.get(nsp.titleId).getFiles('nsz')):
						continue
				q.put(nsp)
		except BaseException as e:
			Print.error(str(e))
			# raise #TODO

	numThreads = Config.threads
	threads = []

	s = Status.create(q.qsize(), 'Total File Pulls')
	if numThreads > 0:
		Print.info('creating pull threads, items: ' + str(q.qsize()))

		for i in range(numThreads):
			t = threading.Thread(target=pullWorker, args=[q, s])
			t.daemon = True
			t.start()
			threads.append(t)

		for t in threads:
			t.join()
	else:
		pullWorker(q, s)
	s.close()
Example #14
def calc_sha256(fPath):
    f = open(fPath, 'rb')
    fSize = os.path.getsize(fPath)
    hash = sha256()

    if fSize >= 10000:
        t = Status.create(fSize, unit='B', desc=os.path.basename(fPath))
        while True:
            buf = f.read(4096)
            if not buf:
                break
            hash.update(buf)
            t.update(len(buf))
        t.close()
    else:
        hash.update(f.read())
    f.close()
    return hash.hexdigest()
Example #15
	def sha256(self):
		hash = hashlib.sha256()

		self.rewind()

		if self.size >= 10000:
			t = Status.create(self.size, unit='B', desc=os.path.basename(self._path))

			while True:
				buf = self.read(1 * 1024 * 1024, True)
				if not buf:
					break
				hash.update(buf)
				t.update(len(buf))
			t.close()
		else:
			hash.update(self.read(None, True))

		return hash.hexdigest()
Example #16
def downloadAll(wait=True):
    nut.initTitles()
    nut.initFiles()

    global activeDownloads
    global status

    try:

        for k, t in Titles.items():
            if t.isUpdateAvailable() and (
                    t.isDLC or t.isUpdate or Config.download.base) and (
                        not t.isDLC or Config.download.DLC) and (
                            not t.isDemo or Config.download.demo) and (
                                not t.isUpdate or Config.download.update) and (
                                    t.key or Config.download.sansTitleKey
                                ) and (len(Config.titleWhitelist) == 0
                                       or t.id in Config.titleWhitelist
                                       ) and t.id not in Config.titleBlacklist:
                if not t.id or t.id == '0' * 16 or (
                        t.isUpdate and t.lastestVersion() in [None, '0']):
                    #Print.warning('no valid id? ' + str(t.path))
                    continue

                if not t.lastestVersion():
                    Print.info('Could not get version for ' + str(t.name) +
                               ' [' + str(t.id) + ']')
                    continue

                Titles.queue.add(t.id)
        Titles.save()
        status = Status.create(Titles.queue.size(), 'Total Download')
        startDownloadThreads()
        while wait and (not Titles.queue.empty() or sum(activeDownloads) > 0):
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    except BaseException as e:
        Print.error(str(e))

    if status:
        status.close()
Example #17
File: nut.py Project: blawar/nut
            if args.mtime_max:
                Config.download.mtime_max = args.mtime_max

            if args.reverse:
                Config.reverse = True
            else:
                Config.reverse = False

            if args.extract_version:
                Config.extractVersion = True

            if args.json:
                Config.jsonOutput = True

            Status.start()

            Print.info('                        ,;:;;,')
            Print.info('                       ;;;;;')
            Print.info('               .=\',    ;:;;:,')
            Print.info('              /_\', "=. \';:;:;')
            Print.info('              @=:__,  \,;:;:\'')
            Print.info('                _(\.=  ;:;;\'')
            Print.info('               `"_(  _/="`')
            Print.info('                `"\'')

            if args.dry:
                Config.dryRun = True

            Config.download.base = bool(args.base)
            Config.download.DLC = bool(args.dlc)
Example #18
    def __init__(self, response, f):
        self.response = response
        self.f = f
        self.status = Status.create(f.size,
                                    'Downloading ' + os.path.basename(f.url))
Example #19
def decompressAll():
	initTitles()
	initFiles()

	global activeDownloads
	global status

	i = 0
	Print.info('De-compressing All')

	if Config.reverse:
		q = queue.LifoQueue()
	else:
		q = queue.Queue()

	for k, t in Titles.items():
		try:
			i = i + 1
			if not t.isActive(skipKeyCheck=True):
				continue

			lastestNsz = t.getLatestNsz()

			if not lastestNsz:
				continue

			lastestNsp = t.getLatestNsp()

			if lastestNsp is not None and int(lastestNsp.version) >= int(lastestNsz.version):
				continue

			if Config.dryRun:
				Print.info('nsp ver = %x, nsz ver = %x, %s' % (getVer(lastestNsp), getVer(lastestNsz), t.getName()))

			if Config.download.fileSizeMax is not None and lastestNsz.getFileSize() > Config.download.fileSizeMax:
				continue

			if Config.download.fileSizeMin is not None and lastestNsz.getFileSize() < Config.download.fileSizeMin:
				continue

			q.put(lastestNsz.path)

		except BaseException as e:
			Print.info('DECOMPRESS ALL EXCEPTION: ' + str(e))

	numThreads = Config.threads
	threads = []

	s = Status.create(q.qsize(), desc="NSPs", unit='B')

	if numThreads > 0:
		Print.info('creating decompression threads ' + str(q.qsize()))

		for i in range(numThreads):
			t = threading.Thread(target=decompressWorker, args=[q, Config.paths.nspOut, s])
			t.daemon = True
			t.start()
			threads.append(t)

		for t in threads:
			t.join()
	else:
		decompressWorker(q, Config.paths.nspOut, s)

	s.close()
Example #20
def compressAll(level=19):
	initTitles()
	initFiles()

	global activeDownloads
	global status

	i = 0
	Print.info('Compressing All')

	if Config.reverse:
		q = queue.LifoQueue()
	else:
		q = queue.Queue()

	for k, t in Titles.items():
		try:
			i = i + 1
			if not t.isActive(skipKeyCheck=True):
				continue

			lastestNsp = t.getLatestNsp()

			if not lastestNsp:
				continue

			if lastestNsp.titleId.endswith('000') and lastestNsp.version and int(lastestNsp.version) > 0:
				Print.info('Cannot compress sparse file: ' + str(lastestNsp.path))
				continue

			lastestNsz = t.getLatestNsz()

			if lastestNsz is not None and int(lastestNsz.version) >= int(lastestNsp.version):
				continue

			if Config.download.fileSizeMax is not None and lastestNsp.getFileSize() > Config.download.fileSizeMax:
				continue

			if Config.download.fileSizeMin is not None and lastestNsp.getFileSize() < Config.download.fileSizeMin:
				continue

			q.put(lastestNsp.path)

		except BaseException as e:
			Print.info('COMPRESS ALL EXCEPTION: ' + str(e))

	numThreads = Config.threads
	threads = []

	s = Status.create(q.qsize(), desc="NSPs", unit='B')

	if numThreads > 0:
		Print.info('creating compression threads ' + str(q.qsize()))

		for i in range(numThreads):
			t = threading.Thread(target=compressWorker, args=[q, level, Config.paths.nspOut, s])
			t.daemon = True
			t.start()
			threads.append(t)

		for t in threads:
			t.join()
	else:
		compressWorker(q, level, Config.paths.nspOut, s)

	s.close()
Example #21
def compress(filePath, compressionLevel=19, outputDir=None):
	filePath = os.path.abspath(filePath)

	CHUNK_SZ = 0x1000000

	if outputDir is None:
		nszPath = filePath[0:-1] + 'z'
	else:
		nszPath = os.path.join(outputDir, os.path.basename(filePath[0:-1] + 'z'))

	nszPath = os.path.abspath(nszPath)

	Print.info('compressing (level %d) %s -> %s' % (compressionLevel, filePath, nszPath))

	if Config.dryRun:
		return None

	container = Fs.factory(filePath)

	container.open(filePath, 'rb')

	newNsp = Pfs0Stream(nszPath)

	for nspf in container:
		if isinstance(nspf, Fs.Nca) and ((nspf.header.contentType == Fs.Type.Content.PROGRAM or nspf.header.contentType == Fs.Type.Content.PUBLICDATA) or int(nspf.header.titleId, 16) <= 0x0100000000001000):
			if nspf.size > ncaHeaderSize * 2:
				cctx = zstandard.ZstdCompressor(level=compressionLevel)

				newFileName = nspf._path[0:-1] + 'z'

				f = newNsp.add(newFileName, nspf.size)

				start = f.tell()

				nspf.seek(0)
				h = nspf.read(ncaHeaderSize)
				#crypto = aes128.AESXTS(uhx(Keys.get('header_key')))
				#d = crypto.decrypt(h)

				# if d[0x200:0x204] == b'NCA3':
				#	d = d[0:0x200] + b'NCZ3' + d[0x204:]
				#	h = crypto.encrypt(d)
				# else:
				#	raise IOError('unknown NCA magic')

				# self.partition(0x0, 0xC00, self.header, Fs.Type.Crypto.XTS, uhx(Keys.get('header_key')))
				f.write(h)
				written = ncaHeaderSize

				compressor = cctx.stream_writer(f)

				sections = []
				sectionsTmp = []
				for fs in sortedFs(nspf):
					sectionsTmp += fs.getEncryptionSections()

				currentOffset = ncaHeaderSize
				for fs in sectionsTmp:
					if fs.offset < ncaHeaderSize:
						if fs.offset + fs.size < ncaHeaderSize:
							currentOffset = fs.offset + fs.size
							continue
						else:
							fs.size -= ncaHeaderSize - fs.offset
							fs.offset = ncaHeaderSize
					elif fs.offset > currentOffset:
						sections.append(BaseFs.EncryptedSection(currentOffset, fs.offset - currentOffset, Fs.Type.Crypto.NONE, None, None))
					elif fs.offset < currentOffset:
						raise IOError("misaligned nca partitions")

					sections.append(fs)
					currentOffset = fs.offset + fs.size

				header = b'NCZSECTN'
				header += len(sections).to_bytes(8, 'little')

				i = 0
				for fs in sections:
					i += 1
					header += fs.offset.to_bytes(8, 'little')
					header += fs.size.to_bytes(8, 'little')
					header += fs.cryptoType.to_bytes(8, 'little')
					header += b'\x00' * 8
					header += fs.cryptoKey
					header += fs.cryptoCounter

				f.write(header)
				written += len(header)

				bar = Status.create(nspf.size, desc=os.path.basename(nszPath), unit='B')

				decompressedBytes = ncaHeaderSize
				bar.add(ncaHeaderSize)

				for section in sections:
					#print('offset: %x\t\tsize: %x\t\ttype: %d\t\tiv%s' % (section.offset, section.size, section.cryptoType, str(hx(section.cryptoCounter))))
					o = nspf.partition(offset=section.offset, size=section.size, n=None, cryptoType=section.cryptoType,
									   cryptoKey=section.cryptoKey, cryptoCounter=bytearray(section.cryptoCounter), autoOpen=True)

					while not o.eof():
						buffer = o.read(CHUNK_SZ)

						if len(buffer) == 0:
							raise IOError('read failed')

						written += compressor.write(buffer)

						decompressedBytes += len(buffer)
						bar.add(len(buffer))

					o.close()

				compressor.flush(zstandard.FLUSH_FRAME)
				bar.close()

				Print.info('%d written vs %d tell' % (written, f.tell() - start))
				written = f.tell() - start
				Print.info('compressed %d%% %d -> %d  - %s' % (int(written * 100 / nspf.size), decompressedBytes, written, nspf._path))
				newNsp.resize(newFileName, written)

				continue

		f = newNsp.add(nspf._path, nspf.size)
		nspf.seek(0)
		while not nspf.eof():
			buffer = nspf.read(CHUNK_SZ)
			f.write(buffer)

	newNsp.close()
	return nszPath
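The compress() example above writes a small 'NCZSECTN' block describing each encrypted section: the magic, a section count, then per section the offset, size, crypto type, 8 bytes of padding, key and counter. A standalone reader for that block might look like the sketch below; the 16-byte key and counter widths are an assumption (matching AES-128-CTR) that the writer above does not spell out, and the names are hypothetical.

import struct
from collections import namedtuple

NczSection = namedtuple('NczSection', 'offset size cryptoType cryptoKey cryptoCounter')

def read_ncz_sections(f):
    # Hypothetical parser for the section table written by compress() above.
    if f.read(8) != b'NCZSECTN':
        raise ValueError('missing NCZSECTN magic')
    (count,) = struct.unpack('<Q', f.read(8))
    sections = []
    for _ in range(count):
        offset, size, cryptoType, _pad = struct.unpack('<QQQQ', f.read(32))
        key = f.read(16)      # assumed width, matching AES-128 keys
        counter = f.read(16)  # assumed width, matching AES-CTR counters
        sections.append(NczSection(offset, size, cryptoType, key, counter))
    return sections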
Example #22
        if args.hostname:
            args.server = True
            Config.server.hostname = args.hostname

        if args.port:
            args.server = True
            Config.server.port = int(args.port)

        if args.silent:
            Print.silent = True

        if args.json:
            Config.jsonOutput = True

        Status.start()

        Print.info('                        ,;:;;,')
        Print.info('                       ;;;;;')
        Print.info('               .=\',    ;:;;:,')
        Print.info('              /_\', "=. \';:;:;')
        Print.info('              @=:__,  \,;:;:\'')
        Print.info('                _(\.=  ;:;;\'')
        Print.info('               `"_(  _/="`')
        Print.info('                `"\'')

        if args.extract:
            nut.initTitles()
            for filePath in args.extract:
                #f = Fs.Nsp(filePath, 'rb')
                f = Fs.factory(filePath)
Example #23
def debug(s):
    if not silent and enableDebug:
        Status.print_(s)
Example #24
def warning(s):
    if not silent and enableWarning:
        Status.print_(s)
Example #25
def error(s):
    if not silent and enableError:
        Status.print_(s)
Example #26
def serveFile(response, path, filename=None, start=None, end=None):
    try:
        if start is not None:
            start = int(start)

        if end is not None:
            end = int(end)

        if not filename:
            filename = os.path.basename(path)

        response.attachFile(filename)

        chunkSize = 0x400000

        with open(path, "rb") as f:
            f.seek(0, 2)
            size = f.tell()
            if start and end:
                if end is None:
                    end = size - 1
                else:
                    end = int(end)

                if start is None:
                    start = size - end
                else:
                    start = int(start)

                if start >= size or start < 0 or end <= 0:
                    return Server.Response400(
                        request, response,
                        'Invalid range request %d - %d' % (start, end))

                response.setStatus(206)

            else:
                if start is None:
                    start = 0
                if end is None:
                    end = size

            if end >= size:
                end = size

                if end <= start:
                    response.write(b'')
                    return

            print('ranged request for %d - %d' % (start, end))
            f.seek(start, 0)

            response.setMime(path)
            response.setHeader('Accept-Ranges', 'bytes')
            response.setHeader('Content-Range',
                               'bytes %s-%s/%s' % (start, end - 1, size))
            response.setHeader('Content-Length', str(end - start))
            response.sendHeader()

            if not response.head:
                size = end - start

                i = 0
                status = Status.create(size,
                                       'Downloading ' + os.path.basename(path))

                while i < size:
                    chunk = f.read(min(size - i, chunkSize))
                    i += len(chunk)

                    status.add(len(chunk))

                    if chunk:
                        response.write(chunk)
                    else:
                        break
                status.close()
    except BaseException as e:
        Print.error('File download exception: ' + str(e))
        traceback.print_exc(file=sys.stdout)

    if response.bytesSent == 0:
        response.write(b'')
Example #27
def __decompressNcz(nspf, f, statusReportInfo):
    UNCOMPRESSABLE_HEADER_SIZE = 0x4000
    blockID = 0
    nspf.seek(0)
    header = nspf.read(UNCOMPRESSABLE_HEADER_SIZE)
    if f is not None:
        start = f.tell()

    magic = nspf.read(8)
    if not magic == b'NCZSECTN':
        raise ValueError("No NCZSECTN found! Is this really a .ncz file?")
    sectionCount = nspf.readInt64()
    sections = [Section(nspf) for _ in range(sectionCount)]
    if sections[0].offset - UNCOMPRESSABLE_HEADER_SIZE > 0:
        fakeSection = FakeSection(
            UNCOMPRESSABLE_HEADER_SIZE,
            sections[0].offset - UNCOMPRESSABLE_HEADER_SIZE)
        sections.insert(0, fakeSection)
    nca_size = UNCOMPRESSABLE_HEADER_SIZE
    for i in range(sectionCount):
        nca_size += sections[i].size

    decompressor = ZstdDecompressor().stream_reader(nspf)
    hash = sha256()

    bar = Status.create(nspf.size, desc=os.path.basename(nspf._path), unit='B')

    # if statusReportInfo == None:
    #	BAR_FMT = u'{desc}{desc_pad}{percentage:3.0f}%|{bar}| {count:{len_total}d}/{total:d} {unit} [{elapsed}<{eta}, {rate:.2f}{unit_pad}{unit}/s]'
    #	bar = enlighten.Counter(total=nca_size//1048576, desc='Decompress', unit="MiB", color='red', bar_format=BAR_FMT)
    decompressedBytes = len(header)
    if f is not None:
        f.write(header)
        bar.add(len(header))

    hash.update(header)

    firstSection = True
    for s in sections:
        i = s.offset
        useCrypto = s.cryptoType in (3, 4)
        if useCrypto:
            crypto = aes128.AESCTR(s.cryptoKey, s.cryptoCounter)
        end = s.offset + s.size
        if firstSection:
            firstSection = False
            uncompressedSize = UNCOMPRESSABLE_HEADER_SIZE - sections[0].offset
            if uncompressedSize > 0:
                i += uncompressedSize
        while i < end:
            if useCrypto:
                crypto.seek(i)
            chunkSz = 0x10000 if end - i > 0x10000 else end - i

            inputChunk = decompressor.read(chunkSz)
            decompressor.flush()

            if not len(inputChunk):
                break
            if useCrypto:
                inputChunk = crypto.encrypt(inputChunk)
            if f is not None:
                f.write(inputChunk)
            hash.update(inputChunk)
            lenInputChunk = len(inputChunk)
            i += lenInputChunk
            decompressedBytes += lenInputChunk
            bar.add(lenInputChunk)  # advance progress once per chunk

    bar.close()
    print()

    hexHash = hash.hexdigest()
    if f is not None:
        end = f.tell()
        written = (end - start)
        return (written, hexHash)
    return (0, hexHash)
Example #28
def getDownload(request, response, start=None, end=None):
    try:
        nsp = Nsps.getByTitleId(request.bits[2])
        response.attachFile(nsp.titleId + '.nsp')

        if len(request.bits) >= 5:
            start = int(request.bits[-2])
            end = int(request.bits[-1])

        #chunkSize = 0x1000000
        chunkSize = 0x400000

        with open(nsp.path, "rb") as f:
            f.seek(0, 2)
            size = f.tell()
            if 'Range' in request.headers:
                start, end = request.headers.get('Range').strip().strip(
                    'bytes=').split('-')

                if end == '':
                    end = size - 1
                else:
                    end = int(end) + 1

                if start == '':
                    start = size - end
                else:
                    start = int(start)

                if start >= size or start < 0 or end <= 0:
                    return Server.Response400(
                        request, response,
                        'Invalid range request %d - %d' % (start, end))

                response.setStatus(206)

            else:
                if start is None:
                    start = 0
                if end is None:
                    end = size

            if end >= size:
                end = size

                if end <= start:
                    response.write(b'')
                    return

            print('ranged request for %d - %d' % (start, end))
            f.seek(start, 0)

            response.setMime(nsp.path)
            response.setHeader('Accept-Ranges', 'bytes')
            response.setHeader('Content-Range',
                               'bytes %s-%s/%s' % (start, end - 1, size))
            response.setHeader('Content-Length', str(end - start))
            #Print.info(response.headers['Content-Range'])
            response.sendHeader()

            if not response.head:
                size = end - start

                i = 0
                status = Status.create(
                    size, 'Downloading ' + os.path.basename(nsp.path))

                while i < size:
                    chunk = f.read(min(size - i, chunkSize))
                    i += len(chunk)

                    status.add(len(chunk))

                    if chunk:
                        response.write(chunk)
                    else:
                        break
                status.close()
    except BaseException as e:
        Print.error('NSP download exception: ' + str(e))
        traceback.print_exc(file=sys.stdout)
    if response.bytesSent == 0:
        response.write(b'')
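For comparison with the inline Range parsing in getDownload() above, here is a small standalone parser for a single 'bytes=a-b' range. It follows the usual RFC 7233 reading (inclusive end, 'bytes=-N' meaning the last N bytes) rather than mirroring the inline arithmetic exactly, and the helper name is hypothetical.

def parse_range(header_value, size):
    # Return (start, end_exclusive) for a single 'bytes=a-b' range,
    # or None if the header is malformed or cannot be satisfied.
    spec = header_value.strip()
    if not spec.startswith('bytes='):
        return None
    start_s, _, end_s = spec[len('bytes='):].partition('-')
    try:
        if start_s == '':                   # suffix form: bytes=-N -> last N bytes
            start = max(size - int(end_s), 0)
            end = size
        else:
            start = int(start_s)
            end = size if end_s == '' else min(int(end_s) + 1, size)
    except ValueError:
        return None
    return (start, end) if 0 <= start < end else None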
Example #29
def info(s):
    if not silent and enableInfo:
        Status.print_(s)
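Taken together, the examples follow one pattern for the Status helper: Status.start() is called once at program start-up (Examples #17 and #22), a counter is created per task with a byte total, advanced with add() or update(), and closed when the task ends. Below is a minimal sketch of that flow, assuming the nut modules are importable under these names and using a plain file copy as the task; the function is illustrative, not part of the project.

import os

from nut import Print, Status  # assumed import path; the examples only show the names


def copy_with_progress(src, dst, chunk=0x100000):
    # Illustrative only: the create/add/close pattern used above for
    # downloads, repacks and hashing, applied to a plain file copy.
    total = os.path.getsize(src)
    s = Status.create(total, desc=os.path.basename(src), unit='B')
    with open(src, 'rb') as fin, open(dst, 'wb') as fout:
        while True:
            buf = fin.read(chunk)
            if not buf:
                break
            fout.write(buf)
            s.add(len(buf))  # some examples advance with update() instead
    s.close()
    Print.info('copied %s -> %s' % (src, dst))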