def readlines_brotli(file):
    """Yield decoded UTF-8 lines from a brotli-compressed text file.

    Lines are yielded without their trailing newline.  The final line is
    yielded even when the file does not end with a newline (the original
    implementation silently dropped such a trailing fragment at EOF).

    :param file: path to the brotli-compressed file to read
    """
    with open(file, 'rb') as infile:
        dec = brotli.Decompressor()
        pending = b''  # decompressed bytes that do not yet end in a newline
        while True:
            raw = infile.read(4096)
            if not raw:
                break
            pending += dec.process(raw)
            # Emit every complete line currently buffered.
            newline_ind = pending.find(b'\n')
            while newline_ind != -1:
                yield pending[:newline_ind].decode('utf-8')
                pending = pending[newline_ind + 1:]
                newline_ind = pending.find(b'\n')
        # Flush a final line that has no terminating newline.
        if pending:
            yield pending.decode('utf-8')
def transfer(file, dat_fn, img_fn):
    '''Transfer data.

    Reads transfer-list commands from *file* (one "<cmd> <ranges>" pair per
    line), applies them to the image written at *img_fn* using the data file
    *dat_fn*, and returns True on success or False on a malformed line or an
    unknown command.
    '''
    # A '.br' suffix marks a brotli-compressed data file.
    br_compressed = dat_fn.endswith('.br')
    dec = brotli.Decompressor() if br_compressed else None
    # Context managers guarantee both files are closed on every exit path;
    # the original leaked them on the early ``return False`` branches.
    with open(dat_fn, 'rb') as dat_f, open(img_fn, 'wb') as img_f:
        cookie = [None]
        for line in file.readlines():
            logging.info(line)
            tokens = line.split()
            if len(tokens) != 2:
                logging.error('Invalid line: ' + line)
                return False
            cmd = tokens[0]
            ranges = parse_ranges(tokens[1])
            if cmd == 'erase':
                cmd_erase(img_f, ranges)
            elif cmd == 'zero':
                cmd_zero(img_f, ranges)
            elif cmd == 'new':
                cmd_new(img_f, ranges, dat_f, dec, cookie)
            else:
                logging.error('Invalid cmd: ' + cmd)
                return False
    return True
def test_streaming_decompression_fails_properly_on_garbage(exception_cls):
    """
    Garbage data properly fails decompression.
    """
    decompressor = brotli.Decompressor()
    # Bytes that are not a valid brotli stream must raise, not return junk.
    with pytest.raises(exception_cls):
        decompressor.decompress(b'some random garbage')
def __init__(self) -> None:
    """Set up a brotli decompressor behind a uniform ``_decompress`` callable.

    Raises ImportError when no brotli backend is importable.  The original
    used ``assert`` for this check, which is stripped under ``python -O``
    and would let a ``None`` module slip through to first use.
    """
    if brotli is None:
        raise ImportError(
            "The 'brotlipy' or 'brotli' library must be installed to use 'BrotliDecoder'"
        )
    self.decompressor = brotli.Decompressor()
    self.seen_data = False
    # brotlipy's Decompressor exposes .decompress; the brotli C bindings
    # expose .process -- pick whichever this backend provides.
    if hasattr(self.decompressor, "decompress"):
        self._decompress = self.decompressor.decompress
    else:
        self._decompress = self.decompressor.process  # pragma: nocover
def __init__(self):
    """Wrap a brotli decompressor behind a uniform ``decompress`` callable."""
    self._obj = brotli.Decompressor()
    # brotlipy names the streaming call ``decompress``; the official brotli
    # bindings name it ``process``.  Expose one name for either backend.
    method = getattr(self._obj, "decompress", None)
    self.decompress = method if method is not None else self._obj.process
def main(self):
    """Decode a Reekpie (RKPI) container: print its header fields to stderr
    and copy the PCM payload to the output, decompressing it as indicated
    by the header's compression field."""
    # The header is a fixed 10-byte prefix; bail out when missing/truncated.
    rkpihdr = self._decode_rkpi_header(self.args.file_in.read(10))
    if rkpihdr is None:
        print("Reekpie header not found or incomplete.")
        exit()
    # dump information to the TTY for describing the
    # aspects of the PCM data (as this is required
    # for parsing). NOTE: Put that into STDERR to avoid
    # it being piped with a program or a file by default.
    stderr.write((
        "Sampleformat: {sample_format}\n"
        "Compression: {compression}\n"
        f"Bytedepth: {rkpihdr['bytedepth']}\n"
        f"Samplerate: {rkpihdr['samplerate']}\n"
        "Channellayout: {channel_layout}\n"
        "Endianness: {endianness}\n"
        f"Channels: {rkpihdr['channels']}\n").format_map({
            # Header fields are small integers used to index these tuples.
            'channel_layout': ('interleaved', 'planar')
                [rkpihdr['channellayout']],
            'endianness': ('little', 'big')
                [rkpihdr['endianness']],
            'sample_format': ('unknown', 'unsigned', 'signed',
                              'float', 'adpcm', 'mu-law', 'a-law')
                [rkpihdr['sampleformat']],
            'compression': ('none', 'zstd', 'brotli', 'lzma')
                [rkpihdr['compression']]
        }))

    # 0b00 means "uncompressed": plain byte-for-byte copy.
    # NOTE(review): copying raw when do_decompress is set looks inverted --
    # confirm the flag's semantics against the CLI argument definition.
    if self.args.do_decompress or rkpihdr['compression'] == 0b00:
        if copyfile(self.args.file_in, self.args.file_out) == 0:
            stderr.write("Can’t do IO.")
    else:
        data_copied = 0  # how many bytes were copied during decompression.

        # Zstandard decompressor.
        if rkpihdr['compression'] == 0b01:
            data_copied = copyfile(
                zstandard.ZstdDecompressor().stream_reader(
                    self.args.file_in),
                self.args.file_out)
        # Brotli decompressor.
        elif rkpihdr['compression'] == 0b10:
            data_copied = copyfilemap(self.args.file_in,
                                      self.args.file_out,
                                      brotli.Decompressor().decompress)
        # LZMA decompressor.
        elif rkpihdr['compression'] == 0b11:
            data_copied = copyfile(lzma.LZMAFile(self.args.file_in),
                                   self.args.file_out)

        # Zero bytes copied signals either a decompressor or an IO failure.
        if data_copied == 0:
            stderr.write("Decompressor failed or can’t do IO.")
def __init__(self, properties: bytes, block_size: int):
    """Validate the coder's 3-byte brotli properties and build a decompressor.

    Raises UnsupportedCompressionMethodError when the properties blob has an
    unexpected size or declares a brotli version newer than ours.
    """
    if len(properties) != 3:
        raise UnsupportedCompressionMethodError(
            properties, "Unknown size of properties are passed")
    declared_version = (properties[0], properties[1])
    if declared_version > (brotli_major, brotli_minor):
        raise UnsupportedCompressionMethodError(
            properties,
            "Unsupported brotli version: {}.{} our {}.{}".format(
                properties[0], properties[1], brotli_major, brotli_minor),
        )
    self._prefix_checked = False
    self._decompressor = brotli.Decompressor()
def test_decompressobj(simple_compressed_file):
    """Streaming decompression plus flush reproduces the reference data."""
    with open(simple_compressed_file[0], 'rb') as f:
        expected = f.read()
    with open(simple_compressed_file[1], 'rb') as f:
        payload = f.read()

    decompressor = brotli.Decompressor()
    result = decompressor.decompress(payload) + decompressor.flush()
    assert result == expected
def __init__(self) -> None:
    """Prepare a brotli decompressor, bridging the brotlipy/brotli API split.

    Raises ImportError when neither brotli backend is importable.
    """
    if brotli is None:  # pragma: nocover
        raise ImportError(
            "Using 'BrotliDecoder', but the 'brotlipy' or 'brotli' library "
            "is not installed."
            "Make sure to install httpx using `pip install httpx[brotli]`."
        ) from None
    self.decompressor = brotli.Decompressor()
    self.seen_data = False
    # brotlipy exposes .decompress; the brotli C bindings expose .process.
    attr = "decompress" if hasattr(self.decompressor, "decompress") else "process"
    self._decompress = getattr(self.decompressor, attr)
def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
    """Create a streaming content decoder that writes decoded bytes to *out*.

    Raises ContentEncodingError for "br" when brotli support is missing.
    """
    self.out = out
    self.size = 0
    self.encoding = encoding
    self._started_decoding = False

    if encoding != "br":
        # gzip needs the 16+MAX_WBITS header flag; otherwise plain MAX_WBITS.
        wbits = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
        self.decompressor = zlib.decompressobj(wbits=wbits)
        return

    if not HAS_BROTLI:  # pragma: no cover
        raise ContentEncodingError(
            "Can not decode content-encoding: brotli (br). "
            "Please install `brotlipy`")
    self.decompressor = brotli.Decompressor()
def __init__(self, out, encoding):
    """Create a streaming content decoder that writes decoded bytes to *out*.

    Raises ContentEncodingError for 'br' when brotli support is missing.
    """
    self.out = out
    self.size = 0
    self.encoding = encoding
    self._started_decoding = False

    if encoding == 'br':
        if not HAS_BROTLI:  # pragma: no cover
            raise ContentEncodingError(
                'Can not decode content-encoding: brotli (br). '
                'Please install `brotlipy`')
        self.decompressor = brotli.Decompressor()
        return

    # gzip wants the 16+MAX_WBITS header flag; raw deflate a negative wbits.
    if encoding == 'gzip':
        zlib_mode = 16 + zlib.MAX_WBITS
    else:
        zlib_mode = -zlib.MAX_WBITS
    self.decompressor = zlib.decompressobj(wbits=zlib_mode)
def test_brotli_storage(self):
    """Saving through brotli_storage writes both the plain file and a .br
    sidecar whose decompressed content equals the original payload."""
    payload = ','.join(str(i) for i in range(1000)).encode()
    chunk_size = 1024

    storage.brotli_storage.save('test.txt', ContentFile(payload))

    plain_path = os.path.join(settings.COMPRESS_ROOT, 'test.txt')
    br_path = os.path.join(settings.COMPRESS_ROOT, 'test.txt.br')
    self.assertTrue(os.path.exists(plain_path))
    self.assertTrue(os.path.exists(br_path))

    # Decompress the sidecar in fixed-size chunks to exercise streaming.
    br_decompressor = brotli.Decompressor()
    pieces = []
    with open(br_path, 'rb') as f:
        for data in iter(lambda: f.read(chunk_size), b''):
            pieces.append(br_decompressor.process(data))
    self.assertEqual(payload, b''.join(pieces))
def download(url, dst='.', name=None, decompress=False):
    """Download *url* into directory *dst* and return the destination Path.

    :param url: URL to fetch
    :param dst: destination directory
    :param name: output filename; defaults to the last URL path segment
    :param decompress: when True, brotli-decompress the stream while writing
    :raises requests.HTTPError: on a non-2xx response (the original silently
        wrote error pages to disk)
    """
    if not name:
        name = url.split('/')[-1]
    dst = Path(dst) / name
    decompresser = brotli.Decompressor() if decompress else None
    # Close the streamed response deterministically and fail fast on HTTP
    # errors instead of persisting an error body as the downloaded file.
    with requests.get(url, stream=True) as res:
        res.raise_for_status()
        with open(dst, 'wb') as fp:
            for chunk in res.iter_content(chunk_size=8192):
                if decompresser is not None:
                    chunk = decompresser.decompress(chunk)
                fp.write(chunk)
    if decompresser is not None:
        # Verify the brotli stream actually reached its end marker.
        decompresser.finish()
    return dst
def test_drip_feed(simple_compressed_file):
    """
    Sending in the data one byte at a time still works.
    """
    with open(simple_compressed_file[0], 'rb') as f:
        uncompressed_data = f.read()
    with open(simple_compressed_file[1], 'rb') as f:
        compressed_data = f.read()

    decompressor = brotli.Decompressor()
    # Feed exactly one byte per call to exercise the streaming path.
    pieces = [decompressor.decompress(compressed_data[i:i + 1])
              for i in range(len(compressed_data))]
    pieces.append(decompressor.flush())
    assert b''.join(pieces) == uncompressed_data
def __init__(self, out, encoding):
    """Create a streaming content decoder that writes decoded bytes to *out*.

    Raises ContentEncodingError for 'br' when the brotli backend is missing.
    The original error message had an unbalanced backtick around `brotlipy`;
    fixed here.
    """
    self.out = out
    self.size = 0
    self.encoding = encoding
    self._started_decoding = False
    if encoding == 'br':
        if brotli is None:
            raise ContentEncodingError(
                'Can not decode content-encoding: brotli (br). '
                'Please install `brotlipy`'
            )
        self.decompressor = brotli.Decompressor()
    else:
        # gzip wants the 16+MAX_WBITS header flag; raw deflate negative wbits.
        if encoding == 'gzip':
            zlib_mode = 16 + zlib.MAX_WBITS
        else:
            zlib_mode = -zlib.MAX_WBITS
        self.decompressor = zlib.decompressobj(wbits=zlib_mode)
def setUp(self):
    """Create a fresh brotli decompressor before each test case."""
    self.decompressor = brotli.Decompressor()
def __init__(self, fd, readchunk=100*1024):
    """Wrap binary file object *fd* as a streaming brotli reader.

    :param fd: underlying file object supplying compressed bytes
    :param readchunk: compressed bytes to pull from *fd* per refill
    """
    self.fd = fd
    self.readchunk = readchunk
    self.decompressor = brotli.Decompressor()
    # Decompressed bytes not yet consumed by the caller.
    self.buf = b''
def __init__(self) -> None:
    """Initialize the decoder's brotli decompressor.

    Raises ImportError when the brotli backend is missing.  The original
    used ``assert``, which is stripped under ``python -O`` and would defer
    the failure to first use of the decompressor.
    """
    if brotli is None:
        raise ImportError(
            "The 'brotlipy' library must be installed to use 'BrotliDecoder'"
        )
    self.decompressor = brotli.Decompressor()
def test_compressed_data_with_dictionaries(s, dictionary):
    """Round-trip with a custom dictionary restores the original bytes."""
    decompressor = brotli.Decompressor(dictionary)
    compressed = brotli.compress(s, dictionary=dictionary)
    assert decompressor.decompress(compressed) == s
def brotli_decompressor():
    """Return a brotli decompressor carrying a zlib-like ``unused_data`` slot."""
    decompressor = brotli.Decompressor()
    # Mirror zlib's decompressobj interface, which exposes ``unused_data``.
    decompressor.unused_data = None
    return decompressor
def __init__(self):
    """Store a brotli decompressor and a uniform ``decompress`` callable.

    Bug fix: the original assigned plain locals (``_obj``, ``decompress``),
    so the decompressor was discarded when ``__init__`` returned and the
    instance gained no attributes at all.
    """
    self._obj = brotli.Decompressor()
    # brotlipy exposes .decompress; the brotli C bindings expose .process.
    if hasattr(self._obj, "decompress"):
        self.decompress = self._obj.decompress
    else:
        self.decompress = self._obj.process
def __init__(self, code, reason, headers, sock, connection=None,
             request_method=None):
    """Represent one HTTP response being read from *sock*.

    From the status code, request method, and headers this determines how
    the body is framed (content-length, chunked, or read-until-close) and
    which decompressor, if any, the body must be run through.
    """
    #: The reason phrase returned by the server.
    self.reason = reason
    #: The status code returned by the server.
    self.status = code
    #: The response headers. These are determined upon creation, assigned
    #: once, and never assigned again.
    self.headers = headers
    #: The response trailers. These are always initially ``None``.
    self.trailers = None

    # The socket this response is being sent over.
    self._sock = sock

    # Whether we expect the connection to be closed. If we do, we don't
    # bother checking for content-length, we just keep reading until
    # we no longer can.
    self._expect_close = False
    if b'close' in self.headers.get(b'connection', []):
        self._expect_close = True

    # The expected length of the body.
    if request_method != b'HEAD':
        try:
            self._length = int(self.headers[b'content-length'][0])
        except KeyError:
            # No content-length header: body length is unknown for now.
            self._length = None
    else:
        # HEAD responses never carry a body.
        self._length = 0

    # Whether we expect a chunked response.
    self._chunked = (b'chunked' in
                     self.headers.get(b'transfer-encoding', []))

    # When content-length is absent and response is not chunked,
    # body length is determined by connection closure.
    # https://tools.ietf.org/html/rfc7230#section-3.3.3
    if self._length is None and not self._chunked:
        # 200 response to a CONNECT request means that proxy has connected
        # to the target host and it will start forwarding everything sent
        # from the either side. Thus we must not try to read body of this
        # response. Socket of current connection will be taken over by
        # the code that has sent a CONNECT request.
        if not (request_method is not None and
                b'CONNECT' == request_method.upper() and
                code == 200):
            self._expect_close = True

    # This object is used for decompressing gzipped request bodies. Right
    # now we only support gzip because that's all the RFC mandates of us.
    # Later we'll add support for more encodings.
    # This 16 + MAX_WBITS nonsense is to force gzip. See this
    # Stack Overflow answer for more:
    # http://stackoverflow.com/a/2695466/1401686
    if b'gzip' in self.headers.get(b'content-encoding', []):
        self._decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS)
    elif b'br' in self.headers.get(b'content-encoding', []):
        self._decompressobj = brotli.Decompressor()
    elif b'deflate' in self.headers.get(b'content-encoding', []):
        self._decompressobj = DeflateDecoder()
    else:
        self._decompressobj = None

    # This is a reference that allows for the Response class to tell the
    # parent connection object to throw away its socket object. This is to
    # be used when the connection is genuinely closed, so that the user
    # can keep using the Connection object.
    # Strictly, we take a weakreference to this so that we don't set up a
    # reference cycle.
    if connection is not None:
        self._parent = weakref.ref(connection)
    else:
        self._parent = None

    self._buffered_data = b''
    self._chunker = None
print('This script expects to find a file "rom.zip" in the cwd')
print('This file requires the following modules from pypi')
print('brotlipy')
import brotli
print('zipfile')
import zipfile

# Unpack the ROM archive into ./rom (context manager closes the zip;
# the original also shadowed the builtin name ``zip``).
with zipfile.ZipFile('rom.zip', 'r') as rom_zip:
    rom_zip.extractall('rom')

if os.path.isfile('rom/system.new.dat.br'):
    # Stream-decompress the brotli payload so the whole image is never held
    # in memory.  NOTE: the original read 128 *bytes* per iteration while
    # its comment claimed "128mb"; 1 MiB chunks keep memory modest without
    # a nearly byte-at-a-time loop.  Both files are now closed via ``with``.
    d = brotli.Decompressor()
    with open('rom/system.new.dat.br', 'rb') as src, \
            open('rom/system.new.dat', 'wb') as dst:
        for chunk in iter(lambda: src.read(1024 * 1024), b''):
            dst.write(d.decompress(chunk))
    # Verify the brotli stream actually reached its end marker.
    d.finish()
    print('decompressed brotli')
import sdat2img
def __init__(self):
    """Create the wrapper's underlying brotli decompressor."""
    self._obj = brotli.Decompressor()
async def decode():
    """Asynchronously decompress brotli-encoded chunks pulled from ``reader``.

    Yields each decompressed chunk as it is produced.  After the upstream
    iterator is exhausted, verifies the brotli stream reached its end
    marker and raises a protocol validation error otherwise.
    """
    decoder = brotli.Decompressor()
    async for chunk in reader():
        yield decoder.process(chunk)
    # A truncated stream decompresses chunk-by-chunk without error; only
    # is_finished() reveals that the terminating block never arrived.
    if not decoder.is_finished():
        raise I.ProtocolValidationException("brotli compressed data incomplete")