def test_send_message_deflate_frame_bfinal(self):
        """send_message with the deflate-frame bfinal option must emit, per
        frame, an independent Z_FINISH-terminated deflate stream."""
        extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION)
        request = _create_request_from_rawdata(
            '', deflate_frame_request=extension)
        self.assertEquals(1, len(request.ws_extension_processors))
        deflate_frame_processor = request.ws_extension_processors[0]
        deflate_frame_processor.set_bfinal(True)
        msgutil.send_message(request, 'Hello')
        msgutil.send_message(request, 'World')

        expected = ''

        # Expected bytes for the first frame: a fresh raw-deflate stream
        # (negative wbits = no zlib header) finished with Z_FINISH, plus a
        # trailing 0x00 byte to match the processor's bfinal framing.
        compress = zlib.compressobj(
            zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)
        compressed_hello = compress.compress('Hello')
        compressed_hello += compress.flush(zlib.Z_FINISH)
        compressed_hello = compressed_hello + chr(0)
        # 0xc1 = FIN | RSV1 (compressed) | text opcode; %c encodes the length.
        expected += '\xc1%c' % len(compressed_hello)
        expected += compressed_hello

        # Second frame: bfinal implies no context takeover, so a brand-new
        # compressor must reproduce the wire bytes exactly.
        compress = zlib.compressobj(
            zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)
        compressed_world = compress.compress('World')
        compressed_world += compress.flush(zlib.Z_FINISH)
        compressed_world = compressed_world + chr(0)
        expected += '\xc1%c' % len(compressed_world)
        expected += compressed_world

        self.assertEqual(expected, request.connection.written_data())
Example #2
0
    def test_receive_message_deflate_stream(self):
        """Frames wrapped by the deflate-stream extension must decompress
        transparently, including across a Z_FINISH / new-stream boundary."""
        compress = zlib.compressobj(
            zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)

        # Two masked text frames inside one deflate stream, which is then
        # finished (terminated) with Z_FINISH.
        data = compress.compress('\x81\x85' + _mask_hybi('Hello'))
        data += compress.flush(zlib.Z_SYNC_FLUSH)
        data += compress.compress('\x81\x89' + _mask_hybi('WebSocket'))
        data += compress.flush(zlib.Z_FINISH)

        # A second, independent deflate stream follows the finished one.
        compress = zlib.compressobj(
            zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)

        data += compress.compress('\x81\x85' + _mask_hybi('World'))
        data += compress.flush(zlib.Z_SYNC_FLUSH)
        # Close frame
        data += compress.compress(
            '\x88\x8a' + _mask_hybi(struct.pack('!H', 1000) + 'Good bye'))
        data += compress.flush(zlib.Z_SYNC_FLUSH)

        request = _create_request_from_rawdata(data, deflate_stream=True)
        self.assertEqual('Hello', msgutil.receive_message(request))
        self.assertEqual('WebSocket', msgutil.receive_message(request))
        self.assertEqual('World', msgutil.receive_message(request))

        # Data frames alone must not have triggered a drain.
        self.assertFalse(request.drain_received_data_called)

        self.assertEqual(None, msgutil.receive_message(request))

        # Processing the close frame drains remaining compressed input.
        self.assertTrue(request.drain_received_data_called)
    def test_send_message_permessage_compress_deflate_fragmented_bfinal(self):
        """Fragmented send with permessage-deflate and bfinal: each fragment
        is an independent Z_FINISH-terminated stream; only the first fragment
        carries the compressed (RSV1) bit."""
        extension = common.ExtensionParameter(
            common.PERMESSAGE_COMPRESSION_EXTENSION)
        extension.add_parameter('method', 'deflate')
        request = _create_request_from_rawdata(
                      '', permessage_compression_request=extension)
        self.assertEquals(1, len(request.ws_extension_processors))
        compression_processor = (
            request.ws_extension_processors[0].get_compression_processor())
        compression_processor.set_bfinal(True)
        msgutil.send_message(request, 'Hello', end=False)
        msgutil.send_message(request, 'World', end=True)

        expected = ''

        # A fresh raw-deflate stream per fragment, finished with Z_FINISH
        # and followed by a 0x00 byte (matching the processor's framing).
        compress = zlib.compressobj(
            zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)
        compressed_hello = compress.compress('Hello')
        compressed_hello += compress.flush(zlib.Z_FINISH)
        compressed_hello = compressed_hello + chr(0)
        # 0x41 = RSV1 | text opcode with FIN clear (first fragment).
        expected += '\x41%c' % len(compressed_hello)
        expected += compressed_hello

        compress = zlib.compressobj(
            zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)
        compressed_world = compress.compress('World')
        compressed_world += compress.flush(zlib.Z_FINISH)
        compressed_world = compressed_world + chr(0)
        # 0x80 = FIN | continuation opcode (final fragment, no RSV1).
        expected += '\x80%c' % len(compressed_world)
        expected += compressed_world

        self.assertEqual(expected, request.connection.written_data())
Example #4
0
    def setUp(self):
        """Prepare a lorem-ipsum corpus plus raw-deflate, zlib-framed and
        gzip-framed compressed copies of it for the decompression tests."""
        super(ZlibTest, self).setUp()
        self.text = b"""
Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Maecenas porttitor congue massa. Fusce posuere, magna sed pulvinar ultricies, purus lectus malesuada libero, sit amet commodo magna eros quis urna.
Nunc viverra imperdiet enim. Fusce est. Vivamus a tellus.
Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Proin pharetra nonummy pede. Mauris et orci.
Aenean nec lorem. In porttitor. Donec laoreet nonummy augue.
Suspendisse dui purus, scelerisque at, vulputate vitae, pretium mattis, nunc. Mauris eget neque at sem venenatis eleifend. Ut nonummy.
Fusce aliquet pede non pede. Suspendisse dapibus lorem pellentesque magna. Integer nulla.
Donec blandit feugiat ligula. Donec hendrerit, felis et imperdiet euismod, purus ipsum pretium metus, in lacinia nulla nisl eget sapien. Donec ut est in lectus consequat consequat.
Etiam eget dui. Aliquam erat volutpat. Sed at lorem in nunc porta tristique.
Proin nec augue. Quisque aliquam tempor magna. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas.
Nunc ac magna. Maecenas odio dolor, vulputate vel, auctor ac, accumsan id, felis. Pellentesque cursus sagittis felis.
Pellentesque porttitor, velit lacinia egestas auctor, diam eros tempus arcu, nec vulputate augue magna vel risus. Cras non magna vel ante adipiscing rhoncus. Vivamus a mi.
Morbi neque. Aliquam erat volutpat. Integer ultrices lobortis eros.
Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Proin semper, ante vitae sollicitudin posuere, metus quam iaculis nibh, vitae scelerisque nunc massa eget pede. Sed velit urna, interdum vel, ultricies vel, faucibus at, quam.
Donec elit est, consectetuer eget, consequat quis, tempus quis, wisi. In in nunc. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos hymenaeos.
Donec ullamcorper fringilla eros. Fusce in sapien eu purus dapibus commodo. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus.
Cras faucibus condimentum odio. Sed ac ligula. Aliquam at eros.
Etiam at ligula et tellus ullamcorper ultrices. In fermentum, lorem non cursus porttitor, diam urna accumsan lacus, sed interdum wisi nibh nec nisl. Ut tincidunt volutpat urna.
Mauris eleifend nulla eget mauris. Sed cursus quam id felis. Curabitur posuere quam vel nibh.
Cras dapibus dapibus nisl. Vestibulum quis dolor a felis congue vehicula. Maecenas pede purus, tristique ac, tempus eget, egestas quis, mauris.
Curabitur non eros. Nullam hendrerit bibendum justo. Fusce iaculis, est quis lacinia pretium, pede metus molestie lacus, at gravida wisi ante at libero.
"""
        # Negative wbits -> raw deflate (no header); positive -> zlib framing.
        deflate_compress = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
        zlib_compress = zlib.compressobj(9, zlib.DEFLATED, zlib.MAX_WBITS)
        self.deflate_data = deflate_compress.compress(self.text) + deflate_compress.flush()
        self.zlib_data = zlib_compress.compress(self.text) + zlib_compress.flush()
        self.gzip_data = create_gzip(self.text)
Example #5
0
    def test_receive_message_deflate_frame_comp_bit(self):
        """Compressed and uncompressed frames may be interleaved: the RSV1
        (compression) bit must be honored per frame."""
        compress = zlib.compressobj(
            zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)

        data = ''

        compressed_hello = compress.compress('Hello')
        compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH)
        # Drop the 00 00 ff ff tail that Z_SYNC_FLUSH appends.
        compressed_hello = compressed_hello[:-4]
        # 0xc1 = FIN | RSV1 | text; 0x80 in the length byte marks masking.
        data += '\xc1%c' % (len(compressed_hello) | 0x80)
        data += _mask_hybi(compressed_hello)

        # A plain (uncompressed, RSV1 clear) masked 'Hello' frame between.
        data += '\x81\x85' + _mask_hybi('Hello')

        compress = zlib.compressobj(
            zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)

        compressed_2nd_hello = compress.compress('Hello')
        compressed_2nd_hello += compress.flush(zlib.Z_SYNC_FLUSH)
        compressed_2nd_hello = compressed_2nd_hello[:-4]
        data += '\xc1%c' % (len(compressed_2nd_hello) | 0x80)
        data += _mask_hybi(compressed_2nd_hello)

        extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION)
        request = _create_request_from_rawdata(
            data, deflate_frame_request=extension)
        for i in xrange(3):
            self.assertEqual('Hello', msgutil.receive_message(request))
Example #6
0
    async def _send_frame(self, message: bytes, opcode: int,
                          compress: Optional[int]=None) -> None:
        """Send a frame over the websocket with message as its payload.

        A truthy *compress* enables one-shot compression (window bits) for
        this frame only; otherwise self.compress, if set, enables
        connection-wide compression with a shared, stateful compressor.
        """
        if self._closing:
            # NOTE(review): only warns -- the frame is still written below.
            ws_logger.warning('websocket connection is closing.')

        rsv = 0

        # Only compress larger packets (disabled)
        # Does small packet needs to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        if (compress or self.compress) and opcode < 8:
            if compress:
                # Do not set self._compress if compressing is for this frame
                compressobj = zlib.compressobj(wbits=-compress)
            else:  # self.compress
                if not self._compressobj:
                    self._compressobj = zlib.compressobj(wbits=-self.compress)
                compressobj = self._compressobj

            message = compressobj.compress(message)
            message = message + compressobj.flush(
                zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH)
            # Strip the 00 00 ff ff flush tail (permessage-deflate framing).
            if message.endswith(_WS_DEFLATE_TRAILING):
                message = message[:-4]
            # RSV1 marks the frame as compressed.
            rsv = rsv | 0x40

        msg_length = len(message)

        use_mask = self.use_mask
        if use_mask:
            mask_bit = 0x80
        else:
            mask_bit = 0

        # Use the shortest header form the payload length allows.
        if msg_length < 126:
            header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
        elif msg_length < (1 << 16):
            header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
        else:
            header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
        if use_mask:
            mask = self.randrange(0, 0xffffffff)
            mask = mask.to_bytes(4, 'big')
            message = bytearray(message)
            _websocket_mask(mask, message)
            self.transport.write(header + mask + message)
            self._output_size += len(header) + len(mask) + len(message)
        else:
            # For large payloads avoid copying just to concatenate header.
            if len(message) > MSG_SIZE:
                self.transport.write(header)
                self.transport.write(message)
            else:
                self.transport.write(header + message)

            self._output_size += len(header) + len(message)

        # Apply write backpressure once enough bytes are buffered.
        if self._output_size > self._limit:
            self._output_size = 0
            await self.protocol._drain_helper()
Example #7
0
    def __init__(self, level = 6):
        """
Constructor __init__(GzipCompressor)

:param level: zlib compression level (0-9)

:since: v1.0.0
        """

        self.compressor = None
        """
Deflate compressor instance
        """
        self.crc32 = None
        """
CRC32 from previous run
        """
        self.header = None
        """
Gzip header
        """
        self.size = None
        """
Total size of compressed data
        """

        # Use the zlib magic +16 to generate the GZip header and trailer on flush() if supported
        try: self.compressor = compressobj(level, wbits = 16 + MAX_WBITS)
        except TypeError:
            # Fallback for zlib builds without wbits keyword support:
            # build the gzip framing by hand.  deflate_flag is the gzip
            # XFL byte (2 = best compression, 4 = fastest).
            self.compressor = compressobj(level)

            if (level == 9): deflate_flag = 2
            elif (level == 1): deflate_flag = 4
            else: deflate_flag = 0

            self.header = pack("<8s2B", Binary.bytes("\x1f\x8b" + ("\x00" if (level == 0) else "\x08") + "\x00\x00\x00\x00\x00"), deflate_flag, 255)
            self.size = 0
Example #8
0
    def test_receive_message_permessage_deflate_compression(self):
        """A compressed message split across two frames (first frame carries
        RSV1, continuation frame does not) must reassemble and decompress."""
        compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)

        data = ""

        compressed_hello = compress.compress("HelloWebSocket")
        compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH)
        # Drop the 00 00 ff ff tail that Z_SYNC_FLUSH appends.
        compressed_hello = compressed_hello[:-4]
        # Use floor division so the result stays an int: plain '/' yields a
        # float under Python 3 semantics, breaking the slicing and the
        # '%c' length formatting below.
        split_position = len(compressed_hello) // 2
        # 0x41 = RSV1 | text opcode, FIN clear (first fragment); 0x80 masks.
        data += "\x41%c" % (split_position | 0x80)
        data += _mask_hybi(compressed_hello[:split_position])

        # 0x80 = FIN | continuation opcode (second, final fragment).
        data += "\x80%c" % ((len(compressed_hello) - split_position) | 0x80)
        data += _mask_hybi(compressed_hello[split_position:])

        # A second message in a fresh compression context.
        compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)

        compressed_world = compress.compress("World")
        compressed_world += compress.flush(zlib.Z_SYNC_FLUSH)
        compressed_world = compressed_world[:-4]
        data += "\xc1%c" % (len(compressed_world) | 0x80)
        data += _mask_hybi(compressed_world)

        # Close frame
        data += "\x88\x8a" + _mask_hybi(struct.pack("!H", 1000) + "Good bye")

        extension = common.ExtensionParameter(common.PERMESSAGE_COMPRESSION_EXTENSION)
        extension.add_parameter("method", "deflate")
        request = _create_request_from_rawdata(data, permessage_compression_request=extension)
        self.assertEqual("HelloWebSocket", msgutil.receive_message(request))
        self.assertEqual("World", msgutil.receive_message(request))

        self.assertEqual(None, msgutil.receive_message(request))
Example #9
0
    def test_receive_message_deflate_frame(self):
        """deflate-frame decoding with context takeover: the second frame
        continues the first frame's compression context; a fresh stream
        then follows the Z_FINISH boundary."""
        compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)

        data = ""

        compressed_hello = compress.compress("Hello")
        compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH)
        # Drop the 00 00 ff ff tail appended by Z_SYNC_FLUSH.
        compressed_hello = compressed_hello[:-4]
        # 0xc1 = FIN | RSV1 (compressed) | text; 0x80 marks masking.
        data += "\xc1%c" % (len(compressed_hello) | 0x80)
        data += _mask_hybi(compressed_hello)

        # Same compressor (context takeover); this frame finishes the
        # stream and appends an explicit 0x00 terminator byte.
        compressed_websocket = compress.compress("WebSocket")
        compressed_websocket += compress.flush(zlib.Z_FINISH)
        compressed_websocket += "\x00"
        data += "\xc1%c" % (len(compressed_websocket) | 0x80)
        data += _mask_hybi(compressed_websocket)

        compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)

        compressed_world = compress.compress("World")
        compressed_world += compress.flush(zlib.Z_SYNC_FLUSH)
        compressed_world = compressed_world[:-4]
        data += "\xc1%c" % (len(compressed_world) | 0x80)
        data += _mask_hybi(compressed_world)

        # Close frame
        data += "\x88\x8a" + _mask_hybi(struct.pack("!H", 1000) + "Good bye")

        extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION)
        request = _create_request_from_rawdata(data, deflate_frame_request=extension)
        self.assertEqual("Hello", msgutil.receive_message(request))
        self.assertEqual("WebSocket", msgutil.receive_message(request))
        self.assertEqual("World", msgutil.receive_message(request))

        self.assertEqual(None, msgutil.receive_message(request))
Example #10
0
def create_CompressedDataBody(alg, d):
    """Create a CompressedDataBody instance.

    :Parameters:
        - `alg`: integer compressed algorithm constant
        - `d`: string data to compress

    :Returns: `CompressedDataBody` instance
    """
    if alg == COMP_ZIP:
        # Raw deflate stream (wbits=-15, no zlib header), as in zipfile.
        zobj = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
        data = zobj.compress(d) + zobj.flush()
    elif alg == COMP_ZLIB:
        # Default compressobj() emits a zlib-framed stream.
        zobj = zlib.compressobj()
        data = zobj.compress(d) + zobj.flush()
    elif alg == COMP_UNCOMPRESSED:
        data = d
    else:
        raise NotImplementedError("Unsupported compression algorithm->(%s)" % alg)
    # The body is a one-byte algorithm tag followed by the payload.
    return CompressedDataBody(''.join([chr(alg), data]))
 def start_compress_message(self):
     """Begin compressing a new message, creating a fresh raw-deflate
     compressor when required by no-context-takeover (or on first use)."""
     # compressobj([level[, method[, wbits[, mem_level[, strategy]]]]])
     # http://bugs.python.org/issue19278
     # http://hg.python.org/cpython/rev/c54c8e71b79a
     if self._is_server:
         if self._compressor is None or self.server_no_context_takeover:
             self._compressor = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -self.server_max_window_bits, self.mem_level)
     else:
         if self._compressor is None or self.client_no_context_takeover:
             self._compressor = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -self.client_max_window_bits, self.mem_level)
Example #12
0
 def _initialize_compressor(self):
   """Create the compressor object matching self._compression_type."""
   if self._compression_type == CompressionTypes.BZIP2:
     self._compressor = bz2.BZ2Compressor()
   elif self._compression_type == CompressionTypes.DEFLATE:
     # Default wbits: zlib-framed deflate stream.
     self._compressor = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                                         zlib.DEFLATED)
   else:
     assert self._compression_type == CompressionTypes.GZIP
     # self._gzip_mask (defined elsewhere) presumably selects gzip
     # framing, e.g. zlib.MAX_WBITS | 16 -- confirm at its definition.
     self._compressor = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                                         zlib.DEFLATED, self._gzip_mask)
 def startCompressMessage(self):
     """(Re)create the per-direction raw-deflate compressor, honoring the
     negotiated no-context-takeover and max-window-bits parameters."""
     if self._isServer:
         if self._compressor is None or self.s2c_no_context_takeover:
             self._compressor = zlib.compressobj(
                 zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -self.s2c_max_window_bits
             )
     else:
         if self._compressor is None or self.c2s_no_context_takeover:
             self._compressor = zlib.compressobj(
                 zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -self.c2s_max_window_bits
             )
Example #14
0
    def test_multi_decoding_gzip_gzip(self):
        """Content encoded twice with gzip ('gzip, gzip') decodes fully."""
        payload = b'foo'
        # Apply gzip framing twice (wbits = 16 + MAX_WBITS -> gzip).
        for _ in range(2):
            gz = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
            payload = gz.compress(payload) + gz.flush()

        r = HTTPResponse(BytesIO(payload),
                         headers={'content-encoding': 'gzip, gzip'})

        assert r.data == b'foo'
Example #15
0
def deflate_encoder(level=None):
    """Return a stateful encoder callable ``enc(data, final)`` producing
    zlib-compressed output; *level* optionally sets the compression level."""
    zobj = zlib.compressobj() if level is None else zlib.compressobj(level)

    def enc(data, final):
        """Compress *data*; when *final* is true, also flush the stream."""
        out = zobj.compress(data)
        if final:
            out += zobj.flush()
        return out

    return enc
Example #16
0
def writeblock(mapdir, px, py, pz, block):
	"""Write a version-17 map block file for the block at (px, py, pz).

	File layout: version byte, flags byte, zlib-compressed node data,
	zlib-compressed node metadata, object count, static objects and a
	timestamp.
	"""

	sectordir = mapdir + get_sector_dir(px, pz)

	try:
		os.makedirs(sectordir)
	except OSError:
		# The sector directory may already exist.
		pass

	path = sectordir + "/" + to4h(py)

	print("writing block file " + path)

	f = open(path, "wb")

	# version
	version = 17
	f.write(struct.pack('B', version))

	# flags
	# 0x01=is_undg, 0x02=dn_diff, 0x04=lighting_expired
	flags = 0 + 0x02 + 0x04
	f.write(struct.pack('B', flags))

	# data, zlib-compressed.  The original code discarded the return value
	# of compress() (losing any output emitted before flush()) and wrote
	# the 0x78 0x9c zlib magic by hand.  compressobj() already emits the
	# zlib header itself, so writing the complete compressed stream is
	# both correct and keeps the expected leading magic bytes.
	c_obj = zlib.compressobj()
	f.write(c_obj.compress(block.serialize_data()))
	f.write(c_obj.flush())

	# node metadata, same framing
	c_obj = zlib.compressobj()
	f.write(c_obj.compress(block.serialize_nodemeta()))
	f.write(c_obj.flush())

	# mapblockobject count
	f.write(ser_u16(0))

	# static objects
	f.write(block.serialize_staticobj())

	# timestamp (0xffffffff = none)
	f.write(ser_u32(0xffffffff))

	f.close()
Example #17
0
 def _add_packet_to_queue(self, packet, start_send_cb=None, end_send_cb=None):
     """Encode *packet*, optionally zlib-compress each chunk, frame it with
     a protocol header and push it onto the write queue under the write
     lock.  start_send_cb fires with the first chunk written; end_send_cb
     fires with the last chunk (index==0)."""
     packets = self.encode(packet)
     if not self.raw_packets:
         assert len(packets)==1
     try:
         self._write_lock.acquire()
         counter = 0
         for index,compress,data in packets:
             if self.raw_packets:
                 if compress and self._compression_level>0:
                     level = self._compression_level
                     # One shared, stateful compressor; Z_SYNC_FLUSH keeps
                     # the stream decodable at each packet boundary.
                     if self._compressor is None:
                         self._compressor = zlib.compressobj(level)
                     data = self._compressor.compress(data)+self._compressor.flush(zlib.Z_SYNC_FLUSH)
                 else:
                     level = 0
                 l = len(data)
                 #'p' + protocol-version + compression_level + packet_index + packet_size
                 header = struct.pack('!cBBBL', "P", 0, level, index, l)
             else:
                 assert index==0
                 l = len(data)
                 header = ("PS%014d" % l).encode('latin1')
             scb, ecb = None, None
             #fire the start_send_callback just before the first packet is processed:
             if counter==0:
                 scb = start_send_cb
             #fire the end_send callback when the last packet (index==0) makes it out:
             if index==0:
                 ecb = end_send_cb
             if l<4096 and sys.version<'3':
                 #send size and data together (low copy overhead):
                 self._queue_write(header+data, scb, ecb, True)
             else:
                 self._queue_write(header)
                 self._queue_write(data, scb, ecb, True)
             counter += 1
     finally:
         # A set_deflate control packet changes the compression level for
         # subsequent packets, replacing (or dropping) the compressor.
         if packet[0]=="set_deflate":
             level = packet[1]
             log("set_deflate packet, changing compressor from %s to level=%s", self._compression_level, level)
             if self._compression_level!=level or self._compressor is None:
                 if level>0:
                     self._compressor = zlib.compressobj(level)
                 else:
                     self._compressor = None
         self.output_packetcount += 1
         self._write_lock.release()
Example #18
0
def png_write(width, height, image):
    """Render an X11 *image* of width x height pixels into a PNG byte
    string (bit depth 8, color type 2 = truecolor RGB, no interlace)."""
    def _chunk(name, data=""):
        # A PNG chunk: 4-byte big-endian length, type, payload, then a
        # CRC computed over type + payload.
        length = struct.pack(">I", len(data))
        crc = struct.pack(">i", crc32(name + data))
        return length + name + data + crc

    # 8-byte PNG signature followed by the IHDR chunk.
    _data = struct.pack(">8B", 137, 80, 78, 71, 13, 10, 26, 10)
    _data += _chunk("IHDR", struct.pack(">2I5B", width, height, 8, 2, 0, 0, 0))

    image_data = ""
    compressor = zlib.compressobj()
    for y in xrange(1, height+1):
        # Every PNG scanline must begin with a filter-type byte; the
        # original omitted it, producing an undecodable IDAT stream.
        # 0x00 = filter type None.
        row = "\x00"
        for x in xrange(1, width+1):
            # NOTE(review): coordinates start at 1 -- confirm the XGetPixel
            # origin expected by the caller.
            p = x11.XGetPixel(image, x, y)
            # Keep the low three bytes (RGB) of the 32-bit pixel value.
            p = struct.pack(">I", p)[1:]
            row += p
        image_data += compressor.compress(row)
    image_data += compressor.flush()

    _data += _chunk("IDAT", image_data)

    _data += _chunk("IEND")
    return _data
Example #19
0
  def testZLibFlushRecord(self):
    """A ZLIB-compressed record file containing a Z_FULL_FLUSH point after
    every byte, plus trailing flushed blocks, must still be readable."""
    fn = self._WriteRecordsToFile([b"small record"], "small_record")
    with open(fn, "rb") as h:
      buff = h.read()

    # creating more blocks and trailing blocks shouldn't break reads
    compressor = zlib.compressobj(9, zlib.DEFLATED, zlib.MAX_WBITS)

    output = b""
    for c in buff:
      # Iterating bytes yields ints on Python 3; normalize to bytes.
      if isinstance(c, int):
        c = six.int2byte(c)
      output += compressor.compress(c)
      output += compressor.flush(zlib.Z_FULL_FLUSH)

    output += compressor.flush(zlib.Z_FULL_FLUSH)
    output += compressor.flush(zlib.Z_FULL_FLUSH)
    output += compressor.flush(zlib.Z_FINISH)

    # overwrite the original file with the compressed data
    with open(fn, "wb") as h:
      h.write(output)

    # The reader must recover the original record from the rewritten file.
    with self.test_session() as sess:
      options = tf_record.TFRecordOptions(
          compression_type=TFRecordCompressionType.ZLIB)
      reader = io_ops.TFRecordReader(name="test_reader", options=options)
      queue = data_flow_ops.FIFOQueue(1, [dtypes.string], shapes=())
      key, value = reader.read(queue)
      queue.enqueue(fn).run()
      queue.close().run()
      k, v = sess.run([key, value])
      self.assertTrue(compat.as_text(k).startswith("%s:" % fn))
      self.assertAllEqual(b"small record", v)
Example #20
0
    def __init__(self, conn):
        """Wrap an accepted client socket: secure it with TLS and set up
        per-connection zlib streams, buffers and session state."""
        asyncore.dispatcher_with_send.__init__(self, conn)

        self.ssled = False
        self.secure_connection(certfile="server.passless.crt", keyfile="server.passless.key", server_side=True)

        self.consumed_ace = False
        self.data = ""
        self.binary_mode = False
        # Stateful streams: the protocol compresses the whole connection,
        # so one (de)compressor pair lives for the connection's lifetime.
        self.decompressor = zlib.decompressobj()
        self.compressor = zlib.compressobj()
        self.unzipped_input = ""
        self.unzipped_output_buffer = ""
        self.output_buffer = ""
        self.speech = dict()
        self.pong = 1
        self.ping = 0
        self.httpClient = AsyncOpenHttp(self.handle_google_data, self.handle_google_failure)
        self.gotGoogleAnswer = False
        self.googleData = None
        self.lastRequestId = None
        self.dictation = None
        self.dbConnection = db.getConnection()
        self.assistant = None
        self.sendLock = threading.Lock()
        self.current_running_plugin = None
        self.current_location = None
        self.plugin_lastAceId = None
        self.logger = logging.getLogger("logger")
Example #21
0
 def __init__(self, filename = None, mode = None, compresslevel = 9, fileobj = None, mtime = None):
     """Open a gzip file for reading or writing.

     At least one of *filename* / *fileobj* must be given; missing values
     are derived from the other.  Binary mode is forced.  Raises IOError
     for modes other than read ('r') or write/append ('w'/'a').
     """
     if mode and 'b' not in mode:
         mode += 'b'
     if fileobj is None:
         fileobj = self.myfileobj = __builtin__.open(filename, mode or 'rb')
     if filename is None:
         if hasattr(fileobj, 'name'):
             filename = fileobj.name
         else:
             filename = ''
     if mode is None:
         if hasattr(fileobj, 'mode'):
             mode = fileobj.mode
         else:
             mode = 'rb'
     if mode[0:1] == 'r':
         self.mode = READ
         self._new_member = True
         self.extrabuf = ''
         self.extrasize = 0
         self.extrastart = 0
         self.name = filename
         self.min_readsize = 100
     elif mode[0:1] == 'w' or mode[0:1] == 'a':
         self.mode = WRITE
         self._init_write(filename)
         # Raw deflate (negative wbits): the gzip header and trailer are
         # written manually by the class, not by zlib.
         self.compress = zlib.compressobj(compresslevel, zlib.DEFLATED, -zlib.MAX_WBITS, zlib.DEF_MEM_LEVEL, 0)
     else:
         raise IOError, 'Mode ' + mode + ' not supported'
     self.fileobj = fileobj
     self.offset = 0
     self.mtime = mtime
     if self.mode == WRITE:
         self._write_gzip_header()
Example #22
0
		def start(self, listener):
			"""
			This method is used to make the actual connection to the server and prepare
			for audio streaming.

			listener is a HoundListener (or derived class) object
			"""
			self.audioFinished = False
			self.buffer = ''
			self.HoundRequestInfo['RequestID'] = str(uuid.uuid4())
			self.conn = HTPConnection(self.hostname, self.port)
			# The server opens with a challenge message carrying a nonce.
			htpMsg = self.conn.ReadMessage()
			challengeMsg = json.loads(htpMsg.data)
			if not challengeMsg.has_key('status') or challengeMsg['status'] <> 'ok':
				raise Exception("Error reading challenge message")

			nonce = challengeMsg['nonce']
			signature = self._authenticate(nonce)

			## Startup the listening thread (above)
			self.callbackTID = threading.Thread(target = self._callback, args = (listener,))
			self.callbackTID.start()

			self.conn.SendMessage(HTPMessage(HTPMessage.HTP_TYPE_JSON,
					json.dumps({'access_id': self.clientID, 'signature': signature, 'version': '1.1'})))
			HoundRequestInfo = json.dumps(self.HoundRequestInfo)
			# wbits = MAX_WBITS | 16 selects gzip framing for the request info.
			gzip_compressor = zlib.compressobj(9, zlib.DEFLATED, zlib.MAX_WBITS | 16)
			msg = gzip_compressor.compress(HoundRequestInfo) + gzip_compressor.flush()
			self.conn.SendMessage(HTPMessage(HTPMessage.HTP_TYPE_BINARY, msg))

			# Speex codec header opens the audio stream.
			spxHeader = pySHSpeex.Init()
			self.conn.SendMessage(HTPMessage(HTPMessage.HTP_TYPE_BINARY, spxHeader))
Example #23
0
    def write_bytearray(self, filename, rows):
        """Write *rows* of RGBA pixel data to *filename* as a PNG file,
        compressing scanlines incrementally in chunk_limit-sized batches."""
        stream = BinaryStream(big_endian=True)
        # http://www.w3.org/TR/PNG/#5PNG-file-signature
        stream.write(PyPngWriter._SIGNATURE)

        # http://www.w3.org/TR/PNG/#11IHDR
        PyPngWriter._write_chunk(
            stream, b'IHDR',
            struct.pack('!II B B BBB', self.width, self.height, 8, 6, 0, 0, 0))

        # http://www.w3.org/TR/PNG/#11IDAT
        zobj = zlib.compressobj()
        pending = bytearray()
        for row in rows:
            pending.append(0)  # filter type 0 (None) for every scanline
            pending.extend(row)
            if len(pending) > self.chunk_limit:
                piece = zobj.compress(bytes(pending))
                if len(piece):
                    PyPngWriter._write_chunk(stream, b'IDAT', piece)
                pending = bytearray()
        tail = zobj.compress(bytes(pending)) if len(pending) else bytes()
        flushed = zobj.flush()
        if len(tail) or len(flushed):
            PyPngWriter._write_chunk(stream, b'IDAT', tail + flushed)

        # http://www.w3.org/TR/PNG/#11IEND
        PyPngWriter._write_chunk(stream, b'IEND')
        stream.write_file(filename)
Example #24
0
def compress(text):
    """ generate a possibly-compressed representation of text """
    # Returns a (prefix, data) pair: prefix '' means data is either raw
    # short/NUL-led text or zlib-compressed; prefix 'u' marks text stored
    # uncompressed because compression did not pay off.
    if not text:
        return ("", text)
    l = len(text)
    bin = None
    if l < 44:
        # Too short for compression to help; fall through to raw storage.
        pass
    elif l > 1000000:
        # zlib makes an internal copy, thus doubling memory usage for
        # large files, so lets do this in pieces
        z = zlib.compressobj()
        p = []
        pos = 0
        while pos < l:
            pos2 = pos + 2**20
            p.append(z.compress(text[pos:pos2]))
            pos = pos2
        p.append(z.flush())
        if sum(map(len, p)) < l:
            bin = "".join(p)
    else:
        bin = _compress(text)
    if bin is None or len(bin) > l:
        # Store uncompressed: a leading NUL already distinguishes raw
        # data from compressed output, otherwise tag with 'u'.
        if text[0] == '\0':
            return ("", text)
        return ('u', text)
    return ("", bin)
def compress(body, compress_level):
    """Compress 'body' at the given compress_level.

    Yields the members of a gzip stream: fixed header, deflate-compressed
    chunks for each piece of *body*, then the CRC32 and size trailer.
    """
    import zlib

    # See http://www.gzip.org/zlib/rfc-gzip.html
    yield ntob("\x1f\x8b")  # ID1 and ID2: gzip marker
    yield ntob("\x08")  # CM: compression method
    yield ntob("\x00")  # FLG: none set
    # MTIME: 4 bytes
    yield struct.pack("<L", int(time.time()) & int("FFFFFFFF", 16))
    yield ntob("\x02")  # XFL: max compression, slowest algo
    yield ntob("\xff")  # OS: unknown

    crc = zlib.crc32(ntob(""))
    size = 0
    # Raw deflate (negative wbits): the gzip framing is emitted manually.
    zobj = zlib.compressobj(compress_level, zlib.DEFLATED, -zlib.MAX_WBITS, zlib.DEF_MEM_LEVEL, 0)
    for line in body:
        size += len(line)
        crc = zlib.crc32(line, crc)
        yield zobj.compress(line)
    yield zobj.flush()

    # CRC32: 4 bytes
    yield struct.pack("<L", crc & int("FFFFFFFF", 16))
    # ISIZE: 4 bytes
    yield struct.pack("<L", size & int("FFFFFFFF", 16))
Example #26
0
    def __init__(self, name, uncompressed_data_to_preload, level=None):
        """
        Initializes the compressor. Usually, you'll want to use a
        helper method that reads a description blurb based on
        PLCD_SCHEME_JSON_SCHEMA.

        :param name: Name of the compressor, or None to disable version tags
        :type name: str or None

        :param uncompressed_data_to_preload:
            data you wish to seed the compressor with
        :type uncompressed_data_to_preload: str

        :param level: zlib compression level, or None to use the default
        :type level: int or None
        """
        self.name = str(name) if name is not None else None
        self.name_tag = str(name) + ':' if name is not None else ''

        self.level = level

        # NOTE(review): the truthiness test means level=0 silently falls
        # back to the default compression level -- confirm intent.
        _compressobj = zlib.compressobj(*([level] if level else []))
        _decompressobj = zlib.decompressobj()

        assert isinstance(uncompressed_data_to_preload, str)
        # Prime both streams with the shared seed data; Z_SYNC_FLUSH keeps
        # the stream open so copies can continue (de)compressing from this
        # warmed-up state.
        self.preloaded_compressed = (
            _compressobj.compress(uncompressed_data_to_preload) +
            _compressobj.flush(zlib.Z_SYNC_FLUSH)
        )
        _decompressobj.decompress(self.preloaded_compressed)

        # To prevent bugs, we don't let anyone actually access
        # _new_compressor / _new_decompressor
        self._new_decompressor = _decompressobj.copy
        self._new_compressor = _compressobj.copy
Example #27
0
 def _write_block(self, block):
     """Compress *block* (<= 64 KiB) and write it as a single BGZF member:
     gzip header with BC extra field, deflate data, CRC32 and length."""
     #print("Saving %i bytes" % len(block))
     start_offset = self._handle.tell()
     assert len(block) <= 65536
     #Giving a negative window bits means no gzip/zlib headers, -15 used in samtools
     c = zlib.compressobj(self.compresslevel,
                          zlib.DEFLATED,
                          -15,
                          zlib.DEF_MEM_LEVEL,
                          0)
     compressed = c.compress(block) + c.flush()
     del c
     assert len(compressed) < 65536, "TODO - Didn't compress enough, try less data in this block"
     crc = zlib.crc32(block)
     #Should cope with a mix of Python platforms...
     # NOTE(review): this sign-dependent packing is dead code -- crc is
     # unconditionally recomputed and repacked a few lines below.
     if crc < 0:
         crc = struct.pack("<i", crc)
     else:
         crc = struct.pack("<I", crc)
     bsize = struct.pack("<H", len(compressed)+25)  # includes -1
     crc = struct.pack("<I", zlib.crc32(block) & 0xffffffffL)
     uncompressed_length = struct.pack("<I", len(block))
     #Fixed 16 bytes,
     # gzip magic bytes (4) mod time (4),
     # gzip flag (1), os (1), extra length which is six (2),
     # sub field which is BC (2), sub field length of two (2),
     #Variable data,
     #2 bytes: block length as BC sub field (2)
     #X bytes: the data
     #8 bytes: crc (4), uncompressed data length (4)
     data = _bgzf_header + bsize + compressed + crc + uncompressed_length
     self._handle.write(data)
Example #28
0
 def dump_inventory(self):
     """Write the objects.inv inventory file: a plain-text header followed
     by one zlib-compressed listing of every object in every domain."""
     self.info(bold('dumping object inventory... '), nonl=True)
     f = open(path.join(self.outdir, INVENTORY_FILENAME), 'wb')
     try:
         f.write((u'# Sphinx inventory version 2\n'
                  u'# Project: %s\n'
                  u'# Version: %s\n'
                  u'# The remainder of this file is compressed using zlib.\n'
                  % (self.config.project, self.config.version)).encode('utf-8'))
         # Everything after the header is one continuous zlib stream.
         compressor = zlib.compressobj(9)
         for domainname, domain in sorted(self.env.domains.items()):
             for name, dispname, type, docname, anchor, prio in \
                     sorted(domain.get_objects()):
                 if anchor.endswith(name):
                     # this can shorten the inventory by as much as 25%
                     anchor = anchor[:-len(name)] + '$'
                 uri = self.get_target_uri(docname)
                 if anchor:
                     uri += '#' + anchor
                 if dispname == name:
                     # '-' is the inventory shorthand for "same as name".
                     dispname = u'-'
                 f.write(compressor.compress(
                     (u'%s %s:%s %s %s %s\n' % (name, domainname, type,
                                                prio, uri, dispname)).encode('utf-8')))
         f.write(compressor.flush())
     finally:
         f.close()
     self.info('done')
Example #29
0
def compress(contents, request):
    """Gzip-compress *contents* and mark the response as gzip-encoded.

    Sets the ``content-encoding`` header on *request* and returns the
    compressed body.
    """
    request.setHeader("content-encoding", "gzip")
    # wbits = MAX_WBITS + 16 selects a gzip (not zlib) wrapper.
    compressor = zlib.compressobj(6, zlib.DEFLATED, zlib.MAX_WBITS + 16,
                                  zlib.DEF_MEM_LEVEL, 0)
    return compressor.compress(contents) + compressor.flush()
Example #30
0
    def writebuffer(self, buffer, zinfo_or_arcname, compress_type=None):
        """Stream the contents of the file-like *buffer* into the archive.

        *zinfo_or_arcname* is either a ready-made ``zipfile.ZipInfo`` or an
        archive name, in which case a ZipInfo stamped with the current time
        is created.  *compress_type*, if given, overrides the entry's
        compression method.  The local header is written first with zeroed
        CRC/sizes, then patched in place once the data has been streamed.
        """
        if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
            zinfo = zipfile.ZipInfo(
                filename=zinfo_or_arcname,
                date_time=time.localtime(time.time())[:6])

            zinfo.compress_type = self.compression
            # rw------- permission bits in the upper 16 bits of external_attr.
            # 0o600 replaces the Python-2-only octal literal 0600 (a syntax
            # error on Python 3); the value is identical.
            zinfo.external_attr = 0o600 << 16

        else:
            zinfo = zinfo_or_arcname

        if compress_type is not None:
            zinfo.compress_type = compress_type

        zinfo.file_size = file_size = 0
        zinfo.flag_bits = 0x00
        zinfo.header_offset = self.fp.tell()

        self._writecheck(zinfo)
        self._didModify = True

        # Placeholder CRC/sizes: corrected after the data is written.
        zinfo.CRC = CRC = 0
        zinfo.compress_size = compress_size = 0
        self.fp.write(zinfo.FileHeader())
        if zinfo.compress_type == zipfile.ZIP_DEFLATED:
            # Negative wbits: raw deflate stream, as the ZIP format requires.
            cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
        else:
            cmpr = None

        while True:
            buf = buffer.read(1024 * 8)
            if not buf:
                break

            file_size = file_size + len(buf)
            CRC = binascii.crc32(buf, CRC) & 0xffffffff
            if cmpr:
                buf = cmpr.compress(buf)
                compress_size = compress_size + len(buf)

            self.fp.write(buf)

        if cmpr:
            # Flush whatever the compressor still buffers.
            buf = cmpr.flush()
            compress_size = compress_size + len(buf)
            self.fp.write(buf)
            zinfo.compress_size = compress_size
        else:
            zinfo.compress_size = file_size

        zinfo.CRC = CRC
        zinfo.file_size = file_size
        # Seek backwards and write CRC and file sizes
        # (offset 14 is the CRC field of the local file header).
        position = self.fp.tell()
        self.fp.seek(zinfo.header_offset + 14, 0)
        self.fp.write(struct.pack("<LLL", zinfo.CRC, zinfo.compress_size, zinfo.file_size))
        self.fp.seek(position, 0)
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo
Example #31
0
# Build ZIP local-file-header records for each (path, data) pair.
local = []
central = []

local_size = 0
central_size = 0
for path, data in files:
    # Directory entries end with "/" and are stored, never deflated.
    is_directory = path.endswith(b"/")

    version = 0x14
    # NOTE(review): presumably MS-DOS attribute bits (0x10 = directory,
    # 0x20 = archive) -- confirm against the central-directory writer.
    attributes = 0x10 if is_directory else 0x20

    crc = zlib.crc32(data)

    if use_compression and not is_directory:
        # Method 8 = deflate; negative wbits yields a raw deflate stream
        # without the zlib header/trailer, as the ZIP format requires.
        compression = 8
        obj = zlib.compressobj(wbits=-zlib.MAX_WBITS)
        compressed_data = obj.compress(data)
        compressed_data += obj.flush()
    else:
        # Method 0 = stored (no compression).
        compression = 0
        compressed_data = data

    print("compressed_data:", len(compressed_data))

    # "PK\3\4" is the local-file-header signature, followed by the fixed
    # fields: version, flags, method, time, date, crc, sizes, name length.
    local_header = b"PK\3\4"
    local_header += struct.pack("<HHHHHIIII", version, 0, compression, 0,
                                0, crc, len(compressed_data), len(data),
                                len(path))

    #print(" ".join("%2x"%x for x in local_header + path + compressed_data))
Example #32
0
def deflate_headerless(data):
    """Compress *data* as a raw DEFLATE stream (no zlib header/trailer)."""
    # Negative window bits tell zlib to omit the header and checksum.
    compressor = zlib.compressobj(wbits=-15)
    body = compressor.compress(data)
    return body + compressor.flush()
Example #33
0
def deflate_compress(bs):
    """Return *bs* compressed as a raw DEFLATE stream at maximum level."""
    compressor = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
    compressed = compressor.compress(bs)
    compressed += compressor.flush()
    return compressed
    def write(self, filename, arcname=None, compress_type=None):
        """Write the file *filename* into the archive under *arcname*.

        Mirrors ``zipfile.ZipFile.write()``: stat the file, emit a local
        file header, stream the (optionally deflated) contents, then seek
        back and rewrite the header with the final CRC and sizes.
        NOTE(review): decompiled Python 2 source (``16L``/``4294967295L``
        long literals) -- not valid Python 3 syntax as-is.
        """
        if not self.fp:
            raise RuntimeError('Attempt to write to ZIP archive that was already closed')
        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        if arcname is None:
            arcname = filename
        # Normalize to an archive-relative path: drop the drive and any
        # leading path separators.
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]

        if isdir:
            arcname += '/'
        zinfo = ZipInfo(arcname, date_time)
        # Preserve the file's Unix mode bits in the upper 16 bits.
        zinfo.external_attr = (st[0] & 65535) << 16L
        if compress_type is None:
            zinfo.compress_type = self.compression
        else:
            zinfo.compress_type = compress_type
        zinfo.file_size = st.st_size
        zinfo.flag_bits = 0
        zinfo.header_offset = self.fp.tell()
        self._writecheck(zinfo)
        self._didModify = True
        if isdir:
            # Directories carry no data: write the header and return early.
            zinfo.file_size = 0
            zinfo.compress_size = 0
            zinfo.CRC = 0
            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo
            self.fp.write(zinfo.FileHeader(False))
            return
        with open(filename, 'rb') as (fp):
            zinfo.CRC = CRC = 0
            zinfo.compress_size = compress_size = 0
            # 5% slack: the compressed stream can exceed the input size.
            zip64 = self._allowZip64 and zinfo.file_size * 1.05 > ZIP64_LIMIT
            self.fp.write(zinfo.FileHeader(zip64))
            if zinfo.compress_type == ZIP_DEFLATED:
                # Negative wbits: raw deflate, as the ZIP format requires.
                cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
            else:
                cmpr = None
            file_size = 0
            while 1:
                buf = fp.read(8192)
                if not buf:
                    break
                file_size = file_size + len(buf)
                CRC = crc32(buf, CRC) & 4294967295L
                if cmpr:
                    buf = cmpr.compress(buf)
                    compress_size = compress_size + len(buf)
                self.fp.write(buf)

        if cmpr:
            # Flush whatever the compressor still buffers.
            buf = cmpr.flush()
            compress_size = compress_size + len(buf)
            self.fp.write(buf)
            zinfo.compress_size = compress_size
        else:
            zinfo.compress_size = file_size
        zinfo.CRC = CRC
        zinfo.file_size = file_size
        if not zip64 and self._allowZip64:
            if file_size > ZIP64_LIMIT:
                raise RuntimeError('File size has increased during compressing')
            if compress_size > ZIP64_LIMIT:
                raise RuntimeError('Compressed size larger than uncompressed size')
        # Seek back and rewrite the header now that CRC and sizes are known.
        position = self.fp.tell()
        self.fp.seek(zinfo.header_offset, 0)
        self.fp.write(zinfo.FileHeader(zip64))
        self.fp.seek(position, 0)
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo
        return
Example #35
0
 def __init__(self, output, level=1):
     """Wrap *output* with a zlib compressor at compression *level*."""
     self._output = output
     self._compressor = zlib.compressobj(level)
Example #36
0
 def __init__(self, stream):
     """Remember *stream* and create a fresh zlib compressor for it."""
     compressor = zlib.compressobj()
     self.stream = stream
     self.C = compressor
Example #37
0
def gzip_encode(content):
    """Return *content* compressed in gzip format at maximum compression."""
    # wbits = MAX_WBITS | 16 selects a gzip (not zlib) wrapper.
    compressor = zlib.compressobj(9, zlib.DEFLATED, zlib.MAX_WBITS | 16)
    body = compressor.compress(content)
    return body + compressor.flush()
Example #38
0
 def __init__(self, errors='strict'):
     """Initialize the codec; only 'strict' error handling is supported."""
     # The original (decompiled) line was
     #     raise errors == 'strict' or AssertionError
     # which is broken both ways: for errors == 'strict' it evaluates to
     # `raise True` (TypeError: exceptions must derive from BaseException),
     # and otherwise it raises AssertionError.  Restore the intended guard.
     assert errors == 'strict'
     self.errors = errors
     self.compressobj = zlib.compressobj()
    def add(self, entry):
        """
        Add an ENTRY to the CArchive.

        ENTRY must have:
          entry[0] is name (under which it will be saved).
          entry[1] is fullpathname of the file.
          entry[2] is a flag for it's storage format (0==uncompressed,
          1==compressed)
          entry[3] is the entry's type code.
          Version 5:
            If the type code is 'o':
              entry[0] is the runtime option
              eg: v  (meaning verbose imports)
                  u  (meaning unbuffered)
                  W arg (warning option arg)
                  s  (meaning do site.py processing.
        """
        (nm, pathnm, flag, typcd) = entry[:4]
        # FIXME Could we make the version 5 the default one?
        # Version 5 - allow type 'o' = runtime option.
        code_data = None
        fh = None
        try:
            if typcd in ('o', 'd'):
                # Runtime options and dependencies carry no payload.
                ulen = 0
                flag = 0
            elif typcd == 's':
                # If it's a source code file, compile it to a code object and marshall
                # the object so it can be unmarshalled by the bootloader.

                code = get_code_object(nm, pathnm)
                code = strip_paths_in_code(code)

                code_data = marshal.dumps(code)
                ulen = len(code_data)
            else:
                fh = open(pathnm, 'rb')
                ulen = os.fstat(fh.fileno()).st_size
        except IOError:
            print("Cannot find ('%s', '%s', %s, '%s')" %
                  (nm, pathnm, flag, typcd))
            raise

        where = self.lib.tell()
        assert flag in range(3)
        if not fh and not code_data:
            # no need to write anything
            pass
        elif flag == 1:
            # Compressed entry: stream the payload through zlib.
            comprobj = zlib.compressobj(self.LEVEL)
            if code_data is not None:
                self.lib.write(comprobj.compress(code_data))
            else:
                assert fh
                while 1:
                    buf = fh.read(16 * 1024)
                    if not buf:
                        break
                    self.lib.write(comprobj.compress(buf))
            self.lib.write(comprobj.flush())

        else:
            # Uncompressed entry: copy the bytes straight through.
            if code_data is not None:
                self.lib.write(code_data)
            else:
                assert fh
                while 1:
                    buf = fh.read(16 * 1024)
                    if not buf:
                        break
                    self.lib.write(buf)

        dlen = self.lib.tell() - where
        if typcd == 'm':
            if pathnm.find('.__init__.py') > -1:
                typcd = 'M'

        # Close the source file: the original implementation leaked this
        # handle (the companion implementation of add() closes it here).
        if fh:
            fh.close()

        # Record the entry in the CTOC
        self.toc.add(where, dlen, ulen, flag, typcd, nm)
Example #40
0
import base64
import zlib

if len(sys.argv) != 2:
    sys.exit(0)

print(sys.argv[1])
subprocess.Popen([
    "ffmpeg", "-i", sys.argv[1], "-ar", "48000", "-ac", "2", "-y", "temp.wav"
],
                 stderr=subprocess.PIPE,
                 stdout=subprocess.PIPE).wait()

fname = "songs/" + input("Song File Name >> ")
f = open(fname, "wb")
e = zlib.compressobj(9)
c = 0
b = ""

opusenc.initialize(256000)

wf = wave.open("temp.wav")
while True:
    rc = wf.readframes(480)
    if len(rc) != 1920:
        break

    opus = opusenc.encode(rc)
    b += base64.b64encode(opus).decode("utf-8") + "\n"
    c += 1
    if c >= 100:
Example #41
0
class TestParsePayload(unittest.TestCase):
    """Unit tests for aiohttp's HttpPayloadParser: EOF handling, length
    limits, chunked-encoding errors, and deflate decompression."""

    def setUp(self):
        self.stream = mock.Mock()
        asyncio.set_event_loop(None)

    def test_parse_eof_payload(self):
        # readall=True: the payload is everything until EOF.
        out = aiohttp.FlowControlDataQueue(self.stream)
        p = HttpPayloadParser(out, readall=True)
        p.feed_data(b'data')
        p.feed_eof()

        self.assertTrue(out.is_eof())
        self.assertEqual([(bytearray(b'data'), 4)], list(out._buffer))

    def test_parse_no_body(self):
        # PUT with no length/chunking: parser is immediately done.
        out = aiohttp.FlowControlDataQueue(self.stream)
        p = HttpPayloadParser(out, method='PUT')

        self.assertTrue(out.is_eof())
        self.assertTrue(p.done)

    def test_parse_length_payload_eof(self):
        # EOF before the declared Content-Length is reached must raise.
        out = aiohttp.FlowControlDataQueue(self.stream)

        p = HttpPayloadParser(out, length=4)
        p.feed_data(b'da')

        with pytest.raises(http_exceptions.ContentLengthError):
            p.feed_eof()

    def test_parse_chunked_payload_size_error(self):
        # A chunk header that is not a hex size must raise.
        out = aiohttp.FlowControlDataQueue(self.stream)
        p = HttpPayloadParser(out, chunked=True)
        self.assertRaises(http_exceptions.TransferEncodingError, p.feed_data,
                          b'blah\r\n')
        self.assertIsInstance(out.exception(),
                              http_exceptions.TransferEncodingError)

    def test_http_payload_parser_length(self):
        # Only `length` bytes are consumed; the remainder is returned as tail.
        out = aiohttp.FlowControlDataQueue(self.stream)
        p = HttpPayloadParser(out, length=2)
        eof, tail = p.feed_data(b'1245')
        self.assertTrue(eof)

        self.assertEqual(b'12', b''.join(d for d, _ in out._buffer))
        self.assertEqual(b'45', tail)

    # Shared fixture: b'data' compressed as a raw deflate stream
    # (negative wbits -> no zlib header/trailer).
    _comp = zlib.compressobj(wbits=-zlib.MAX_WBITS)
    _COMPRESSED = b''.join([_comp.compress(b'data'), _comp.flush()])

    def test_http_payload_parser_deflate(self):
        length = len(self._COMPRESSED)
        out = aiohttp.FlowControlDataQueue(self.stream)
        p = HttpPayloadParser(out, length=length, compression='deflate')
        p.feed_data(self._COMPRESSED)
        self.assertEqual(b'data', b''.join(d for d, _ in out._buffer))
        self.assertTrue(out.is_eof())

    def test_http_payload_parser_deflate_no_wbits(self):
        # Same payload but with the standard zlib wrapper (default wbits);
        # the parser must handle both raw and wrapped deflate.
        comp = zlib.compressobj()
        COMPRESSED = b''.join([comp.compress(b'data'), comp.flush()])

        length = len(COMPRESSED)
        out = aiohttp.FlowControlDataQueue(self.stream)
        p = HttpPayloadParser(out, length=length, compression='deflate')
        p.feed_data(COMPRESSED)
        self.assertEqual(b'data', b''.join(d for d, _ in out._buffer))
        self.assertTrue(out.is_eof())

    def test_http_payload_parser_length_zero(self):
        out = aiohttp.FlowControlDataQueue(self.stream)
        p = HttpPayloadParser(out, length=0)
        self.assertTrue(p.done)
        self.assertTrue(out.is_eof())
Example #42
0
 def enable_compression(
     self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
 ) -> None:
     """Turn on outgoing-body compression ('deflate' or 'gzip')."""
     # gzip needs the header/trailer wrapper (16 + MAX_WBITS);
     # 'deflate' uses a raw stream (negative wbits).
     if encoding == "gzip":
         zlib_mode = 16 + zlib.MAX_WBITS
     else:
         zlib_mode = -zlib.MAX_WBITS
     self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)
Example #43
0
class nocompress(object):
    """Identity 'compressor': exposes the compressobj interface but
    passes data through unchanged."""

    def compress(self, x):
        # No transformation -- hand the chunk back untouched.
        return x

    def flush(self):
        # Nothing is ever buffered, so flushing yields an empty string.
        return ""


# Mapping: bundle type name -> (on-disk header string, compressor factory).
bundletypes = {
    "":
    ("", nocompress),  # only when using unbundle on ssh and old http servers
    # since the unification ssh accepts a header but there
    # is no capability signaling it.
    "HG10UN": ("HG10UN", nocompress),
    "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
    "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
}

# hgweb uses this list to communicate its preferred type
bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']


def writebundle(cg, filename, bundletype, vfs=None):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """
Example #44
0
 def reset(self):
     """Discard any compressor state and start a fresh zlib stream."""
     fresh = zlib.compressobj()
     self.compressobj = fresh
Example #45
0
    def add(self, entry):
        """
        Add an ENTRY to the CArchive.

        ENTRY must have:
          entry[0] is name (under which it will be saved).
          entry[1] is fullpathname of the file.
          entry[2] is a flag for it's storage format (0==uncompressed,
          1==compressed)
          entry[3] is the entry's type code.
          Version 5:
            If the type code is 'o':
              entry[0] is the runtime option
              eg: v  (meaning verbose imports)
                  u  (meaning unbuffered)
                  W arg (warning option arg)
                  s  (meaning do site.py processing.
        """
        (nm, pathnm, flag, typcd) = entry[:4]
        # FIXME Could we make the version 5 the default one?
        # Version 5 - allow type 'o' = runtime option.
        code_data = None
        fh = None
        try:
            if typcd in ('o', 'd'):
                # Runtime options and dependencies carry no payload.
                ulen = 0
                flag = 0
            elif typcd == 's':
                # If it's a source code file, compile it to a code object and marshall
                # the object so it can be unmarshalled by the bootloader.

                code = get_code_object(nm, pathnm)
                code = strip_paths_in_code(code)

                code_data = marshal.dumps(code)
                ulen = len(code_data)
            elif typcd == 'm':
                fh = open(pathnm, 'rb')
                ulen = os.fstat(fh.fileno()).st_size
                # Check if it is a PYC file
                header = fh.read(4)
                fh.seek(0)
                if header == BYTECODE_MAGIC:
                    # Read whole header and load code.
                    # According to PEP-552, in python versions prior to
                    # 3.7, the PYC header consists of three 32-bit words
                    # (magic, timestamp, and source file size).
                    # From python 3.7 on, the PYC header was extended to
                    # four 32-bit words (magic, flags, and, depending on
                    # the flags, either timestamp and source file size,
                    # or a 64-bit hash).
                    if is_py37:
                        header = fh.read(16)
                    else:
                        header = fh.read(12)
                    code = marshal.load(fh)
                    # Strip paths from code, marshal back into module form.
                    # The header fields (timestamp, size, hash, etc.) are
                    # all referring to the source file, so our modification
                    # of the code object does not affect them, and we can
                    # re-use the original header.
                    code = strip_paths_in_code(code)
                    data = header + marshal.dumps(code)
                    # Close the real file before rebinding fh to the
                    # in-memory copy; the original code dropped the open
                    # handle here and leaked it.
                    fh.close()
                    # Create file-like object for timestamp re-write
                    # in the subsequent steps
                    fh = io.BytesIO(data)
                    ulen = len(data)
            else:
                fh = open(pathnm, 'rb')
                ulen = os.fstat(fh.fileno()).st_size
        except IOError:
            print("Cannot find ('%s', '%s', %s, '%s')" %
                  (nm, pathnm, flag, typcd))
            raise

        where = self.lib.tell()
        assert flag in range(3)
        if not fh and not code_data:
            # no need to write anything
            pass
        elif flag == 1:
            # Compressed entry: stream the payload through zlib.
            comprobj = zlib.compressobj(self.LEVEL)
            if code_data is not None:
                self.lib.write(comprobj.compress(code_data))
            else:
                assert fh
                # We only want to change it for pyc files
                modify_header = typcd in ('M', 'm', 's')
                while 1:
                    buf = fh.read(16 * 1024)
                    if not buf:
                        break
                    if modify_header:
                        modify_header = False
                        buf = fake_pyc_timestamp(buf)
                    self.lib.write(comprobj.compress(buf))
            self.lib.write(comprobj.flush())

        else:
            # Uncompressed entry: copy the bytes straight through.
            if code_data is not None:
                self.lib.write(code_data)
            else:
                assert fh
                while 1:
                    buf = fh.read(16 * 1024)
                    if not buf:
                        break
                    self.lib.write(buf)

        dlen = self.lib.tell() - where
        if typcd == 'm':
            if pathnm.find('.__init__.py') > -1:
                typcd = 'M'

        if fh:
            fh.close()

        # Record the entry in the CTOC
        self.toc.add(where, dlen, ulen, flag, typcd, nm)
Example #46
0
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the  Apache License, Version 2.0, please send an email to
# [email protected]. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################

from iptest.assert_util import *

import zlib

# Create test data: two max-level compressors -- raw deflate (negative
# wbits, no header/trailer) and zlib-wrapped (positive wbits).
deflate_compress = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
zlib_compress = zlib.compressobj(9, zlib.DEFLATED, zlib.MAX_WBITS)
# missing gzip support in compression
# gzip_compress = zlib.compressobj(9, zlib.DEFLATED, zlib.MAX_WBITS | 16)


def create_gzip():
    """Write the module-level *text* to test_data.gz and return the raw
    gzip bytes read back from disk."""
    import gzip
    with gzip.open('test_data.gz', 'wb') as f:
        f.write(text)
    # Read back in binary mode: the file contains gzip data, and the
    # original text-mode 'r' would corrupt it on Windows (newline
    # translation) and fail to decode it on Python 3.
    with open('test_data.gz', 'rb') as f:
        gzip_compress = f.read()
    return gzip_compress


def test_gzip():
Example #47
0
def merge_inventories(name_map, **kwargs):
    """Merge this project's entries into the global intersphinx inventory.

    Builds Sphinx-style inventory entries from *name_map*, injects external
    class/enum/data entries back into *name_map* for type cross-linking,
    merges everything into the module-level ``intersphinx_inventory``, and
    optionally dumps the internal part to ``inventory_filename`` in Sphinx
    inventory-v2 format.
    """
    global intersphinx_inventory

    # Create inventory entries from the name_map
    internal_inventory = {}
    for path_str, entry in name_map.items():
        EntryType = type(entry.type)  # so we don't need to import the enum
        if entry.type == EntryType.MODULE:
            type_string = 'py:module'
        elif entry.type == EntryType.CLASS:
            type_string = 'py:class'
        elif entry.type == EntryType.FUNCTION:
            # TODO: properly distinguish between 'py:function',
            # 'py:classmethod', 'py:staticmethod', 'py:method'
            type_string = 'py:function'
        elif entry.type == EntryType.OVERLOADED_FUNCTION:
            # TODO: what about the other overloads?
            type_string = 'py:function'
        elif entry.type == EntryType.PROPERTY:
            # datetime.date.year is decorated with @property and listed as a
            # py:attribute, so that's probably it
            type_string = 'py:attribute'
        elif entry.type == EntryType.ENUM:
            type_string = 'py:enum'  # this doesn't exist in Sphinx
        elif entry.type == EntryType.ENUM_VALUE:
            type_string = 'py:enumvalue'  # these don't exist in Sphinx
        elif entry.type == EntryType.DATA:
            type_string = 'py:data'
        elif entry.type == EntryType.PAGE:
            type_string = 'std:doc'
        elif entry.type == EntryType.SPECIAL:
            # TODO: this will cause duplicates when multiple m.css projects
            #   gets together, solve better
            type_string = 'std:special'
        else:  # pragma: no cover
            assert False

        # Mark those with m-doc (as internal)
        internal_inventory.setdefault(type_string,
                                      {})[path_str] = (entry.url, '-',
                                                       ['m-doc'])

    # Add class / enum / enum value inventory entries to the name map for type
    # cross-linking
    for type_, type_string in [
            # TODO: this will blow up if the above loop is never entered (which is
            # unlikely) as EntryType is defined there
        (EntryType.CLASS, 'py:class'),
            # Otherwise we can't link to standard exceptions from :raise:
        (EntryType.CLASS, 'py:exception'),  # TODO: special type for these?
        (EntryType.DATA, 'py:data'),  # typing.Tuple or typing.Any is data
            # Those are custom to m.css, not in Sphinx
        (EntryType.ENUM, 'py:enum'),
        (EntryType.ENUM_VALUE, 'py:enumvalue'),
    ]:
        if type_string in intersphinx_inventory:
            for path, value in intersphinx_inventory[type_string].items():
                url, _, css_classes = value
                entry = Empty()
                entry.type = type_
                entry.object = None
                entry.path = path.split('.')
                entry.css_classes = css_classes
                entry.url = url
                name_map[path] = entry

    # Add stuff from the name map to our inventory
    for type_, data_internal in internal_inventory.items():
        data = intersphinx_inventory.setdefault(type_, {})
        for path, value in data_internal.items():
            # Ignore duplicate things (such as `index` etc.)
            # TODO: solve better
            if path in data: continue
            data[path] = value

    # Save the internal inventory, if requested. Again basically a copy of
    # sphinx.util.inventory.InventoryFile.dump().
    if inventory_filename:
        with open(os.path.join(inventory_filename), 'wb') as f:
            # Header
            # TODO: user-defined project/version
            f.write(
                b'# Sphinx inventory version 2\n'
                b'# Project: X\n'
                b'# Version: 0\n'
                b'# The remainder of this file is compressed using zlib.\n')

            # Body. Sorting so it's in a reproducible order for testing.
            compressor = zlib.compressobj(9)
            for type_, data in sorted(internal_inventory.items()):
                for path, value in data.items():
                    url, title, css_classes = value
                    # The type has to contain a colon; the literal '2' sits
                    # in the priority field of the inventory line format.
                    assert ':' in type_
                    f.write(
                        compressor.compress('{} {} 2 {} {}\n'.format(
                            path, type_, url, title).encode('utf-8')))
            f.write(compressor.flush())
Example #48
0
    def _send_frame(self, message, opcode, compress=None):
        """Send a frame over the websocket with message as its payload."""
        if self._closing:
            # NOTE(review): only warns -- the frame is still written below.
            ws_logger.warning('websocket connection is closing.')

        rsv = 0

        # Only compress larger packets (disabled)
        # Does small packet needs to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        if (compress or self.compress) and opcode < 8:
            if compress:
                # Do not set self._compress if compressing is for this frame
                compressobj = zlib.compressobj(wbits=-compress)
            else:  # self.compress
                # Persistent compressor: the deflate context is reused
                # across frames (negative wbits -> raw deflate stream).
                if not self._compressobj:
                    self._compressobj = zlib.compressobj(wbits=-self.compress)
                compressobj = self._compressobj

            message = compressobj.compress(message)
            message = message + compressobj.flush(
                zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH)
            # The flush trailer is stripped before transmission.
            if message.endswith(_WS_DEFLATE_TRAILING):
                message = message[:-4]
            # Flag the frame as compressed (RSV1 bit of the first byte).
            rsv = rsv | 0x40

        msg_length = len(message)

        use_mask = self.use_mask
        if use_mask:
            mask_bit = 0x80
        else:
            mask_bit = 0

        # Header layout depends on payload size: 7-bit length, or the
        # 126/127 escape values followed by a 16- or 64-bit length.
        if msg_length < 126:
            header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
        elif msg_length < (1 << 16):
            header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
        else:
            header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
        if use_mask:
            # Client-side frames are XOR-masked with a random 4-byte key.
            mask = self.randrange(0, 0xffffffff)
            mask = mask.to_bytes(4, 'big')
            message = bytearray(message)
            _websocket_mask(mask, message)
            self.writer.write(header + mask + message)
            self._output_size += len(header) + len(mask) + len(message)
        else:
            # Avoid concatenating very large payloads with the header.
            if len(message) > MSG_SIZE:
                self.writer.write(header)
                self.writer.write(message)
            else:
                self.writer.write(header + message)

            self._output_size += len(header) + len(message)

        # Apply backpressure once enough bytes have been queued.
        if self._output_size > self._limit:
            self._output_size = 0
            return self.stream.drain()

        return noop()
Example #49
0
 def _make_compressor(self):
     """Build a raw-DEFLATE compressor at the configured level."""
     # Negative window bits: no zlib header/checksum around the stream.
     wbits = -zlib.MAX_WBITS
     return zlib.compressobj(COMPRESSION_LEVEL, zlib.DEFLATED, wbits)
Example #50
0
def gae_post_ex(environ, start_response):
    """WSGI handler: proxy one cookie-encoded request through GAE urlfetch.

    Decodes the real request from the Cookie header, enforces the
    password / hosts-deny policy, fetches the target URL with retries
    (growing deadline, Range narrowing on oversized responses), optionally
    re-compresses small textual bodies, and returns the response encoded
    into a Set-Cookie header with an image/gif content type.
    NOTE(review): Python 2 code (xrange, long literals, cStringIO).
    """
    headers, kwargs = decode_request(environ['HTTP_COOKIE'])

    method = kwargs['method']
    url = kwargs['url']

    #logging.info('%s "%s %s %s" - -', environ['REMOTE_ADDR'], method, url, 'HTTP/1.1')

    if __password__ and __password__ != kwargs.get('password', ''):
        start_response('403 Forbidden', [('Content-Type', 'text/html')])
        return [
            gae_error_html(errno='403',
                           error='Wrong password.',
                           description='GoAgent proxy.ini password is wrong!')
        ]

    if __hostsdeny__ and urlparse.urlparse(url).netloc.endswith(__hostsdeny__):
        start_response('403 Forbidden', [('Content-Type', 'text/html')])
        return [
            gae_error_html(errno='403',
                           error='Hosts Deny',
                           description='url=%r' % url)
        ]

    fetchmethod = getattr(urlfetch, method, '')
    if not fetchmethod:
        start_response('501 Unsupported', [('Content-Type', 'text/html')])
        return [
            gae_error_html(errno='501',
                           error=('Invalid Method: ' + str(method)),
                           description='Unsupported Method')
        ]

    deadline = Deadline
    headers = dict(headers)
    headers['Connection'] = 'close'
    payload = environ['wsgi.input'].read(
    ) if 'Content-Length' in headers else None

    accept_encoding = headers.get('Accept-Encoding', '')

    # Retry loop: each failure kind backs off and doubles the deadline;
    # the for/else falls through to a 500 when all attempts fail.
    errors = []
    for i in xrange(int(kwargs.get('fetchmax', FetchMax))):
        try:
            response = urlfetch.fetch(url,
                                      payload,
                                      fetchmethod,
                                      headers,
                                      allow_truncated=False,
                                      follow_redirects=False,
                                      deadline=deadline,
                                      validate_certificate=False)
            break
        except apiproxy_errors.OverQuotaError as e:
            time.sleep(4)
        except urlfetch.DeadlineExceededError as e:
            errors.append('DeadlineExceededError %s(deadline=%s)' %
                          (e, deadline))
            logging.error('DeadlineExceededError(deadline=%s, url=%r)',
                          deadline, url)
            time.sleep(1)
            deadline = Deadline * 2
        except urlfetch.DownloadError as e:
            errors.append('DownloadError %s(deadline=%s)' % (e, deadline))
            logging.error('DownloadError(deadline=%s, url=%r)', deadline, url)
            time.sleep(1)
            deadline = Deadline * 2
        except urlfetch.ResponseTooLargeError as e:
            # Narrow the request to a byte range so the next attempt fits.
            response = e.response
            logging.error(
                'ResponseTooLargeError(deadline=%s, url=%r) response(%r)',
                deadline, url, response)
            m = re.search(r'=\s*(\d+)-',
                          headers.get('Range') or headers.get('range') or '')
            if m is None:
                headers['Range'] = 'bytes=0-%d' % int(
                    kwargs.get('fetchmaxsize', FetchMaxSize))
            else:
                headers.pop('Range', '')
                headers.pop('range', '')
                start = int(m.group(1))
                headers['Range'] = 'bytes=%s-%d' % (start, start + int(
                    kwargs.get('fetchmaxsize', FetchMaxSize)))
            deadline = Deadline * 2
        except Exception as e:
            errors.append(str(e))
            if i == 0 and method == 'GET':
                deadline = Deadline * 2
    else:
        start_response('500 Internal Server Error',
                       [('Content-Type', 'text/html')])
        return [
            gae_error_html(errno='502',
                           error=('Python Urlfetch Error: ' + str(method)),
                           description='<br />\n'.join(errors) or 'UNKOWN')
        ]

    #logging.debug('url=%r response.status_code=%r response.headers=%r response.content[:1024]=%r', url, response.status_code, dict(response.headers), response.content[:1024])

    # Re-compress small textual bodies if the client accepts it.
    data = response.content
    if 'content-encoding' not in response.headers and len(
            response.content) < DeflateMaxSize and response.headers.get(
                'content-type', '').startswith(
                    ('text/', 'application/json', 'application/javascript')):
        if 'deflate' in accept_encoding:
            response.headers['Content-Encoding'] = 'deflate'
            # Strip the 2-byte zlib header and 4-byte checksum -> raw deflate.
            data = zlib.compress(data)[2:-4]
        elif 'gzip' in accept_encoding:
            response.headers['Content-Encoding'] = 'gzip'
            compressobj = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                                           zlib.DEFLATED, -zlib.MAX_WBITS,
                                           zlib.DEF_MEM_LEVEL, 0)
            dataio = cStringIO.StringIO()
            # Hand-built gzip member header, then raw deflate body and the
            # CRC32/length trailer.
            dataio.write('\x1f\x8b\x08\x00\x00\x00\x00\x00\x02\xff')
            dataio.write(compressobj.compress(data))
            dataio.write(compressobj.flush())
            dataio.write(
                struct.pack('<LL',
                            zlib.crc32(data) & 0xFFFFFFFFL,
                            len(data) & 0xFFFFFFFFL))
            data = dataio.getvalue()
    response.headers['Content-Length'] = str(len(data))
    start_response(
        '200 OK',
        [('Content-Type', 'image/gif'),
         ('Set-Cookie',
          encode_request(response.headers, status=str(response.status_code)))])
    return [data]
Example #51
0
 def _compress_body(self, zlib_mode: int) -> None:
     compressobj = zlib.compressobj(wbits=zlib_mode)
     body_in = self._body
     assert body_in is not None
     self._compressed_body = \
         compressobj.compress(body_in) + compressobj.flush()
Example #52
0
    def __init__(self,
                 filename=None,
                 mode=None,
                 compresslevel=9,
                 fileobj=None):
        """Constructor for the GzipFile class.

        At least one of fileobj and filename must be given a
        non-trivial value.

        The new class instance is based on fileobj, which can be a regular
        file, a StringIO object, or any other object which simulates a file.
        It defaults to None, in which case filename is opened to provide
        a file object.

        When fileobj is not None, the filename argument is only used to be
        included in the gzip file header, which may includes the original
        filename of the uncompressed file.  It defaults to the filename of
        fileobj, if discernible; otherwise, it defaults to the empty string,
        and in this case the original filename is not included in the header.

        The mode argument can be any of 'r', 'rb', 'a', 'ab', 'w', or 'wb',
        depending on whether the file will be read or written.  The default
        is the mode of fileobj if discernible; otherwise, the default is 'rb'.
        Be aware that only the 'rb', 'ab', and 'wb' values should be used
        for cross-platform portability.

        The compresslevel argument is an integer from 1 to 9 controlling the
        level of compression; 1 is fastest and produces the least compression,
        and 9 is slowest and produces the most compression.  The default is 9.

        """

        # guarantee the file is opened in binary mode on platforms
        # that care about that sort of thing
        if mode and 'b' not in mode:
            mode += 'b'
        if fileobj is None:
            # No file object supplied: open the named file ourselves and
            # remember it (myfileobj) so close() knows it owns the handle.
            fileobj = self.myfileobj = __builtin__.open(filename, mode or 'rb')
        if filename is None:
            # Fall back to the file object's name (if any) for the header.
            if hasattr(fileobj, 'name'): filename = fileobj.name
            else: filename = ''
        if mode is None:
            # Infer the mode from the file object; default to reading.
            if hasattr(fileobj, 'mode'): mode = fileobj.mode
            else: mode = 'rb'

        if mode[0:1] == 'r':
            self.mode = READ
            # Set flag indicating start of a new member
            self._new_member = True
            # Buffer of decompressed-but-unread data and its size.
            self.extrabuf = ""
            self.extrasize = 0
            self.filename = filename
            # Starts small, scales exponentially
            self.min_readsize = 100

        elif mode[0:1] == 'w' or mode[0:1] == 'a':
            self.mode = WRITE
            self._init_write(filename)
            # Negative wbits -> raw deflate stream: the gzip header and
            # trailer are emitted by this class, not by zlib.
            self.compress = zlib.compressobj(compresslevel, zlib.DEFLATED,
                                             -zlib.MAX_WBITS,
                                             zlib.DEF_MEM_LEVEL, 0)
        else:
            raise IOError, "Mode " + mode + " not supported"

        self.fileobj = fileobj
        self.offset = 0

        if self.mode == WRITE:
            self._write_gzip_header()
Example #53
0
def _compress(data):
    o = zlib.compressobj(wbits=-zlib.MAX_WBITS)
    return o.compress(data) + o.flush()
    def setBody(self, body, title='', is_error=False, lock=None):
        """ Set the body of the response

        Sets the return body equal to the (string) argument "body". Also
        updates the "content-length" return header.

        If the body is already locked via a previous call, do nothing and
        return None.

        You can also specify a title, in which case the title and body
        will be wrapped up in html, head, title, and body tags.

        If the body is a 2-element tuple, then it will be treated
        as (title,body)

        If body is unicode, encode it.

        If body is not a string or unicode, but has an 'asHTML' method, use
        the result of that method as the body;  otherwise, use the 'str'
        of body.

        If is_error is true, format the HTML as a Zope error message instead
        of a generic HTML page.

        Return 'self' (XXX as a true value?).
        """
        # allow locking of the body in the same way as the status
        if self._locked_body:
            return
        elif lock:
            self._locked_body = 1

        if not body:
            return self

        # A 2-tuple is interpreted as (title, body).
        if isinstance(body, tuple) and len(body) == 2:
            title, body = body

        # Objects that know how to render themselves take precedence.
        if hasattr(body, 'asHTML'):
            body = body.asHTML()

        # Normalise the body to bytes; non-string objects go through str().
        if isinstance(body, str):
            body = self._encode_unicode(body)
        elif isinstance(body, bytes):
            pass
        else:
            try:
                body = bytes(body)
            except (TypeError, UnicodeError):
                body = self._encode_unicode(str(body))

        # At this point body is always binary
        b_len = len(body)
        if b_len < 200 and \
           body[:1] == b'<' and \
           body.find(b'>') == b_len - 1 and \
           bogus_str_search(body) is not None:
            # A short single-tag body matching bogus_str_search looks like
            # the repr of a broken object reference: render a 404 instead.
            self.notFoundError(body[1:-1].decode(self.charset))
        else:
            if title:
                # Wrap body in a full HTML page (error-styled when
                # is_error is set).
                title = str(title)
                if not is_error:
                    self.body = body = self._html(
                        title, body.decode(self.charset)).encode(self.charset)
                else:
                    self.body = body = self._error_html(
                        title, body.decode(self.charset)).encode(self.charset)
            else:
                self.body = body

        content_type = self.headers.get('content-type')

        # Default the content type by sniffing HTML, or append the charset
        # to an existing text/* type that lacks one.
        if content_type is None:
            if self.isHTML(body):
                content_type = 'text/html; charset=%s' % self.charset
            else:
                content_type = 'text/plain; charset=%s' % self.charset
            self.setHeader('content-type', content_type)
        else:
            if content_type.startswith('text/') and \
               'charset=' not in content_type:
                content_type = '%s; charset=%s' % (content_type,
                                                   self.charset)
                self.setHeader('content-type', content_type)

        self.setHeader('content-length', len(self.body))

        self.insertBase()

        if self.use_HTTP_content_compression and \
           self.headers.get('content-encoding', 'gzip') == 'gzip':
            # use HTTP content encoding to compress body contents unless
            # this response already has another type of content encoding
            if content_type.split('/')[0] not in uncompressableMimeMajorTypes:
                # only compress if not listed as uncompressable
                body = self.body
                startlen = len(body)
                # Raw deflate (negative wbits) sandwiched between a
                # pre-built gzip header and the CRC32/size trailer.
                co = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS,
                                      zlib.DEF_MEM_LEVEL, 0)
                chunks = [_gzip_header, co.compress(body),
                          co.flush(),
                          struct.pack("<LL",
                                      zlib.crc32(body) & 0xffffffff,
                                      startlen)]
                z = b''.join(chunks)
                newlen = len(z)
                # Only use the compressed form when it is actually smaller.
                if newlen < startlen:
                    self.body = z
                    self.setHeader('content-length', newlen)
                    self.setHeader('content-encoding', 'gzip')
                    if self.use_HTTP_content_compression == 1:
                        # use_HTTP_content_compression == 1 if force was
                        # NOT used in enableHTTPCompression().
                        # If we forced it, then Accept-Encoding
                        # was ignored anyway, so cache should not
                        # vary on it. Otherwise if not forced, cache should
                        # respect Accept-Encoding client header
                        vary = self.getHeader('Vary')
                        if vary is None or 'Accept-Encoding' not in vary:
                            self.appendHeader('Vary', 'Accept-Encoding')
        return self
Example #55
0
def bibliotik_compress_html(html):
    """Encode *html* as a b'b\\x01'-tagged zlib stream using the shared dictionary."""
    compressor = zlib.compressobj(level=9, zdict=BIBLIOTIK_ZDICT)
    payload = compressor.compress(html.encode())
    payload += compressor.flush()
    return b'b\x01' + payload
Example #56
0
 def __init__(self):
     self._comp = zlib.compressobj()
Example #57
0
 def _create_compressor(self):
     return zlib.compressobj(self._compression_level, zlib.DEFLATED,
                             -self._max_wbits, self._mem_level)
Example #58
0
    def __init__(self, filename=None, mode=None,
                 compresslevel=9, fileobj=None, mtime=None):
        """Constructor for the GzipFile class.

        At least one of fileobj and filename must be given a
        non-trivial value.

        The new class instance is based on fileobj, which can be a regular
        file, an io.BytesIO object, or any other object which simulates a file.
        It defaults to None, in which case filename is opened to provide
        a file object.

        When fileobj is not None, the filename argument is only used to be
        included in the gzip file header, which may includes the original
        filename of the uncompressed file.  It defaults to the filename of
        fileobj, if discernible; otherwise, it defaults to the empty string,
        and in this case the original filename is not included in the header.

        The mode argument can be any of 'r', 'rb', 'a', 'ab', 'w', 'wb', 'x', or
        'xb' depending on whether the file will be read or written.  The default
        is the mode of fileobj if discernible; otherwise, the default is 'rb'.
        A mode of 'r' is equivalent to one of 'rb', and similarly for 'w' and
        'wb', 'a' and 'ab', and 'x' and 'xb'.

        The compresslevel argument is an integer from 0 to 9 controlling the
        level of compression; 1 is fastest and produces the least compression,
        and 9 is slowest and produces the most compression. 0 is no compression
        at all. The default is 9.

        The mtime argument is an optional numeric timestamp to be written
        to the stream when compressing.  All gzip compressed streams
        are required to contain a timestamp.  If omitted or None, the
        current time is used.  This module ignores the timestamp when
        decompressing; however, some programs, such as gunzip, make use
        of it.  The format of the timestamp is the same as that of the
        return value of time.time() and of the st_mtime member of the
        object returned by os.stat().

        """

        # Text and universal-newline modes make no sense for a binary
        # compressed stream; reject them up front.
        if mode and ('t' in mode or 'U' in mode):
            raise ValueError("Invalid mode: {!r}".format(mode))
        # Force binary mode before the file is opened.
        if mode and 'b' not in mode:
            mode += 'b'
        if fileobj is None:
            # No file object supplied: open the named file ourselves and
            # remember it (myfileobj) so close() knows it owns the handle.
            fileobj = self.myfileobj = builtins.open(filename, mode or 'rb')
        if filename is None:
            # Fall back to the file object's name for the gzip header;
            # non-string names (e.g. a file descriptor int) are dropped.
            filename = getattr(fileobj, 'name', '')
            if not isinstance(filename, (str, bytes)):
                filename = ''
        if mode is None:
            # Infer the mode from the file object; default to reading.
            mode = getattr(fileobj, 'mode', 'rb')

        if mode.startswith('r'):
            self.mode = READ
            # Set flag indicating start of a new member
            self._new_member = True
            # Buffer data read from gzip file. extrastart is offset in
            # stream where buffer starts. extrasize is number of
            # bytes remaining in buffer from current stream position.
            self.extrabuf = b""
            self.extrasize = 0
            self.extrastart = 0
            self.name = filename
            # Starts small, scales exponentially
            self.min_readsize = 100
            fileobj = _PaddedFile(fileobj)

        elif mode.startswith(('w', 'a', 'x')):
            self.mode = WRITE
            self._init_write(filename)
            # Negative wbits -> raw deflate stream: the gzip header and
            # trailer are emitted by this class, not by zlib.
            self.compress = zlib.compressobj(compresslevel,
                                             zlib.DEFLATED,
                                             -zlib.MAX_WBITS,
                                             zlib.DEF_MEM_LEVEL,
                                             0)
        else:
            raise ValueError("Invalid mode: {!r}".format(mode))

        self.fileobj = fileobj
        self.offset = 0
        self.mtime = mtime

        if self.mode == WRITE:
            self._write_gzip_header()
Example #59
0
def _encode_looseobj(type, content, compression_level=1):
    """Yield zlib-compressed chunks encoding a loose object.

    The stream consists of a '<type> <size>\\0' header followed by the
    object content, compressed as a single zlib stream.
    """
    compressor = zlib.compressobj(compression_level)
    header = '%s %d\0' % (type, len(content))
    yield compressor.compress(header)
    yield compressor.compress(content)
    yield compressor.flush()
Example #60
0
 def compress(self):
     """Compress self.data."""
     c = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS,
                          zlib.DEF_MEM_LEVEL, 0)
     self.data = c.compress(self.data)