Example #1
 def test_compress_checksum(self):
     with self.assertRaises(TypeError):
         compress(SHORT_INPUT, checksum=None)
     self.check_compress_short(checksum=True)
     self.check_compress_short(checksum=False)
     for data in (SHORT_INPUT, LONG_INPUT):
         with self.assertRaisesLz4FramedError(LZ4F_ERROR_contentChecksum_invalid):
             # invalid checksum
             decompress(compress(data, checksum=True)[:-1] + b"0")
Example #2
 def test_compress_level(self):
     with self.assertRaises(TypeError):
         compress(SHORT_INPUT, level='1')
     # negative values designate acceleration
     for level in range(LEVEL_ACCELERATED_MAX, LZ4F_COMPRESSION_MAX + 1):
         self.check_compress_short(level=level)
     # large input, fast & hc levels (levels > 10 (v1.7.5) are significantly slower)
     self.check_compress_long(level=0)
     self.check_compress_long(level=10)
Example #3
 def test_decompress_buffer_size(self):
     out = compress(SHORT_INPUT)
     with self.assertRaises(TypeError):
         decompress(out, buffer_size="1")
     with self.assertRaises(ValueError):
         decompress(out, buffer_size=0)
     out = compress(LONG_INPUT)
     for buffer_size in range(1, 1025, 128):
         self.assertEqual(LONG_INPUT, decompress(out, buffer_size=buffer_size))
Example #4
 def test_compress_block_size(self):
     with self.assertRaises(TypeError):
         compress(SHORT_INPUT, block_size_id='1')
     with self.assertRaises(ValueError):
         compress(SHORT_INPUT, block_size_id=-1)
     for block_size in (LZ4F_BLOCKSIZE_DEFAULT, LZ4F_BLOCKSIZE_MAX64KB, LZ4F_BLOCKSIZE_MAX256KB,
                        LZ4F_BLOCKSIZE_MAX1MB, LZ4F_BLOCKSIZE_MAX4MB):
         self.check_compress_short(block_size_id=block_size)
         self.check_compress_long(block_size_id=block_size)
Example #5
 def test_compress_checksum(self):
     with self.assertRaises(TypeError):
         compress(SHORT_INPUT, checksum=None)
     self.check_compress_short(checksum=True)
     self.check_compress_short(checksum=False)
     for data in (SHORT_INPUT, LONG_INPUT):
         with self.assertRaisesLz4FramedError(
                 LZ4F_ERROR_contentChecksum_invalid):
             # invalid checksum
             decompress(compress(data, checksum=True)[:-1] + b'0')
Example #6
 def test_decompress_buffer_size(self):
     out = compress(SHORT_INPUT)
     with self.assertRaises(TypeError):
         decompress(out, buffer_size='1')
     with self.assertRaises(ValueError):
         decompress(out, buffer_size=0)
     out = compress(LONG_INPUT)
     for buffer_size in range(1, 1025, 128):
         self.assertEqual(LONG_INPUT,
                          decompress(out, buffer_size=buffer_size))
Example #7
 def test_compress_level(self):
     with self.assertRaises(TypeError):
         compress(SHORT_INPUT, level='1')
     with self.assertRaises(ValueError):
         compress(SHORT_INPUT, level=-1)
     for level in range(LZ4F_COMPRESSION_MIN, LZ4F_COMPRESSION_MAX + 1):
         self.check_compress_short(level=level)
     # large input, fast & hc levels (levels > 10 (v1.7.5) are significantly slower)
     self.check_compress_long(level=0)
     self.check_compress_long(level=10)
Example #8
 def test_compress_block_size(self):
     with self.assertRaises(TypeError):
         compress(SHORT_INPUT, block_size_id='1')
     with self.assertRaises(ValueError):
         compress(SHORT_INPUT, block_size_id=-1)
     for block_size in (LZ4F_BLOCKSIZE_DEFAULT, LZ4F_BLOCKSIZE_MAX64KB,
                        LZ4F_BLOCKSIZE_MAX256KB, LZ4F_BLOCKSIZE_MAX1MB,
                        LZ4F_BLOCKSIZE_MAX4MB):
         self.check_compress_short(block_size_id=block_size)
         self.check_compress_long(block_size_id=block_size)
Example #9
 def test_compress_level(self):
     with self.assertRaises(TypeError):
         compress(SHORT_INPUT, level="1")
     with self.assertRaises(ValueError):
         compress(SHORT_INPUT, level=-1)
     for level in range(17):
         self.check_compress_short(level=level)
     # large input, fast & hc levels
     self.check_compress_long(level=0)
     self.check_compress_long(level=16)
Example #10
def transmit(result, sock):

    pickler = pickle.Pickler(sock)
    cols = list(result.keys())
    pickler.dump(cols)

    for col in cols:
        if (result[col].dtype == object):
            colz = lz4framed.compress(pickle.dumps(result[col]))
        else:
            colz = lz4framed.compress(result[col])
        pickler.dump(result[col].dtype)
        pickler.dump(colz)
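For context, here is a sketch of the matching receive side. The receive() helper is hypothetical (it is not part of the snippet above) and assumes sock is the same file-like object the Pickler writes to and that non-object columns are NumPy-backed:

import pickle
import numpy as np
import lz4framed

def receive(sock):
    unpickler = pickle.Unpickler(sock)
    result = {}
    for col in unpickler.load():
        dtype = unpickler.load()
        raw = lz4framed.decompress(unpickler.load())
        # object columns were pickled before compression; the rest are raw buffers
        result[col] = pickle.loads(raw) if dtype == object else np.frombuffer(raw, dtype=dtype)
    return result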
Example #11
    def test_decompressor_fp(self):
        for level in (0, 16):
            out_bytes = BytesIO()
            for chunk in Decompressor(BytesIO(compress(LONG_INPUT, level=level))):
                out_bytes.write(chunk)
            self.assertEqual(out_bytes.getvalue(), LONG_INPUT)

        # incomplete frame
        out_bytes.truncate()
        with self.assertRaises(Lz4FramedNoDataError):
            for chunk in Decompressor(BytesIO(compress(LONG_INPUT)[:-32])):
                out_bytes.write(chunk)
        # some data should have been written
        out_bytes.seek(0, SEEK_END)
        self.assertTrue(out_bytes.tell() > 0)
Example #12
    def sendLz4CoreRpc(targetNodeId, content, reqId, rpcId):
        logger.info("发送RPC记录,目标节点:%s,请求ID:%s,RPC ID:%s", targetNodeId, reqId,
                    rpcId)
        try:
            encodeContent = lz4framed.compress(content)
        except Exception:
            logger.error("发送RPC错误,压缩错误,目标节点:%s,请求ID:%s,RPC ID:%s",
                         targetNodeId,
                         reqId,
                         rpcId,
                         exc_info=True)
            return False

        dep = DataExchangeProtocol()
        dep.contentType = DataExchangeProtocol.ContentType.COMPRESSED_LZ4
        dep.reqId = reqId
        dep.rpcType = DataExchangeProtocol.RpcType.CORE_RPC
        dep.rpcId = rpcId
        dep.sourceNodeId = Config.nodeId
        dep.targetNodeId = targetNodeId
        dep.timestamp = int(round(time.time() * 1000))
        dep.contentBytes = encodeContent
        if not WebSocketClientHandler.sendData(dep.SerializeToString()):
            logger.error("发送RPC错误,目标节点:%s,请求ID:%s,RPC ID:%s", targetNodeId,
                         reqId, rpcId)
            return False
        return True
Example #13
    def test_decompress_update_invalid(self):
        with self.assertRaises(TypeError):
            decompress_update()
        with self.assertRaises(TypeError):
            decompress_update(1)
        # invalid context
        with self.assertRaises(ValueError):
            decompress_update(create_compression_context(), b" ")

        ctx = create_decompression_context()

        with self.assertRaises(TypeError):
            decompress_update(ctx, b" ", chunk_len="1")
        with self.assertRaises(ValueError):
            decompress_update(ctx, b" ", chunk_len=0)

        in_raw = compress(LONG_INPUT, checksum=True)

        ret = decompress_update(ctx, in_raw[:512], chunk_len=2)
        # input_hint
        self.assertTrue(ret.pop() > 0)
        # chunk length
        self.assertTrue(len(ret) > 0)
        self.assertTrue(all(1 <= len(chunk) <= 2 for chunk in ret))

        # invalid input (from start of frame)
        with self.assertRaisesLz4FramedError(LZ4F_ERROR_GENERIC):
            decompress_update(ctx, in_raw)

        # checksum invalid
        in_raw = in_raw[:-4] + b"1234"
        ctx = create_decompression_context()
        with self.assertRaisesLz4FramedError(LZ4F_ERROR_contentChecksum_invalid):
            decompress_update(ctx, in_raw)
Example #14
    def test_decompress_update_invalid(self):
        with self.assertRaises(TypeError):
            decompress_update()
        with self.assertRaises(TypeError):
            decompress_update(1)
        # invalid context
        with self.assertRaises(ValueError):
            decompress_update(create_compression_context(), b' ')

        ctx = create_decompression_context()

        with self.assertRaises(TypeError):
            decompress_update(ctx, b' ', chunk_len='1')
        with self.assertRaises(ValueError):
            decompress_update(ctx, b' ', chunk_len=0)

        in_raw = compress(LONG_INPUT, checksum=True)

        ret = decompress_update(ctx, in_raw[:512], chunk_len=2)
        # input_hint
        self.assertTrue(ret.pop() > 0)
        # chunk length
        self.assertTrue(len(ret) > 0)
        self.assertTrue(all(1 <= len(chunk) <= 2 for chunk in ret))

        # invalid input (from start of frame)
        with self.assertRaisesLz4FramedError(LZ4F_ERROR_GENERIC):
            decompress_update(ctx, in_raw)

        # checksum invalid
        in_raw = in_raw[:-4] + b'1234'
        ctx = create_decompression_context()
        with self.assertRaisesLz4FramedError(
                LZ4F_ERROR_contentChecksum_invalid):
            decompress_update(ctx, in_raw)
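The pattern these tests exercise generalizes to streaming decompression: the final element of the list returned by decompress_update() is an input-size hint, and the remaining elements are decompressed chunks. A minimal sketch under that assumption (stream_decompress is a hypothetical helper; 15 bytes covers the frame header, as in the test above):

from lz4framed import create_decompression_context, decompress_update

def stream_decompress(fp, out, header_len=15):
    ctx = create_decompression_context()
    hint = header_len
    while hint:  # a hint of 0 marks the end of the frame
        data = fp.read(hint)
        if not data:
            break
        ret = decompress_update(ctx, data)
        hint = ret.pop()
        for chunk in ret:
            out.write(chunk)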
Example #15
    def save_from_metashape(cls, geo_arr, tex_arr, save_path, frame, **kwargs):
        header = cls.get_header_template()
        header.update({'frame': frame, **kwargs})

        # geo
        print('Convert geo')
        geo_buffer = lz4framed.compress(geo_arr.tobytes())
        header['geo_buffer_size'] = len(geo_buffer)
        header['geo_faces'] = int(len(geo_arr[0]) / 3)

        # texture
        print('Convert texture')
        tex_arr = np.copy(tex_arr)
        texture_buffer = jpeg_coder.encode(tex_arr,
                                           quality=header['texture_quality'])
        header['texture_buffer_size'] = len(texture_buffer)
        header['texture_width'] = tex_arr.shape[1]
        header['texture_height'] = tex_arr.shape[0]

        # pack
        print('save 4dp')
        header_buffer = struct.pack(cls.header_format, *header.values())
        header_buffer = header_buffer.ljust(cls.header_size, b'\0')

        with open(save_path, 'wb') as f:
            for buffer in (header_buffer, geo_buffer, texture_buffer):
                f.write(buffer)
Example #16
    def test_decompressor_fp(self):
        # levels > 10 (v1.7.5) are significantly slower
        for level in (0, 10):
            out_bytes = BytesIO()
            for chunk in Decompressor(
                    BytesIO(compress(LONG_INPUT, level=level))):
                out_bytes.write(chunk)
            self.assertEqual(out_bytes.getvalue(), LONG_INPUT)

        # incomplete frame
        out_bytes.truncate()
        with self.assertRaises(Lz4FramedNoDataError):
            for chunk in Decompressor(BytesIO(compress(LONG_INPUT)[:-32])):
                out_bytes.write(chunk)
        # some data should have been written
        out_bytes.seek(0, SEEK_END)
        self.assertTrue(out_bytes.tell() > 0)
Example #17
def compressed_size(sequences,
                    algorithm,
                    reverse_complement=False,
                    save_directory=None,
                    BWT=False,
                    bwte_inputs={}):
    '''
    Calculates the compressed size of the sequences in a file or tuple of files.

    Args:
        sequences (pathlib.Path or tuple): Either the :obj:`~pathlib.Path` of the FASTA file to compress or a tuple of FASTA files to concatenate and compress.
        algorithm (str): Which algorithm to compress the file with. Valid options are [``lzma``, ``gzip``, ``bzip2``, ``zlib``, ``lz4``, ``bwt-disk-rle-range``, ``bwt-disk-dna5-symbol``].
        reverse_complement (bool, optional): Whether to take the reverse complement of the sequences in the file.
        save_directory (pathlib.Path, optional): If given, where to save the compressed file.

    Note:
        The entire file is not compressed, just the sequences within it.

    Returns:
        tuple: A tuple whose zeroth element is ``sequences`` (_i.e._ either a :obj:`~pathlib.Path` or a tuple) and first element is the number of bytes in the compressed file.
    '''

    sequence = extract_sequences(sequences,
                                 reverse_complement=reverse_complement)
    extension = {
        "lzma": ".lzma",
        "gzip": ".gz",
        "bzip2": ".bz2",
        "zlib": ".ZLIB",
        "lz4": ".lz4"
    }
    file_ext = extension[algorithm]

    sequence = bytes(sequence, encoding="utf-8")

    if algorithm == "lzma":
        compressed_seq = lzma.compress(sequence)
    elif algorithm == "gzip":
        compressed_seq = gzip.compress(sequence)
    elif algorithm == "bzip2":
        compressed_seq = bz2.compress(sequence)
    elif algorithm == "zlib":
        compressed_seq = zlib.compress(sequence)
    elif algorithm == "lz4":
        compressed_seq = lz4framed.compress(sequence)

    if save_directory:
        if type(sequences) == tuple:
            out_file = sequences[0].stem + sequences[1].name
        else:
            out_file = sequences.name
        with open(os.path.join(save_directory.absolute(), out_file + file_ext),
                  'wb') as f:
            f.write(compressed_seq)

    return (sequences, sys.getsizeof(compressed_seq))
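A hypothetical call, assuming a FASTA file on disk (the filename is illustrative):

from pathlib import Path

sequences, n_bytes = compressed_size(Path('genome.fasta'), 'lz4')
print('{}: {} compressed bytes'.format(sequences, n_bytes))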
Example #18
 def store(self, fname):
     self.logger.info("storing compressed pickle stream to '{0}'".format(fname))
     import cPickle as pickle  # Python 2 code (cPickle / file() builtins)
     import lz4framed as LZ4
     pick = pickle.dumps(self.data, 2)
     self.logger.debug("pickle done. compressing {0} bytes...".format(len(pick)))
     comp = LZ4.compress(pick)
     self.logger.debug("compression done. writing {0} bytes...".format(len(comp)))
     file(fname,'wb').write(comp)
     self.logger.info("done.")
Example #19
def compressed_size(sequences, algorithm, saveCompression, comparison):
    extension = {
        "lzma": ".lzma",
        "gzip": ".gz",
        "bzip2": ".bz2",
        "zlib": ".ZLIB",
        "lz4": ".lz4"
    }
    if algorithm == "lzma":
        compressed_seq1 = lzma.compress(sequences[0])
        compressed_seq2 = lzma.compress(sequences[1])
        compressed_seqconcat = lzma.compress(sequences[2])
    if algorithm == "gzip":
        compressed_seq1 = gzip.compress(sequences[0])
        compressed_seq2 = gzip.compress(sequences[1])
        compressed_seqconcat = gzip.compress(sequences[2])
    if algorithm == "bzip2":
        compressed_seq1 = bz2.compress(sequences[0])
        compressed_seq2 = bz2.compress(sequences[1])
        compressed_seqconcat = bz2.compress(sequences[2])
    if algorithm == "zlib":
        compressed_seq1 = zlib.compress(sequences[0])
        compressed_seq2 = zlib.compress(sequences[1])
        compressed_seqconcat = zlib.compress(sequences[2])
    if algorithm == "lz4":
        compressed_seq1 = lz4framed.compress(sequences[0])
        compressed_seq2 = lz4framed.compress(sequences[1])
        compressed_seqconcat = lz4framed.compress(sequences[2])
    if algorithm == 'snappy':
        compressed_seq1 = snappy.compress(sequences[0])
        compressed_seq2 = snappy.compress(sequences[1])
        compressed_seqconcat = snappy.compress(sequences[2])

    if saveCompression != "":
        f = open(os.path.join(saveCompression, comparison[0] + extension[algorithm]), 'wb')
        f.write(compressed_seq1)
        f.close()
        f = open(os.path.join(saveCompression, comparison[1] + extension[algorithm]), 'wb')
        f.write(compressed_seq2)
        f.close()
        f = open(os.path.join(saveCompression, comparison[2] + extension[algorithm]), 'wb')
        f.write(compressed_seqconcat)
        f.close()
Example #20
 def test_decompress_invalid_input(self):
     with self.assertRaisesLz4FramedError(LZ4F_ERROR_frameType_unknown):
         decompress(b'invalidheader')
     with self.assertRaisesRegex(ValueError, 'frame incomplete'):
         decompress(compress(SHORT_INPUT)[:-5])
     # incomplete data (length not specified in header)
     with BytesIO() as out:
         with Compressor(out) as compressor:
             compressor.update(SHORT_INPUT)
         output = out.getvalue()
         with self.assertRaisesRegex(ValueError, 'frame incomplete'):
             decompress(output[:-20])
Example #21
 def test_decompress_invalid_input(self):
     with self.assertRaisesLz4FramedError(LZ4F_ERROR_frameHeader_incomplete):
         decompress(b"invalidheader")
     with self.assertRaisesRegex(ValueError, "frame incomplete"):
         decompress(compress(SHORT_INPUT)[:-5])
     # incomplete data (length not specified in header)
     with BytesIO() as out:
         with Compressor(out) as compressor:
             compressor.update(SHORT_INPUT)
         output = out.getvalue()
         with self.assertRaisesRegex(ValueError, "frame incomplete"):
             decompress(output[:-20])
Example #22
 def store(self, fname):
     self.logger.info(
         "storing compressed pickle stream to '{0}'".format(fname))
     import cPickle as pickle  # Python 2 code (cPickle / file() builtins)
     import lz4framed as LZ4
     pick = pickle.dumps(self.data, 2)
     self.logger.debug("pickle done. compressing {0} bytes...".format(
         len(pick)))
     comp = LZ4.compress(pick)
     self.logger.debug("compression done. writing {0} bytes...".format(
         len(comp)))
     file(fname, 'wb').write(comp)
     self.logger.info("done.")
Example #23
    def compress_file(fname,
                      chunksize=10 * 1024 * 1024,
                      alt_src=None,
                      level=2):
        import tempfile
        tab_file = tempfile.NamedTemporaryFile(mode="w",
                                               dir=os.path.dirname(fname),
                                               delete=False)
        tab_file_name = fname + '.lzot'
        comp_file_name = fname + '.lzoc'
        comp_file = file(comp_file_name, 'wb')  # Python 2 file() builtin
        comp_base = 0
        cum_size = 0
        t0 = time()

        tab_file.write('{0}\n'.format(chunksize))

        for chunk in chunks(fname, chunksize, alt_src=alt_src):
            uncomp_size = len(chunk)

            t1 = time()
            comp = Z.compress(chunk, level=level)
            comp_size = len(comp)
            comp_file.write(comp)
            ratio = 100. * float(comp_size) / uncomp_size
            t2 = time()
            throughput = cum_size / (t2 - t0)

            tab_file.write('{0}\n'.format(comp_base))
            comp_base += comp_size
            cum_size += uncomp_size

            logging.debug(
                "compressed {0}MB ({1:.1f}%) in {2:.1f} sec, {3:.2f} MB/s  sec"
                .format(chunksize / MB, ratio, t2 - t1, throughput / MB))

        tab_file.write('{0}\n'.format(comp_base))
        tab_file.write('{0}\n'.format(cum_size))

        # make sure everything is on disk (flush Python's buffer before fsync)
        tab_file.flush()
        os.fsync(tab_file)
        tab_file.close()

        # this is atomic on POSIX as we have created tmp in the same directory,
        # therefore same filesystem
        os.rename(tab_file.name, tab_file_name)

        # make it accessible to everyone
        import stat
        os.chmod(tab_file_name, stat.S_IROTH | stat.S_IRGRP | stat.S_IRUSR)
        os.chmod(comp_file_name, stat.S_IROTH | stat.S_IRGRP | stat.S_IRUSR)
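To show what the .lzot table enables, here is a hypothetical random-access reader (read_chunk is not part of the original; it assumes Z is the same compression module used above). The table holds the chunk size, one byte offset per compressed chunk, the total compressed size, and finally the total uncompressed size:

    def read_chunk(fname, chunk_idx):
        with open(fname + '.lzot') as tab:
            lines = [int(line) for line in tab]
        # lines[0] is the chunk size, lines[-1] the uncompressed total;
        # the rest is the offset table (its last entry is the total compressed size)
        offsets = lines[1:-1]
        with open(fname + '.lzoc', 'rb') as comp:
            comp.seek(offsets[chunk_idx])
            raw = comp.read(offsets[chunk_idx + 1] - offsets[chunk_idx])
        return Z.decompress(raw)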
Example #24
def newGame():
	global ob, reward, step_count, episode_reward, lastFrame, lastFrameOrig, lastOb, lastObOrig, lastFrameCompressed, lastObCompressed
	ob = env.reset()
	noop_count = np.random.randint(0, NO_OP_MAX + 1)  # random_integers is deprecated; randint's upper bound is exclusive
	for I in range(noop_count):
		ob, _, done, _ = env.step(0)
		if done:
			print('Game terminated during warm up')
	reward = 0
	step_count = 0
	episode_reward = 0
	lastFrame, lastFrameOrig = preprocess(ob)
	lastOb = None
	lastObOrig = None
	lastObCompressed = None
	lastFrameCompressed = lz4framed.compress(lastFrame)
Example #25
def numpy_to_lz4f(data, savepath=None, level=9, overwrite=None):
    """Do lz4-framed compression on `data`. (Install compressor via
    `!pip install py-lz4framed`)

    Arguments:
        data: np.ndarray
            Data to compress.
        savepath: str
            Path to where to save file.
        level: int
            1 to 9; higher = greater compression
        overwrite: bool
            If `savepath` file exists,

                - True  -> replace it
                - False -> don't replace it
                - None  -> ask confirmation via user input

    **Returns**:
        np.ndarray - compressed array.

    **Example**:

    >>> numpy_to_lz4f(savedata, savepath=path)
    ...
    >>> # load & decompress
    >>> bytes_npy = lz4f.decompress(np.load(path))
    >>> loaddata = np.frombuffer(bytes_npy,
    ...                          dtype=savedata.dtype,  # must be original's
    ...                          ).reshape(*savedata.shape)
    """

    if lz4f is None:
        raise Exception(
            "cannot convert to lz4f without `lz4framed` installed; "
            "run `pip install py-lz4framed`")
    data = data.tobytes()
    data = lz4f.compress(data, level=level)

    if savepath is not None:
        if Path(savepath).suffix != '.npy':
            print(WARN, "`savepath` extension must be '.npy'; will append")
            savepath += '.npy'
        _validate_savepath(savepath, overwrite)
        np.save(savepath, data)
        print("lz4f-compressed data saved to", savepath)
    return data
Example #26
    def test_get_frame_info(self):
        with self.assertRaises(TypeError):
            get_frame_info()
        with self.assertRaises(ValueError):
            get_frame_info(create_compression_context())

        ctx = create_decompression_context()
        with self.assertRaisesLz4FramedError(LZ4F_ERROR_frameHeader_incomplete):
            get_frame_info(ctx)
        # compress with non-default arguments, check info structure
        args = {"checksum": True, "block_size_id": LZ4F_BLOCKSIZE_MAX256KB, "block_mode_linked": False}
        # Using long input since lz4 adjusts the block size if the input is smaller than one block
        decompress_update(ctx, compress(LONG_INPUT, **args)[:15])
        info = get_frame_info(ctx)
        self.assertTrue(info.pop("input_hint", 0) > 0)
        args["length"] = len(LONG_INPUT)
        self.assertEqual(info, args)
Example #27
    def compress_file(fname, chunksize=10 * 1024 * 1024, alt_src=None, level=2):
        import tempfile
        tab_file = tempfile.NamedTemporaryFile(mode="w", dir=os.path.dirname(fname), delete=False)
        tab_file_name = fname + '.lzot'
        comp_file_name = fname + '.lzoc'
        comp_file = file(comp_file_name, 'wb')  # Python 2 file() builtin
        comp_base = 0
        cum_size = 0
        t0 = time()

        tab_file.write('{0}\n'.format(chunksize))

        for chunk in chunks(fname, chunksize, alt_src=alt_src):
            uncomp_size = len(chunk)

            t1 = time()
            comp = Z.compress(chunk, level=level)
            comp_size = len(comp)
            comp_file.write(comp)
            ratio = 100. * float(comp_size) / uncomp_size
            t2 = time()
            throughput = cum_size / (t2 - t0)

            tab_file.write('{0}\n'.format(comp_base))
            comp_base += comp_size
            cum_size += uncomp_size

            logging.debug("compressed {0}MB ({1:.1f}%) in {2:.1f} sec, {3:.2f} MB/s".format(
                chunksize / MB, ratio, t2 - t1, throughput / MB))

        tab_file.write('{0}\n'.format(comp_base))
        tab_file.write('{0}\n'.format(cum_size))

        # make sure everything is on disk (flush Python's buffer before fsync)
        tab_file.flush()
        os.fsync(tab_file)
        tab_file.close()

        # this is atomic on POSIX as we have created tmp in the same directory,
        # therefore same filesystem
        os.rename(tab_file.name, tab_file_name)

        # make it accessible to everyone
        import stat
        os.chmod(tab_file_name, stat.S_IROTH | stat.S_IRGRP | stat.S_IRUSR)
        os.chmod(comp_file_name, stat.S_IROTH | stat.S_IRGRP | stat.S_IRUSR)
Example #28
    def test_get_frame_info(self):
        with self.assertRaises(TypeError):
            get_frame_info()
        with self.assertRaises(ValueError):
            get_frame_info(create_compression_context())

        ctx = create_decompression_context()
        with self.assertRaisesLz4FramedError(LZ4F_ERROR_srcPtr_wrong):
            get_frame_info(ctx)
        # compress with non-default arguments, check info structure
        args = {
            'checksum': True,
            'block_size_id': LZ4F_BLOCKSIZE_MAX256KB,
            'block_mode_linked': False
        }
        # Using long input since lz4 adjusts the block size if the input is smaller than one block
        decompress_update(ctx, compress(LONG_INPUT, **args)[:15])
        info = get_frame_info(ctx)
        self.assertTrue(info.pop('input_hint', 0) > 0)
        args['length'] = len(LONG_INPUT)
        self.assertEqual(info, args)
Example #29
def retreive_screenshot(conn):
    with mss() as sct:
        # The region to capture
        rect = {'top': 0, 'left': 0, 'width': WIDTH, 'height': HEIGHT}

        while 'recording':
            # Capture the screen
            img = sct.grab(rect)
            # Tweak the compression level here (0-9)
            pixels = lz4framed.compress(img.rgb)  # compress(img.rgb, 6)

            print(img.rgb)

            # Send the size of the pixels length
            size = len(pixels)
            size_len = (size.bit_length() + 7) // 8
            conn.send(bytes([size_len]))

            # Send the actual pixels length
            size_bytes = size.to_bytes(size_len, 'big')
            conn.send(size_bytes)

            # Send pixels
            conn.sendall(pixels)
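The client side would reverse this length-prefixed protocol. A sketch (recv_exact and receive_screenshot are hypothetical names):

import lz4framed

def recv_exact(conn, n):
    # read exactly n bytes from the socket
    buf = b''
    while len(buf) < n:
        chunk = conn.recv(n - len(buf))
        if not chunk:
            raise ConnectionError('socket closed mid-message')
        buf += chunk
    return buf

def receive_screenshot(conn):
    # 1 byte for the size of the length field, then the length itself, then the pixels
    size_len = recv_exact(conn, 1)[0]
    size = int.from_bytes(recv_exact(conn, size_len), 'big')
    return lz4framed.decompress(recv_exact(conn, size))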
Example #30
 def check_compress_long(self, *args, **kwargs):
     self.assertEqual(LONG_INPUT, decompress(compress(LONG_INPUT, *args, **kwargs)))
Example #31
 def test_compress_linked_mode(self):
     with self.assertRaises(TypeError):
         compress(SHORT_INPUT, block_mode_linked=None)
     self.check_compress_short(block_mode_linked=True)
     self.check_compress_short(block_mode_linked=False)
Example #32
 def __init__(self, arr):
     self._shape = arr.shape
     self._type = arr.dtype
     self._data = lz4framed.compress(arr.tobytes())
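Only __init__ is shown; a natural accessor (a sketch, not part of the original class, assuming numpy is imported as np) would rebuild the array from the stored metadata:

 def unpack(self):
     # np.frombuffer returns a read-only view over the decompressed bytes
     flat = np.frombuffer(lz4framed.decompress(self._data), dtype=self._type)
     return flat.reshape(self._shape)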
Example #33
 def test_compress_minimal(self):
     with self.assertRaises(TypeError):
         compress()
     with self.assertRaises(Lz4FramedNoDataError):
         compress(b'')
     self.check_compress_short()
Example #34
 def check_compress_long(self, *args, **kwargs):
     self.assertEqual(LONG_INPUT,
                      decompress(compress(LONG_INPUT, *args, **kwargs)))
Example #35
 def check_compress_short(self, *args, **kwargs):
     self.assertEqual(SHORT_INPUT,
                      decompress(compress(SHORT_INPUT, *args, **kwargs)))
Example #36
 def test_compress_block_checksum(self):
     with self.assertRaises(TypeError):
         compress(SHORT_INPUT, block_checksum=None)
     self.check_compress_short(block_checksum=True)
     self.check_compress_short(block_checksum=False)
Example #37
 def test_compress_memoryview(self):
     view = memoryview(LONG_INPUT)
     self.assertEqual(view, decompress(compress(view)))
Example #38
 def test_decompress_memoryview(self):
     view = memoryview(compress(LONG_INPUT))
     self.assertEqual(LONG_INPUT, decompress(view))
Example #39
 def test_decompress_update_memoryview(self):  # pylint: disable=invalid-name
     ctx = create_decompression_context()
     data = decompress_update(ctx, memoryview(compress(LONG_INPUT)))
     self.assertEqual(b''.join(data[:-1]), LONG_INPUT)
Example #40
 def check_compress_short(self, *args, **kwargs):
     self.assertEqual(SHORT_INPUT, decompress(compress(SHORT_INPUT, *args, **kwargs)))
Example #41
 def test_compress_minimal(self):
     with self.assertRaises(TypeError):
         compress()
     with self.assertRaises(Lz4FramedNoDataError):
         compress(b"")
     self.check_compress_short()
Example #42
 def test_compress_linked_mode(self):
     with self.assertRaises(TypeError):
         compress(SHORT_INPUT, block_mode_linked=None)
     self.check_compress_short(block_mode_linked=True)
     self.check_compress_short(block_mode_linked=False)
Example #43
def dumps_pyarrow(obj):
    return lz4framed.compress(pa.serialize(obj).to_buffer())
Example #44
def serialize_and_compress(obj: Any):
    return lz4framed.compress(pyarrow.serialize(obj).to_buffer())
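And the hypothetical inverse (note that pyarrow.serialize/deserialize are deprecated in recent pyarrow releases):

def decompress_and_deserialize(data: bytes):
    return pyarrow.deserialize(lz4framed.decompress(data))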