def write_blosc(stream, data, compression='lz4', level=5, shuffle=True):
    """Compress *data* with blosc and write it to *stream* with a JSON header.

    The header (written via ``write_json``) records the uncompressed byte
    size, the compressed payload length, the compression settings, and the
    array's shape/dtype so a reader can reconstruct it.

    :param stream: writable binary stream
    :param data: array-like; must be contiguous and not of object dtype
    :param compression: blosc codec name, optionally prefixed with 'blosc:'
    :param level: blosc compression level
    :param shuffle: whether to apply blosc's shuffle filter
    :returns: total number of bytes written (header plus payload)
    :raises ValueError: on object-dtype or non-contiguous input
    """
    # Accept names of the form 'blosc:<cname>' by dropping the prefix.
    if isinstance(compression, six.string_types):
        if compression.startswith('blosc:'):
            compression = compression[6:]

    arr = np.asanyarray(data)
    if arr.dtype == np.dtype('O'):
        raise ValueError('unable to serialize: invalid dtype')
    if not arr.flags.contiguous:
        raise ValueError('expected contiguous array')

    # compress_ptr reads directly from the array's buffer (zero-copy input).
    payload = blosc.compress_ptr(
        arr.__array_interface__['data'][0],
        arr.size,
        arr.dtype.itemsize,
        cname=compression,
        clevel=level,
        shuffle=shuffle,
    )

    meta = dict(
        size=arr.size * arr.dtype.itemsize,
        length=len(payload),
        comp=(compression, level, int(shuffle)),
        shape=arr.shape,
        dtype=flatten_dtype(arr.dtype),
    )
    header_bytes = write_json(stream, meta)
    stream.write(payload)
    return header_bytes + len(payload)
def test_read_write_json(data):
    """Round-trip *data* through write_json/read_json and verify the framing.

    Checks that write_json reports the exact number of bytes it wrote, that
    the stream starts with an i64 length prefix covering the JSON payload,
    and that both manual decoding and read_json recover the original value.
    """
    buf = BytesIO()
    written = write_json(buf, data)
    assert buf.tell() == written

    buf.seek(0)
    payload_len = read_i64(buf)
    # Whatever precedes the payload (the length prefix) accounts for the
    # difference between total bytes written and the payload length.
    assert buf.tell() == written - payload_len
    decoded = json.loads(buf.read(payload_len).decode('utf-8'))
    assert decoded == data

    buf.seek(0)
    assert read_json(buf) == data
def _write_index(self):
    """Write the in-memory index and seek offset to the backing file handle.

    NOTE(review): ``truncate()`` with no argument truncates at the handle's
    *current* position — this assumes the caller has already positioned
    ``self._handle`` where the index should begin (likely ``self._seek``);
    confirm against the callers of this method.
    """
    # Discard any stale bytes beyond the current position before rewriting.
    self._handle.truncate()
    # Index first, then the seek offset — presumably a reader locates the
    # index via this trailing i64; verify against the corresponding loader.
    write_json(self._handle, self._index)
    write_i64(self._handle, self._seek)