def recv_mm(recieveSocket):
    """
    Receive compressed numpy data on the provided socket.

    This function calls __recv_all_mm() to retrieve the compressed data.
    Once the length-prefixed compressed data has been received, zfpy
    decompresses it.

    Args:
        recieveSocket (socket): The socket on which the data is being received

    Returns:
        ndarray: The decompressed numpy array
    """
    try:
        rawDataLen = __recv_all_mm(recieveSocket, 4)
        if not rawDataLen:
            return None
    except Exception as e:
        raise e
    dataLen = struct.unpack('>I', rawDataLen)[0]
    data = __recv_all_mm(recieveSocket, dataLen)
    frame = zfpy.decompress_numpy(bytes(data))
    return frame
def mat_receive_comp(recieveSocket, logger):
    """Receive a length-prefixed, zfp-compressed frame and return it as a numpy array."""
    rawDataLen = __mat_receive_all_comp(recieveSocket, 4)
    if not rawDataLen:
        return None
    dataLen = struct.unpack('>I', rawDataLen)[0]
    data = __mat_receive_all_comp(recieveSocket, dataLen)
    frame = zfpy.decompress_numpy(bytes(data))
    logger.debug("frame received")
    return frame
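# Both receive helpers above assume the same framing: a 4-byte big-endian length
# followed by the zfp-compressed payload. A minimal sketch of the matching send
# side is shown below; send_mm and sendSocket are hypothetical names, not taken
# from the original code.
import struct

import numpy as np
import zfpy


def send_mm(sendSocket, frame):
    """Compress a numpy array with zfpy and send it with a 4-byte length prefix."""
    payload = zfpy.compress_numpy(np.ascontiguousarray(frame), write_header=True)
    # Prefix with the payload size so the receiver knows how many bytes to read.
    sendSocket.sendall(struct.pack('>I', len(payload)) + payload)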
def queue_data_to_numpy_data(self, queue_data):
    """
    Convert queue data that was serialized through pyro to numpy data.

    :param queue_data: Queue data.
    :return: Numpy array.
    """
    if self.compression_type == 'lz4':
        return np.frombuffer(
            lz4.frame.decompress(queue_data[0]), dtype=queue_data[2]
        ).reshape(queue_data[1])
    elif self.compression_type == 'zfp':
        return zfpy.decompress_numpy(queue_data)
    return np.frombuffer(queue_data[0], dtype=queue_data[2]).reshape(queue_data[1])
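# A hedged sketch of the inverse conversion implied by queue_data_to_numpy_data:
# lz4 and uncompressed data travel as a (bytes, shape, dtype) tuple, while zfp
# data is a single self-describing buffer (zfpy embeds shape and dtype in its
# stream header). The method name below is an assumption, not taken from the
# original code.
def numpy_data_to_queue_data(self, array):
    if self.compression_type == 'lz4':
        return (lz4.frame.compress(array.tobytes()), array.shape, array.dtype.str)
    elif self.compression_type == 'zfp':
        # write_header=True (the default) stores shape/dtype inside the stream
        return zfpy.compress_numpy(np.ascontiguousarray(array))
    return (array.tobytes(), array.shape, array.dtype.str)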
def test_TS_01(self):
    import xarray as xr
    import zfpy

    ds = xr.open_dataset('../data/orig.TS.100days.nc')
    TS = ds.TS.values
    TS_compressed = zfpy.compress_numpy(TS, tolerance=0.01)
    TS_decompressed = zfpy.decompress_numpy(TS_compressed)
    em = ErrorMetrics(observed=TS, modelled=TS_decompressed)
    print("mean squared error: ", em.mean_squared_error)
    em.get_all_metrics()
    print(em.get_all_metrics(exclude={"error", "squared_error", "absolute_error"}))
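# The test above compresses in fixed-accuracy mode (tolerance=0.01), so the
# pointwise absolute error is bounded by the tolerance. A minimal check of that
# bound with plain numpy (independent of the ErrorMetrics helper) might look
# like this; the synthetic data below is an illustration only.
import numpy as np
import zfpy

data = np.linspace(0.0, 1.0, 10000).reshape(100, 100)
restored = zfpy.decompress_numpy(zfpy.compress_numpy(data, tolerance=0.01))
assert np.max(np.abs(restored - data)) <= 0.01  # fixed-accuracy error bound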
def decode(self, buf, out=None):
    # normalise inputs
    buf = ensure_bytes(buf)
    if out is not None:
        out = ensure_contiguous_ndarray(out)

    # do decompression
    dec = _zfpy.decompress_numpy(buf)

    # handle destination
    if out is not None:
        return ndarray_copy(dec, out)
    else:
        return dec
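# The decode above follows a numcodecs-style Codec interface (ensure_bytes,
# ensure_contiguous_ndarray and ndarray_copy are numcodecs.compat helpers). A
# hedged round-trip sketch, assuming a ZFPY codec class whose encode calls
# _zfpy.compress_numpy with write_header=True and a fixed-accuracy tolerance:
import numpy as np

arr = np.arange(100, dtype=np.float64).reshape(10, 10)
codec = ZFPY(tolerance=1e-3)   # assumed constructor, illustration only
buf = codec.encode(arr)        # compressed bytes with an embedded zfp header
out = np.empty_like(arr)
codec.decode(buf, out=out)     # decompress into a preallocated destination
assert np.allclose(out, arr, atol=1e-3)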
def zfpy_decompress(c_bitstring):
    """Decompress a zfp-compressed bitstring and return the result as a Python list."""
    np_arr = zfpy.decompress_numpy(c_bitstring)
    return np_arr.tolist()
def zfp_decode(data, shape=None, dtype=None, out=None):
    """Decompress ZFP."""
    # shape, dtype and out are presumably accepted for interface compatibility;
    # the zfp stream header already describes the array, so they are unused here.
    return zfp.decompress_numpy(data)
def lossless_round_trip(self, orig_array):
    compressed_array = zfpy.compress_numpy(orig_array, write_header=True)
    decompressed_array = zfpy.decompress_numpy(compressed_array)
    self.assertIsNone(
        np.testing.assert_array_equal(decompressed_array, orig_array)
    )
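# With no tolerance, rate or precision argument, zfpy.compress_numpy uses zfp's
# reversible (lossless) mode, which is what makes the exact-equality assertion
# above valid. A hedged sketch of how the helper might be exercised (the test
# name and sample arrays are assumptions):
def test_lossless_round_trips(self):
    for dtype in (np.float32, np.float64, np.int32, np.int64):
        self.lossless_round_trip(np.arange(64, dtype=dtype).reshape(8, 8))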
def test_utils(self):
    for ndims in range(1, 5):
        for ztype, ztype_str in [
                (zfpy.type_float, "float"),
                (zfpy.type_double, "double"),
                (zfpy.type_int32, "int32"),
                (zfpy.type_int64, "int64"),
        ]:
            orig_random_array = test_utils.getRandNumpyArray(ndims, ztype)
            orig_random_array_dims = orig_random_array.shape + tuple(
                0 for i in range(4 - orig_random_array.ndim)
            )
            orig_checksum = test_utils.getChecksumOrigArray(
                orig_random_array_dims, ztype
            )
            actual_checksum = test_utils.hashNumpyArray(orig_random_array)
            self.assertEqual(orig_checksum, actual_checksum)

            for stride_str, stride_config in [
                    ("as_is", test_utils.stride_as_is),
                    ("permuted", test_utils.stride_permuted),
                    ("interleaved", test_utils.stride_interleaved),
                    #("reversed", test_utils.stride_reversed),
            ]:
                # permuting a 1D array is not supported
                if stride_config == test_utils.stride_permuted and ndims == 1:
                    continue
                random_array = test_utils.generateStridedRandomNumpyArray(
                    stride_config, orig_random_array
                )
                random_array_dims = random_array.shape + tuple(
                    0 for i in range(4 - random_array.ndim)
                )
                self.assertTrue(np.equal(orig_random_array, random_array).all())

                for compress_param_num in range(3):
                    modes = [
                        (zfpy.mode_fixed_accuracy, "tolerance"),
                        (zfpy.mode_fixed_precision, "precision"),
                        (zfpy.mode_fixed_rate, "rate"),
                    ]
                    if ztype in [zfpy.type_int32, zfpy.type_int64]:
                        # only fixed-rate is supported for integers
                        modes = [modes[-1]]

                    for mode, mode_str in modes:
                        # Compression
                        compression_kwargs = {
                            mode_str: test_utils.computeParameterValue(
                                mode, compress_param_num
                            ),
                        }
                        compressed_array = zfpy.compress_numpy(
                            random_array, write_header=False, **compression_kwargs
                        )
                        compressed_checksum = test_utils.getChecksumCompArray(
                            random_array_dims, ztype, mode, compress_param_num
                        )
                        actual_checksum = test_utils.hashCompressedArray(
                            compressed_array
                        )
                        self.assertEqual(compressed_checksum, actual_checksum)

                        # Decompression
                        decompressed_checksum = test_utils.getChecksumDecompArray(
                            random_array_dims, ztype, mode, compress_param_num
                        )

                        # Decompression using the "public" interface
                        # requires a header, so re-compress with the header
                        # included in the stream
                        compressed_array = zfpy.compress_numpy(
                            random_array, write_header=True, **compression_kwargs
                        )
                        decompressed_array = zfpy.decompress_numpy(compressed_array)
                        actual_checksum = test_utils.hashNumpyArray(decompressed_array)
                        self.assertEqual(decompressed_checksum, actual_checksum)
def get_se_zfp(ie, dir=None, loadinto=None):
    """Load zfp-compressed real and imaginary parts for index `ie` from `dir` into `loadinto`."""
    loadinto.real[:, :] = zfpy.decompress_numpy(
        (dir / f"SE_E{ie:>03d}_REAL.zfp").open("rb").read())
    loadinto.imag[:, :] = zfpy.decompress_numpy(
        (dir / f"SE_E{ie:>03d}_IMAG.zfp").open("rb").read())
    return loadinto
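# get_se_zfp reads two zfp streams per index, one for the real part and one for
# the imaginary part. A hedged sketch of the matching writer (the name put_se_zfp
# and the tolerance value are assumptions, not taken from the original code):
def put_se_zfp(ie, se, dir=None, tolerance=1e-6):
    # .real/.imag of a complex array are strided views, so make them contiguous
    # before handing them to zfpy.
    (dir / f"SE_E{ie:>03d}_REAL.zfp").write_bytes(
        zfpy.compress_numpy(np.ascontiguousarray(se.real), tolerance=tolerance))
    (dir / f"SE_E{ie:>03d}_IMAG.zfp").write_bytes(
        zfpy.compress_numpy(np.ascontiguousarray(se.imag), tolerance=tolerance))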