def decompress(points):
    """
    Decode a pcpatch retrieved from pgpointcloud.
    'points' is the patch in wkb.
    """
    # retrieve the number of points in the wkb pgpointcloud patch
    npoints = utils.npoints_from_wkb_pcpatch(points)
    hexbuffer = utils.hexdata_from_wkb_pcpatch(points)
    # lazperf reads the point count from the last four bytes of the buffer
    hexbuffer += utils.hexa_signed_int32(npoints)

    # uncompress
    s = json.dumps(GreyhoundReadSchema().json()).replace("\\", "")
    dtype = buildNumpyDescription(json.loads(s))

    lazdata = bytes(hexbuffer)
    arr = numpy.frombuffer(lazdata, dtype=numpy.uint8)
    d = Decompressor(arr, s)
    output = numpy.zeros(npoints * dtype.itemsize, dtype=numpy.uint8)
    decompressed = d.decompress(output)
    decompressed_str = decompressed.tobytes()

    return [decompressed_str, dtype.itemsize]
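# Usage sketch for decompress() above. The database name, the 'patches'
# table and its 'pa' pcpatch column are made-up names, and psycopg2 is only
# one possible driver; decompress(), GreyhoundReadSchema and
# buildNumpyDescription are assumed to be the helpers defined or imported
# in this module.
def example_decode_one_patch():
    import json
    import numpy
    import psycopg2  # assumption: any client returning the pcpatch works

    conn = psycopg2.connect("dbname=pointclouds")  # hypothetical database
    cur = conn.cursor()
    cur.execute("SELECT pa FROM patches LIMIT 1")  # hypothetical table/column
    wkb_patch = cur.fetchone()[0]

    raw_bytes, itemsize = decompress(wkb_patch)

    # rebuild the same structured dtype that decompress() uses internally
    s = json.dumps(GreyhoundReadSchema().json()).replace("\\", "")
    dtype = buildNumpyDescription(json.loads(s))
    assert dtype.itemsize == itemsize

    points = numpy.frombuffer(raw_bytes, dtype=dtype)
    print("decoded", points.shape[0], "points")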
def test_decompressor(self):
    s = json.dumps(schema)

    with open('test/compressed.bin', 'rb') as f:
        data = f.read()
    with open('test/uncompressed.bin', 'rb') as f:
        original = f.read()

    self.assertEqual(len(data), len_compressed,
                     "compressed file length is correct")
    self.assertEqual(len(original), len_uncompressed,
                     "uncompressed file length is correct")

    # last four bytes are the point count
    compressed_point_count = struct.unpack('<L', data[-4:])[0]
    uncompressed_point_count = struct.unpack('<L', original[-4:])[0]
    self.assertEqual(compressed_point_count, uncompressed_point_count,
                     "compressed point count matches expected")
    self.assertEqual(uncompressed_point_count, expected_point_count,
                     "uncompressed point count matches expected")

    arr = np.frombuffer(data, dtype=np.uint8)
    dtype = buildNumpyDescription(json.loads(s))
    self.assertEqual(dtype.itemsize, 54)

    d = Decompressor(arr, s)
    decompressed = d.decompress(compressed_point_count)
    uncompressed = np.frombuffer(original[0:-4], dtype=dtype)

    self.assertEqual(uncompressed.shape[0], expected_point_count)
    self.assertEqual(decompressed.shape[0], expected_point_count)

    for i in range(len(uncompressed)):
        self.assertEqual(uncompressed[i], decompressed[i])
def test_full_loop(self):
    s = json.dumps(schema)

    with open('test/uncompressed.bin', 'rb') as f:
        original = f.read()

    dtype = buildNumpyDescription(json.loads(s))
    uncompressed = np.frombuffer(original[0:-4], dtype=dtype)

    c = Compressor(s)
    compressed = c.compress(uncompressed)

    d = Decompressor(compressed, s)
    decompressed = d.decompress(expected_point_count)

    self.assertEqual(len(decompressed), len(uncompressed))
    for i in range(len(decompressed)):
        self.assertEqual(decompressed[i], uncompressed[i])

    # confirm we can build from dtypes instead of json descriptions
    _ = Compressor(dtype)
    _ = Decompressor(compressed, dtype)
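# Usage sketch: a self-contained round trip along the same lines as
# test_full_loop, but with synthetic points instead of the test fixture.
# The three-dimension Greyhound-style schema layout and the
# `from lazperf import ...` names are assumptions based on how the tests
# above use Compressor, Decompressor and buildNumpyDescription.
def example_round_trip():
    import json
    import numpy as np
    from lazperf import Compressor, Decompressor, buildNumpyDescription

    schema = [
        {"name": "X", "type": "signed", "size": 4},
        {"name": "Y", "type": "signed", "size": 4},
        {"name": "Z", "type": "signed", "size": 4},
    ]
    s = json.dumps(schema)
    dtype = buildNumpyDescription(json.loads(s))

    # synthetic structured points
    points = np.zeros(10, dtype=dtype)
    points["X"] = np.arange(10)
    points["Y"] = np.arange(10) * 2
    points["Z"] = np.arange(10) * 3

    compressed = Compressor(s).compress(points)
    decompressed = Decompressor(compressed, s).decompress(len(points))

    # lossless round trip: the raw bytes must match
    assert decompressed.tobytes() == points.tobytes()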
def test_decompressor(self):
    s = json.dumps(schema)

    with open('test/compressed.bin', 'rb') as f:
        data = f.read()
    with open('test/uncompressed.bin', 'rb') as f:
        original = f.read()

    self.assertEqual(len(data), len_compressed,
                     "compressed file length is correct")
    self.assertEqual(len(original), len_uncompressed,
                     "uncompressed file length is correct")

    # last four bytes are the point count
    compressed_point_count = struct.unpack('<L', data[-4:])[0]
    uncompressed_point_count = struct.unpack('<L', original[-4:])[0]
    self.assertEqual(compressed_point_count, uncompressed_point_count,
                     "compressed point count matches expected")
    self.assertEqual(uncompressed_point_count, expected_point_count,
                     "uncompressed point count matches expected")

    arr = np.frombuffer(data, dtype=np.uint8)
    dtype = buildNumpyDescription(json.loads(s))
    self.assertEqual(dtype.itemsize, 54)

    d = Decompressor(arr, s)
    output = np.zeros(compressed_point_count * dtype.itemsize, dtype=np.uint8)
    decompressed = d.decompress(output)
    uncompressed = np.frombuffer(original[0:-4], dtype=dtype)

    self.assertEqual(uncompressed.shape[0], expected_point_count)
    self.assertEqual(decompressed.shape[0], expected_point_count)

    for i in range(len(uncompressed)):
        self.assertEqual(uncompressed[i], decompressed[i])
def decompress(points, schema):
    """
    Decode a patch encoded with lazperf.
    'points' is a pcpatch in wkb.
    """
    # retrieve the number of points in the wkb pgpointcloud patch
    npoints = patch_numpoints(points)
    # skip the pcpatch header (first 34 hex characters) to get the lazperf data
    hexbuffer = unhexlify(points[34:])
    # lazperf reads the point count from the last four bytes of the buffer
    hexbuffer += hexa_signed_int32(npoints)

    # uncompress
    s = json.dumps(schema).replace("\\", "")
    dtype = buildNumpyDescription(json.loads(s))

    lazdata = bytes(hexbuffer)
    arr = np.frombuffer(lazdata, dtype=np.uint8)
    d = Decompressor(arr, s)
    output = np.zeros(npoints * dtype.itemsize, dtype=np.uint8)
    decompressed = d.decompress(output)

    return decompressed
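# Usage sketch for the schema-parameterised decompress() above. The
# 'wkb_patch' value and the Greyhound-style 'schema' list are assumptions
# (e.g. a pcpatch fetched from pgpointcloud and its matching dimension
# descriptions); the decompressor returns points typed with the schema's
# dtype, mirroring test_decompressor above.
def example_decode(wkb_patch, schema):
    points = decompress(wkb_patch, schema)
    # dimensions can be addressed by name, assuming X/Y/Z are in the schema
    return points["X"], points["Y"], points["Z"]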