Пример #1
0
    def read(self, bounds, depthBegin, depthEnd, compress=False):
        """Fetch points from the Greyhound /read endpoint.

        Queries the server for the given bounds and depth range and
        returns the points as a structured numpy array built from the
        server-advertised schema/dtype.  When ``compress`` is true the
        payload is laz-compressed and decoded locally with lazperf.
        """
        import lazperf
        import json
        import numpy as np

        # the server expects a lowercase string flag, not a Python bool
        compressed = 'true' if compress else 'false'
        query = 'bounds=%s&depthEnd=%d&depthBegin=%d&compress=%s' % (
            bounds.url, depthEnd, depthBegin, compressed)
        response = urlopen(self.url + '/read?' + query)
        data = response.read()

        # last four bytes of the payload are a little-endian uint32
        # point count
        count = struct.unpack('<L', data[-4:])[0]

        if not compress:
            # uncompressed: view the raw bytes directly as points
            return np.ndarray(shape=(count, ),
                              buffer=data,
                              dtype=self.info['dtype'])

        raw = np.frombuffer(data[:-4], dtype=np.uint8)
        schema_json = json.dumps(self.info['schema'])
        dt = lazperf.buildNumpyDescription(schema_json)
        decompressor = lazperf.Decompressor(raw, schema_json)
        # decompress fills a caller-supplied byte buffer
        out = np.zeros(count * dt.itemsize, dtype=np.uint8)
        points = decompressor.decompress(out)
        return np.ndarray(shape=(count, ), buffer=points, dtype=dt)
Пример #2
0
def decompress(points):
    """Decompress a lazperf-encoded pgpointcloud patch.

    'points' is a pcpatch in wkb

    Returns a two-element list: the raw decompressed point bytes and the
    size in bytes of a single point record.
    """

    # retrieve number of points in wkb pgpointcloud patch
    npoints = utils.npoints_from_wkb_pcpatch(points)
    hexbuffer = utils.hexdata_from_wkb_pcpatch(points)
    # lazperf expects the point count appended to the compressed stream
    hexbuffer += utils.hexa_signed_int32(npoints)

    # build the numpy dtype matching the Greyhound read schema
    s = json.dumps(GreyhoundReadSchema().json()).replace("\\", "")
    dtype = buildNumpyDescription(json.loads(s))

    lazdata = bytes(hexbuffer)

    # numpy.fromstring is deprecated (removed in numpy 2.0);
    # frombuffer is the supported zero-copy equivalent for bytes input
    arr = numpy.frombuffer(lazdata, dtype=numpy.uint8)
    d = Decompressor(arr, s)
    output = numpy.zeros(npoints * dtype.itemsize, dtype=numpy.uint8)
    decompressed = d.decompress(output)

    # ndarray.tostring is deprecated in favour of tobytes
    decompressed_str = decompressed.tobytes()

    return [decompressed_str, dtype.itemsize]
Пример #3
0
    def test_decompressor(self):
        """Decompressing the reference compressed stream must reproduce
        the reference uncompressed points exactly."""
        schema_json = json.dumps(schema)

        with open('test/compressed.bin', 'rb') as f:
            data = f.read()
        with open('test/uncompressed.bin', 'rb') as f:
            original = f.read()

        self.assertEqual(len(data), len_compressed,
                         "compressed file length is correct")
        self.assertEqual(len(original), len_uncompressed,
                         "uncompressed file length is correct")

        # each payload ends with a little-endian uint32 point count
        compressed_point_count = struct.unpack('<L', data[-4:])[0]
        uncompressed_point_count = struct.unpack('<L', original[-4:])[0]

        self.assertEqual(compressed_point_count, uncompressed_point_count,
                         "compressed point count matches expected")
        self.assertEqual(uncompressed_point_count, expected_point_count,
                         "uncompressed point count matches expected")

        raw = np.frombuffer(data, dtype=np.uint8)
        dtype = buildNumpyDescription(json.loads(schema_json))
        self.assertEqual(dtype.itemsize, 54)

        decompressed = Decompressor(raw, schema_json).decompress(
            compressed_point_count)
        uncompressed = np.frombuffer(original[0:-4], dtype=dtype)

        self.assertEqual(uncompressed.shape[0], expected_point_count)
        self.assertEqual(decompressed.shape[0], expected_point_count)
        for want, got in zip(uncompressed, decompressed):
            self.assertEqual(want, got)
Пример #4
0
    def test_compressor(self):
        """Compressing the reference uncompressed points must reproduce
        the reference compressed stream byte for byte."""
        s = json.dumps(schema)

        with open('test/compressed.bin', 'rb') as f:
            data = f.read()

        with open('test/uncompressed.bin', 'rb') as f:
            original = f.read()

        self.assertEqual(len(data),
                         len_compressed,
                         "compressed file length is correct")
        self.assertEqual(len(original),
                         len_uncompressed,
                         "uncompressed file length is correct")

        # last four bytes are the point count
        compressed_point_count = struct.unpack('<L', data[-4:])[0]
        uncompressed_point_count = struct.unpack('<L', original[-4:])[0]

        self.assertEqual(compressed_point_count,
                         uncompressed_point_count,
                         "compressed point count matches expected")
        self.assertEqual(uncompressed_point_count,
                         expected_point_count,
                         "uncompressed point count matches expected")

        dtype = buildNumpyDescription(json.loads(s))

        uncompressed = np.frombuffer(original[0:-4], dtype=dtype)
        self.assertEqual(uncompressed.shape[0], expected_point_count)

        point_data = np.frombuffer(original[:-4], dtype=dtype)

        c = Compressor(s)

        compressed = c.compress(point_data)

        # reference compressed stream without its trailing point count
        original_compressed = np.frombuffer(data[0:-4], dtype=np.uint8)
        self.assertEqual(len(original_compressed), len_compressed - 4)
        for i in range(len(compressed)):
            self.assertEqual(compressed[i], original_compressed[i])
Пример #5
0
    def test_full_loop(self):
        """Compress then decompress the reference points and verify the
        round trip is lossless."""
        s = json.dumps(schema)

        with open('test/uncompressed.bin', 'rb') as f:
            original = f.read()

        dtype = buildNumpyDescription(json.loads(s))
        # strip the trailing uint32 point count before viewing as points
        points = np.frombuffer(original[0:-4], dtype=dtype)

        compressed = Compressor(s).compress(points)
        restored = Decompressor(compressed, s).decompress(
            expected_point_count)

        self.assertEqual(len(restored), len(points))
        for got, want in zip(restored, points):
            self.assertEqual(got, want)

        # confirm we can build from dtypes instead of json descriptions
        _ = Compressor(dtype)
        _ = Decompressor(compressed, dtype)
Пример #6
0
    def test_full_loop(self):
        """Verify that compress followed by decompress is lossless, and
        that the codec classes also accept a numpy dtype as the schema."""
        s = json.dumps(schema)

        with open('test/uncompressed.bin', 'rb') as f:
            original = f.read()

        dtype = buildNumpyDescription(json.loads(s))
        # the final four bytes are the point count, not point data
        uncompressed = np.frombuffer(original[0:-4], dtype=dtype)

        compressor = Compressor(s)
        compressed = compressor.compress(uncompressed)

        decompressor = Decompressor(compressed, s)
        decompressed = decompressor.decompress(expected_point_count)

        self.assertEqual(len(decompressed), len(uncompressed))
        for idx, point in enumerate(decompressed):
            self.assertEqual(point, uncompressed[idx])

        # confirm we can build from dtypes instead of json descriptions
        _ = Compressor(dtype)
        _ = Decompressor(compressed, dtype)
Пример #7
0
    def test_decompressor(self):
        """Decompress the reference stream into a preallocated buffer and
        compare the result against the reference uncompressed points."""
        s = json.dumps(schema)

        with open('test/compressed.bin', 'rb') as f:
            data = f.read()
        with open('test/uncompressed.bin', 'rb') as f:
            original = f.read()

        self.assertEqual(len(data), len_compressed,
                         "compressed file length is correct")
        self.assertEqual(len(original), len_uncompressed,
                         "uncompressed file length is correct")

        # trailing four bytes of each payload hold the uint32 point count
        compressed_point_count = struct.unpack('<L', data[-4:])[0]
        uncompressed_point_count = struct.unpack('<L', original[-4:])[0]

        self.assertEqual(compressed_point_count, uncompressed_point_count,
                         "compressed point count matches expected")
        self.assertEqual(uncompressed_point_count, expected_point_count,
                         "uncompressed point count matches expected")

        arr = np.frombuffer(data, dtype=np.uint8)
        dtype = buildNumpyDescription(json.loads(s))
        self.assertEqual(dtype.itemsize, 54)

        d = Decompressor(arr, s)
        # decompress writes into a caller-supplied byte buffer
        buf = np.zeros(compressed_point_count * dtype.itemsize,
                       dtype=np.uint8)
        decompressed = d.decompress(buf)
        uncompressed = np.frombuffer(original[0:-4], dtype=dtype)

        self.assertEqual(uncompressed.shape[0], expected_point_count)
        self.assertEqual(decompressed.shape[0], expected_point_count)
        for want, got in zip(uncompressed, decompressed):
            self.assertEqual(want, got)
Пример #8
0
def decompress(points, schema):
    """
    Decode patch encoded with lazperf.
    'points' is a pcpatch in wkb

    Returns the decompressed points as a numpy structured array whose
    dtype is derived from 'schema'.
    """

    # retrieve number of points in wkb pgpointcloud patch
    npoints = patch_numpoints(points)
    # skip the wkb/pcpatch header, then append the point count that
    # lazperf expects at the end of the compressed stream
    hexbuffer = unhexlify(points[34:])
    hexbuffer += hexa_signed_int32(npoints)

    # build the numpy dtype matching the schema
    s = json.dumps(schema).replace("\\", "")
    dtype = buildNumpyDescription(json.loads(s))
    lazdata = bytes(hexbuffer)

    # np.fromstring is deprecated (removed in numpy 2.0);
    # frombuffer is the supported zero-copy equivalent for bytes input
    arr = np.frombuffer(lazdata, dtype=np.uint8)
    d = Decompressor(arr, s)
    output = np.zeros(npoints * dtype.itemsize, dtype=np.uint8)
    decompressed = d.decompress(output)

    return decompressed
Пример #9
0
    def test_compressor(self):
        """Compressing the reference uncompressed points must reproduce
        the reference compressed stream byte for byte."""
        s = json.dumps(schema)

        with open('test/compressed.bin', 'rb') as f:
            data = f.read()

        with open('test/uncompressed.bin', 'rb') as f:
            original = f.read()

        self.assertEqual(len(data), len_compressed,
                         "compressed file length is correct")
        self.assertEqual(len(original), len_uncompressed,
                         "uncompressed file length is correct")

        # last four bytes are the point count
        compressed_point_count = struct.unpack('<L', data[-4:])[0]
        uncompressed_point_count = struct.unpack('<L', original[-4:])[0]

        self.assertEqual(compressed_point_count, uncompressed_point_count,
                         "compressed point count matches expected")
        self.assertEqual(uncompressed_point_count, expected_point_count,
                         "uncompressed point count matches expected")

        dtype = buildNumpyDescription(json.loads(s))

        uncompressed = np.frombuffer(original[0:-4], dtype=dtype)
        self.assertEqual(uncompressed.shape[0], expected_point_count)

        point_data = np.frombuffer(original[:-4], dtype=dtype)

        c = Compressor(s)

        compressed = c.compress(point_data)
        # reference compressed stream without its trailing point count
        original_compressed = np.frombuffer(data[0:-4], dtype=np.uint8)

        self.assertEqual(len(original_compressed), len_compressed - 4)
        for i in range(len(compressed)):
            self.assertEqual(compressed[i], original_compressed[i])
Пример #10
0
def decompress(points, schema):
    """
    Decode patch encoded with lazperf.
    'points' is a pcpatch in wkb

    Returns the decompressed points as a numpy structured array whose
    dtype is derived from 'schema'.
    """

    # retrieve number of points in wkb pgpointcloud patch
    npoints = patch_numpoints(points)
    # skip the wkb/pcpatch header, then append the point count that
    # lazperf expects at the end of the compressed stream
    hexbuffer = unhexlify(points[34:])
    hexbuffer += hexa_signed_int32(npoints)

    # build the numpy dtype matching the schema
    s = json.dumps(schema).replace("\\", "")
    dtype = buildNumpyDescription(json.loads(s))
    lazdata = bytes(hexbuffer)

    # np.fromstring is deprecated (removed in numpy 2.0);
    # frombuffer is the supported zero-copy equivalent for bytes input
    arr = np.frombuffer(lazdata, dtype=np.uint8)
    d = Decompressor(arr, s)
    output = np.zeros(npoints * dtype.itemsize, dtype=np.uint8)
    decompressed = d.decompress(output)

    return decompressed