def test_decompression_is_same_as_uncompressed():
    """Decompressed LAZ point records must match the uncompressed LAS ones byte for byte."""
    las_file = pylas.read(simple_las)
    laz_file = pylas.read(simple_laz)

    uncompressed_bytes = bytes(las_file.points.memoryview())
    decompressed_bytes = bytes(laz_file.points.memoryview())
    assert uncompressed_bytes == decompressed_bytes
def test_decompression_is_same_as_uncompressed():
    """Decompressed LAZ raw point bytes must equal those of the uncompressed LAS twin."""
    las_file = pylas.read(simple_las)
    laz_file = pylas.read(simple_laz)

    uncompressed_bytes = las_file.points_data.raw_bytes()
    decompressed_bytes = laz_file.points_data.raw_bytes()
    assert uncompressed_bytes == decompressed_bytes
def test_extra_dimensions_names_property():
    """extra_dimensions_names is empty for plain files and lists names for extra-bytes files."""
    plain = pylas.read(simple_las)
    assert plain.points_data.extra_dimensions_names == ()

    with_extra = pylas.read(extra_bytes_las)
    expected = ("Colors", "Reserved", "Flags", "Intensity", "Time")
    assert with_extra.points_data.extra_dimensions_names == expected
def append_self_and_check(las_path_fixture):
    """Append a file's own points to an in-memory copy of it, verify and return the result."""
    with open(las_path_fixture, mode="rb") as source:
        buffer = io.BytesIO(source.read())

    las = pylas.read(las_path_fixture)
    with pylas.open(buffer, mode='a', closefd=False) as appender:
        appender.append_points(las.points)

    buffer.seek(0, io.SEEK_SET)
    reread = pylas.read(buffer)

    assert reread.header.point_count == 2 * las.header.point_count
    half = reread.header.point_count // 2
    assert reread.points[:half] == las.points
    assert reread.points[half:] == las.points

    return reread
def test_adding_extra_bytes_keeps_values_of_all_existing_fields(
    extra_bytes_params, simple_las_path
):
    """
    Adding extra bytes must not disturb the values already stored in the
    pre-existing dimensions.
    """
    modified = pylas.read(simple_las_path)
    modified.add_extra_dim(extra_bytes_params)

    pristine = pylas.read(simple_las_path)
    for dim_name in pristine.point_format.dimension_names:
        assert np.allclose(modified[dim_name], pristine[dim_name])
def test_chunked_writing_gives_expected_points(file_path, backend):
    """Write a file in chunked mode, then verify the points read back correctly.

    Points are written in slices of ``iter_size``; compression is enabled
    only when a LAZ backend is supplied.
    """
    original_las = pylas.read(file_path)
    iter_size = 51
    # `True if x else False` is redundant — the comparison is already a bool.
    do_compress = backend is not None

    with io.BytesIO() as tmp_output:
        with pylas.open(
            tmp_output,
            mode="w",
            closefd=False,
            header=original_las.header,
            do_compress=do_compress,
            laz_backend=backend,
        ) as las:
            # math.ceil already returns an int in Python 3; no int() needed.
            num_chunks = math.ceil(len(original_las.points) / iter_size)
            for i in range(num_chunks):
                original_points = original_las.points[i * iter_size:(i + 1) * iter_size]
                las.write_points(original_points)

        tmp_output.seek(0)
        with pylas.open(tmp_output, closefd=False) as reader:
            check_chunked_reading_is_gives_expected_points(
                original_las, reader, iter_size
            )
def test_creating_extra_byte_with_invalid_type(simple_las_path):
    """An unknown type string for an extra dimension must raise TypeError."""
    las = pylas.read(simple_las_path)
    with pytest.raises(TypeError):
        las.add_extra_dim(pylas.ExtraBytesParams("just_a_test", "i16"))
def test_extra_dims_not_equal():
    """
    Two point formats that share an id but differ in their extra
    dimensions must compare as unequal.
    """
    las = pylas.read(extra_bytes_laz)
    fmt_id = las.points_data.point_format.id
    assert las.points_data.point_format != PointFormat(fmt_id)
def test_write_extra_dimensions_gps_time(data):
    """Extra dimensions round-trip alongside gps_time with their dtype preserved."""
    data["new_stuff"] = (np.random.random(100) * 100).astype("u1")
    jaklas.write(data, TEMP_OUTPUT)

    result = pylas.read(str(TEMP_OUTPUT))
    assert np.allclose(result.new_stuff, data["new_stuff"])
    assert np.allclose(result.gps_time, data["gps_time"])
    assert result.new_stuff.dtype == np.dtype("u1")
def read_laz(laz_path):
    """Read a LAZ file and return (points, colors) as Nx3 arrays.

    Colors are scaled by 1/65536 — presumably 16-bit channels normalized to
    stay strictly below 1.0; confirm 65536 (vs 65535) is intentional.
    """
    las = pylas.read(laz_path)
    rgb = np.vstack([las.points['red'], las.points['green'], las.points['blue']]).T
    colors = rgb / 65536
    coordinates = np.vstack([las.x, las.y, las.z]).T
    return coordinates, colors
def test_point_count_stays_synchronized():
    """Replacing the points with a slice must keep the header point count in sync."""
    las = pylas.read(test_common.simple_las)
    assert las.header.point_count == len(las.points_data)

    las.points = las.points[:120]
    assert 120 == las.header.point_count
    assert las.header.point_count == len(las.points_data)
def test_mmap(mmapped_file_path):
    """Writes made through a memory-mapped file must persist to disk."""
    with pylas.mmap(mmapped_file_path) as mapped:
        mapped.classification[:] = 25
        assert np.all(mapped.classification == 25)

    reread = pylas.read(mmapped_file_path)
    assert np.all(reread.classification == 25)
def test_packing_overflow():
    """Repacking sub-fields must raise when a value exceeds the packed bit width."""
    las = pylas.read(simple_las)
    las.points_data = las.points_data.to_unpacked()
    # 177 does not fit in the packed classification field
    las.classification[:] = 177
    with pytest.raises(OverflowError):
        las.points_data = las.points_data.to_packed()
def test_scaled_extra_byte_array_type(simple_las_path):
    """
    Scaled, multi-valued extra bytes must expose their offsets as initial
    values, accept per-channel writes, and survive a write/read round trip.
    """
    las = pylas.read(simple_las_path)
    las.add_extra_dim(
        pylas.ExtraBytesParams(
            name="test_dim",
            type="3int32",
            scales=np.array([1.0, 2.0, 3.0], np.float64),
            offsets=np.array([10.0, 20.0, 30.0], np.float64),
        )
    )

    # Freshly added values equal the per-channel offsets.
    for channel, offset in enumerate((10.0, 20.0, 30.0)):
        assert np.allclose(las.test_dim[..., channel], offset)

    new_values = (42.0, 82.0, 123.0)
    for channel, value in enumerate(new_values):
        las.test_dim[..., channel][:] = value
    for channel, value in enumerate(new_values):
        assert np.allclose(las.test_dim[..., channel], value)

    las = write_then_read_again(las)
    for channel, value in enumerate(new_values):
        assert np.allclose(las.test_dim[..., channel], value)
def test_set_uuid():
    """A uuid assigned to the header survives a write/read round trip."""
    import uuid

    las = pylas.read(test_common.simple_las)
    new_uuid = uuid.uuid4()
    las.header.uuid = new_uuid
    las = test_common.write_then_read_again(las)
    assert las.header.uuid == new_uuid
def test_write_simple(data):
    """Coordinates, intensity and classification round-trip through a LAS write."""
    jaklas.write(data, TEMP_OUTPUT)
    result = pylas.read(str(TEMP_OUTPUT))

    assert np.allclose(result.x, data["x"], atol=0.0001)
    assert np.allclose(result.y, data["y"], atol=0.0001)
    assert np.allclose(result.z, data["z"], atol=0.0001)
    assert np.allclose(result.intensity, data["intensity"].astype("u2"))
    assert np.allclose(result.classification, data["classification"])
def test_sub_field_view_with_self(simple_las_path):
    """Assigning a permuted view of a sub-field back onto itself must not corrupt it."""
    las = pylas.read(simple_las_path)
    snapshot = np.array(las.return_number)
    descending = np.argsort(las.return_number)[::-1]
    las.return_number[:] = las.return_number[descending]
    assert np.all(las.return_number == snapshot[descending])
def test_write_simple_laz(data):
    """Writing to LAZ compresses the points and still round-trips all fields."""
    jaklas.write(data, TEMP_OUTPUT_LAZ)
    result = pylas.read(str(TEMP_OUTPUT_LAZ))

    assert result.header.are_points_compressed
    assert np.allclose(result.x, data["x"], atol=0.0001)
    assert np.allclose(result.y, data["y"], atol=0.0001)
    assert np.allclose(result.z, data["z"], atol=0.0001)
    assert np.allclose(result.intensity, data["intensity"].astype("u2"))
    assert np.allclose(result.classification, data["classification"])
def test_write_offset(data):
    """An xyz_offset passed to write is added to the stored coordinates."""
    xyz_offset = (1, 2, 3)
    jaklas.write(data, TEMP_OUTPUT, xyz_offset=xyz_offset)
    result = pylas.read(str(TEMP_OUTPUT))

    assert np.allclose(result.x, data["x"] + xyz_offset[0], atol=0.0001)
    assert np.allclose(result.y, data["y"] + xyz_offset[1], atol=0.0001)
    assert np.allclose(result.z, data["z"] + xyz_offset[2], atol=0.0001)
    assert np.allclose(result.intensity, data["intensity"].astype("u2"))
    assert np.allclose(result.classification, data["classification"])
def test_write_crs():
    """A CRS passed as an EPSG code ends up as a WKT VLR in the output file."""
    jaklas.write(point_data_gps_time, TEMP_OUTPUT, crs=2950)
    result = pylas.read(str(TEMP_OUTPUT))

    # note: there is a bug in pylas (to be fixed)
    # where WktCoordinateSystemVlr is read as WktMathTransformVlr
    # wkt = result.vlrs.get("WktCoordinateSystemVlr")[0].string
    wkt = result.vlrs[0].string
    expected_wkt = pyproj.CRS.from_epsg(2950).to_wkt()
    assert expected_wkt == wkt[:-1]  # null-terminated string in las file
def test_read_write_example_extra_bytes_file(las_file_path_with_extra_bytes):
    """
    Files containing extra bytes must survive a write/read round trip
    with every dimension's values intact.
    """
    before = pylas.read(las_file_path_with_extra_bytes)
    after = write_then_read_again(before)
    for dim_name in before.point_format.dimension_names:
        assert np.allclose(after[dim_name], before[dim_name])
def test_header_date():
    """The creation date is preserved when a header is written out and re-read."""
    las = pylas.read(test_common.extra_bytes_las)
    with io.BytesIO() as buffer:
        las.header.write_to(buffer)
        buffer.seek(0)
        reread_header = LasHeader.read_from(buffer)

        expected_date = date(year=2015, month=2, day=22)
        assert las.header.creation_date == expected_date
        assert las.header.creation_date == reread_header.creation_date
def test_write_large_classifications():
    """Classifications too large for legacy formats force point format 6."""
    data = point_data_large_classification
    jaklas.write(data, TEMP_OUTPUT)
    result = pylas.read(str(TEMP_OUTPUT))

    assert result.point_format.id == 6
    assert np.allclose(result.x, data["x"], atol=0.0001)
    assert np.allclose(result.y, data["y"], atol=0.0001)
    assert np.allclose(result.z, data["z"], atol=0.0001)
    assert np.allclose(result.intensity, data["intensity"].astype("u2"))
    assert np.allclose(result.classification, data["classification"])
def convert(input, output, point_format_id, file_version, force):
    """ Converts INPUT to a file with point_format_id and file_version
    Writes the result to OUTPUT

    If no file version or point_format_id is provided this will result in a copy.

    Examples:

    1) Compress a file

        pylas convert stormwind.las stormwind.laz

    2) Convert file to point format 3

        pylas convert ironforge.las forgeiron.las --point-format-id 3
    """
    # Validate the requested point format before doing any I/O.
    if (point_format_id is not None
            and point_format_id not in pylas.supported_point_formats()):
        click.echo(
            click.style(
                "Point format {} is not supported".format(point_format_id),
                fg="red"))
        raise click.Abort()
    # Validate the requested LAS version the same way.
    if file_version is not None and file_version not in pylas.supported_versions():
        click.echo(
            click.style("LAS version {} is not supported".format(file_version),
                        fg="red"))
        raise click.Abort()

    las = pylas.read(openbin_file(input))
    # Unless --force was given, warn the user when the target point format
    # would drop dimensions present in the source, and ask for confirmation.
    if point_format_id is not None and not force:
        lost_dimensions = pylas.lost_dimensions(
            las.points_data.point_format.id, point_format_id)
        if lost_dimensions:
            click.echo("Converting will lose: {}".format(lost_dimensions))
            click.confirm("Continue ?", abort=True)

    try:
        las = pylas.convert(las, point_format_id=point_format_id,
                            file_version=file_version)
    except pylas.errors.PylasError as e:
        # Library-specific errors: show the exception class for context.
        click.echo(
            click.style("{}: {}".format(e.__class__.__name__, e), fg="red"))
        raise click.Abort()
    except Exception as e:
        # Any other failure: show the message only, still abort the CLI.
        click.echo(click.style(str(e), fg="red"))
        raise click.Abort()
    else:
        # Only write the output when the conversion succeeded; compress
        # when the output file name ends in .laz.
        las.write(openbin_file(output, mode='w'),
                  do_compress=output.endswith('.laz'))
def test_header_copy():
    """copy.copy of a header yields an equal but independent object."""
    import copy

    las = pylas.read(test_common.simple_las)
    duplicated = copy.copy(las.header)
    assert duplicated.point_format_id == las.header.point_format_id
    assert duplicated.version == las.header.version

    # Mutating the copy must not touch the original header.
    duplicated.point_format_id = 0
    assert duplicated.point_format_id != las.header.point_format_id
    assert duplicated.version == las.header.version
def test_writing_las_with_evlrs():
    """An appended EVLR survives an uncompressed write/read round trip."""
    las = pylas.read(test1_4_las)
    assert las.evlrs == []

    evlr = pylas.VLR(
        user_id="pylastest",
        record_id=42,
        description="Just a test",
        record_data=b"And so he grinds his own hands",
    )
    las.evlrs.append(evlr)

    round_tripped = write_then_read_again(las, do_compress=False)
    assert round_tripped.evlrs == [evlr]
def test_write_large_coordinates(data):
    """Large absolute coordinate values round-trip within scaling tolerance."""
    data = deepcopy(data)
    data["x"] += 320000
    data["y"] += 5000000
    data["z"] -= 20
    jaklas.write(data, TEMP_OUTPUT)

    result = pylas.read(str(TEMP_OUTPUT))
    assert np.allclose(result.x, data["x"], atol=0.0001)
    assert np.allclose(result.y, data["y"], atol=0.0001)
    assert np.allclose(result.z, data["z"], atol=0.0001)
    assert np.allclose(result.intensity, data["intensity"].astype("u2"))
    assert np.allclose(result.classification, data["classification"])
def test_write_xyz(data):
    """A combined Nx3 'xyz' column works in place of separate x/y/z keys."""
    data = deepcopy(data)
    data["xyz"] = np.vstack([data["x"], data["y"], data["z"]]).T
    for axis in ("x", "y", "z"):
        del data[axis]

    jaklas.write(data, TEMP_OUTPUT)
    result = pylas.read(str(TEMP_OUTPUT))

    assert np.allclose(result.x, data["xyz"][:, 0], atol=0.0001)
    assert np.allclose(result.y, data["xyz"][:, 1], atol=0.0001)
    assert np.allclose(result.z, data["xyz"][:, 2], atol=0.0001)
    assert np.allclose(result.intensity, data["intensity"].astype("u2"))
    assert np.allclose(result.classification, data["classification"])
def test_write_large_coordinates_xyz_offset(data):
    """float32 coordinates plus a large xyz_offset round-trip within tolerance."""
    data = deepcopy(data)
    xyz_offset = (3e5, 5e6, 100)
    for axis in ("x", "y", "z"):
        data[axis] = data[axis].astype("f")

    jaklas.write(data, TEMP_OUTPUT, xyz_offset=xyz_offset)
    result = pylas.read(str(TEMP_OUTPUT))

    assert np.allclose(result.x, data["x"].astype("d") + xyz_offset[0], atol=0.0001)
    assert np.allclose(result.y, data["y"].astype("d") + xyz_offset[1], atol=0.0001)
    assert np.allclose(result.z, data["z"].astype("d") + xyz_offset[2], atol=0.0001)
    assert np.allclose(result.intensity, data["intensity"].astype("u2"))
    assert np.allclose(result.classification, data["classification"])
def test_read_example_extra_bytes_las(las_file_path_with_extra_bytes):
    """
    The bundled extra-bytes example file must expose the expected
    extra dimension names.
    """
    las = pylas.read(las_file_path_with_extra_bytes)
    expected_names = ["Colors", "Reserved", "Flags", "Intensity", "Time"]
    assert expected_names == list(las.point_format.extra_dimension_names)