def lasOpen(lasDir, h5file):
    """Load and merge all LAS files associated with an HDF5 file.

    Looks up the basename of ``h5file`` in ``<lasDir>/lasDB.json``, merges
    the x/y/z coordinates of every matched LAS file into flat arrays, and
    queries each file's coordinate reference system via ``pdal info``.

    Returns:
        (xl, yl, zl, wkt) on success;
        ([], [], [], []) when no LAS files are associated;
        1 when the matched files do not all share the same CRS.
        NOTE(review): the bare ``1`` error return is inconsistent with the
        tuple returns — callers must special-case it; confirm this is
        intentional.
    """
    # Open las db
    lasdb = open(lasDir + "/lasDB.json", "r")
    db = json.load(lasdb)
    lasdb.close()
    # Get list of las files (DB is keyed by the h5 file's basename)
    lasfiles = db[h5file.split("/")[-1]].keys()
    lasfiles = list(lasfiles)
    if len(lasfiles) == 0:
        log.warning("%s has no associated LAS files", os.path.basename(h5file))
        return ([], [], [], [])
    lshort = []
    for file in lasfiles:  # NOTE: shadows the name "file" used elsewhere in this module
        lshort.append(os.path.basename(file))
    log.info("%s matched to %s", os.path.basename(h5file), lshort)
    # Open and merge las files into one array
    f = lasf.File(lasDir + "/" + lasfiles[0], mode="r")
    xl = f.x
    yl = f.y
    zl = f.z
    f.close()
    if len(lasfiles) > 1:
        for i in range(1, len(lasfiles)):
            f = lasf.File(lasDir + "/" + lasfiles[i], mode="r")
            xl = np.append(xl, f.x)
            yl = np.append(yl, f.y)
            zl = np.append(zl, f.z)
            f.close()
    # Check that same coordinate system assumption is valid
    crs = [None] * len(lasfiles)
    for i in range(len(lasfiles)):
        # info = subprocess.run(['lasinfo', '--xml', sys.argv[1] + "/" + lasfiles[i]], stdout=subprocess.PIPE)
        # going to try with pdal instead
        info = subprocess.run(
            ["pdal", "info", lasDir + "/" + lasfiles[i], "--metadata"],
            stdout=subprocess.PIPE,
        )
        # decode stdout from bytestring and convert to a dictionary
        json_result = json.loads(info.stdout.decode())
        # # Parse xml
        # root = et.fromstring(info.stdout)
        # Coordinate reference system
        # crs[i] = root.find("header").find("srs").find("wkt").text
        crs[i] = json_result["metadata"]["srs"]["wkt"]
    # Set only contains unique
    if len(set(crs)) > 1:
        log.warning("%s not all coordinate systems the same %s", os.path.basename(h5file), lshort)
        return 1
    return (xl, yl, zl, crs[0])
def cloudDifference(firstfilename, secondfilename, outputfname=None):
    """Save the points present in the first cloud but absent from the second.

    Args:
        firstfilename: path of the LAS file whose unique points are kept.
        secondfilename: path of the LAS file to subtract.
        outputfname: output path; defaults to
            ``Diff_<first-stem>_<second-basename>``.
    """
    firstFile = file.File(firstfilename, mode="r")
    secondFile = file.File(secondfilename, mode="r")
    try:
        # Fix: use `is None` (not `== None`) per PEP 8.
        if outputfname is None:
            outputfname = "Diff_{0}_{1}".format(
                os.path.splitext(os.path.basename(firstfilename))[0],
                os.path.basename(secondfilename))
        points = np.setdiff1d(firstFile.points, secondFile.points)
        saveCloud(outputfname, firstFile.header, points)
    finally:
        # Fix: the original leaked both file handles.
        firstFile.close()
        secondFile.close()
def toFloor(inputfname, outputfname=None, floor=0.0):
    """Copy a LAS file with every z coordinate replaced by ``floor``.

    Args:
        inputfname: path of the source LAS file.
        outputfname: output path; defaults to ``toFloor_<input-basename>``.
        floor: constant elevation assigned to all points.
    """
    inFile = file.File(inputfname, mode="r")
    # Fix: use `is None` (not `== None`) per PEP 8.
    if outputfname is None:
        outputfname = "toFloor_" + os.path.basename(inputfname)
    outFile = file.File(outputfname, mode="w", header=inFile.header)
    outFile.points = inFile.points
    # Flatten the cloud onto the requested elevation.
    z = outFile.z
    z.fill(floor)
    outFile.z = z
    inFile.close()
    outFile.close()
def projectY(inputfname, outputfname=None):
    """Copy a LAS file with every y coordinate collapsed onto the y minimum.

    Args:
        inputfname: path of the source LAS file.
        outputfname: output path; defaults to ``projectY_<input-basename>``.
    """
    inFile = file.File(inputfname, mode="r")
    # Fix: use `is None` (not `== None`) per PEP 8.
    if outputfname is None:
        outputfname = "projectY_" + os.path.basename(inputfname)
    outFile = file.File(outputfname, mode="w", header=inFile.header)
    outFile.points = inFile.points
    # Project every point onto the plane y = header.min[1].
    y = outFile.y
    y.fill(outFile.header.min[1])
    outFile.y = y
    outFile.close()
    inFile.close()
def test_x(self):
    """Fetch and test X, Y, Z dimensions"""
    compressed = File.File(self.simple_z, mode="r")
    uncompressed = File.File(self.simple_s, mode="r")
    # Each raw coordinate dimension must be identical in both encodings.
    for dim in ("X", "Y", "Z"):
        z_values = list(getattr(compressed, dim))
        s_values = list(getattr(uncompressed, dim))
        self.assertTrue(z_values == s_values)
def test_evlr(self):
    """Testing v1.4 EVLR support"""
    File2 = File.File(self.output_tempfile, mode="w", header=self.File1.header)
    self.assertEqual(self.File1.header.evlrs[0].to_byte_string(),
                     File2.header.evlrs[0].to_byte_string())
    File2.points = self.File1.points
    # Fix: the original abused a list comprehension for its append side
    # effect and used Python-2-only `xrange`; build the 50-copy list directly.
    File2.header.evlrs = [File2.header.evlrs[0] for _ in range(50)]
    File2.close()
    # Re-open and confirm all 50 EVLRs survived the round trip.
    File2 = File.File(self.output_tempfile, mode="r")
    self.assertEqual(len(File2.header.evlrs), 50)
    File2.close(ignore_header_changes=True)
def test_vlr_parsing_api(self):
    """Testing VLR body parsing api"""
    shutil.copyfile(self.simple14, self.tempfile2)
    VLRFile = File.File(self.tempfile2, mode="rw")
    vlr0 = VLRFile.header.vlrs[0]
    # Fix: `parsed_body` is a tuple, so `parsed_body[-1] += 1` raises
    # TypeError (item assignment on a tuple). Rebuild the tuple with the
    # last element incremented, matching the corrected sibling test.
    pb = vlr0.parsed_body
    vlr0.parsed_body = pb[:-1] + (pb[-1] + 1, )
    pb = vlr0.parsed_body
    vlr0.pack_data()
    VLRFile.header.save_vlrs()
    VLRFile.close()
    # Re-open and confirm the modified body was persisted.
    VLRFile = File.File(self.tempfile2, mode="rw")
    self.assertTrue(pb == VLRFile.header.vlrs[0].parsed_body)
    VLRFile.close()
def test_vlr_parsing_api(self):
    """Testing VLR body parsing api"""
    really_copyfile(self.simple14, self.tempfile2)
    las = File.File(self.tempfile2, mode="rw")
    first_vlr = las.header.vlrs[0]
    # Increment the trailing element by rebuilding the tuple body.
    body = first_vlr.parsed_body
    first_vlr.parsed_body = body[:-1] + (body[-1] + 1, )
    expected = first_vlr.parsed_body
    first_vlr.pack_data()
    las.header.save_vlrs()
    las.close()
    # The modified body must survive a round trip to disk.
    las = File.File(self.tempfile2, mode="rw")
    self.assertTrue(expected == las.header.vlrs[0].parsed_body)
    las.close()
def test_vlr_defined_dimensions2(self):
    """Testing VLR defined dimension API"""
    out = File.File(self.output_tempfile, mode="w", header=self.FileObject.header)
    out.define_new_dimension("test_dimension", 5, "This is a test.")
    out.X = self.FileObject.X
    # A freshly defined dimension must come back zero-initialized.
    self.assertTrue(out.test_dimension[500] == 0)
    out.close(ignore_header_changes=True)
def on_click():
    """Load the selected LAS file and show it as a gray PCL cloud."""
    f = file.File(inFile1, mode='r')
    ptcloud = np.vstack((f.x, f.y, f.z)).transpose()
    f.close()
    # Centre the data
    ptcloud_centred = ptcloud - np.mean(ptcloud, 0)
    # Simulate an intensity channel in [0, 1).
    # Fix: the original loop ran to shape[0]-1, leaving the LAST point's
    # intensity at 0 (off-by-one), and used `sc.append` (scipy.append was
    # removed from modern SciPy). Assign every row via numpy instead.
    intensity = np.random.random((ptcloud.shape[0], 1))
    ptcloud_centred = np.append(ptcloud_centred, intensity, axis=1)
    p = pcl.PointCloud_PointXYZI()
    p.from_array(np.array(ptcloud_centred, dtype=np.float32))
    visual = pcl_visualization.CloudViewing()
    visual.ShowGrayCloud(p)

    # Poll the viewer from the Tk event loop instead of blocking it.
    def check_was_stopped():
        visual.WasStopped()
        root.after(100, check_was_stopped)

    check_was_stopped()
def toFloor(inputfname, outputfname="", floor=0, verbose=True):
    """Flatten a cloud's z values to ``floor`` and save the result.

    Args:
        inputfname: path of the source LAS file.
        outputfname: output path; defaults to ``toFloor_<input-basename>``.
        floor: constant elevation assigned to all points.
        verbose: unused; kept for interface compatibility.
    """
    # NOTE(review): mode "rw" means the z-fill below also mutates the
    # INPUT file in place — confirm that is intended.
    inFile = file.File(inputfname, mode="rw")
    z = inFile.z
    z.fill(floor)
    inFile.z = z
    # Fix: the default is "" (never None), so the original
    # `not (outputfname == None)` test could never substitute the derived
    # name and saveCloud was handed an empty filename. Test truthiness.
    if not outputfname:
        outputfname = "toFloor_" + os.path.basename(inputfname)
    saveCloud(outputfname, inFile.header, inFile.points)
def main():
    """Load a LAS file, attach a random intensity channel, and view it."""
    import pcl

    # laspy library: read las file
    f = file.File('28XXX10000075-18.las', mode='r')
    # Store pointcloud in array
    ptcloud = np.vstack((f.x, f.y, f.z)).transpose()
    f.close()
    # Centre the data
    ptcloud_centred = ptcloud - np.mean(ptcloud, 0)
    # Simulate an intensity channel in [0, 1).
    # Fix: the original loop ran to shape[0]-1, leaving the LAST point's
    # intensity at 0 (off-by-one), and used `sc.append` (scipy.append was
    # removed from modern SciPy). Assign every row via numpy instead.
    intensity = np.random.random((ptcloud.shape[0], 1))
    ptcloud_centred = np.append(ptcloud_centred, intensity, axis=1)
    p = pcl.PointCloud_PointXYZI()
    p.from_array(np.array(ptcloud_centred, dtype=np.float32))
    # Visualization: block until the viewer window is closed.
    visual = pcl_visualization.CloudViewing()
    visual.ShowGrayCloud(p, b'cloud')
    v = True
    while v:
        v = not (visual.WasStopped())
def test_classification_variables(self):
    """Testing v1.4 classification support"""
    src = self.File1
    # Snapshot every per-point field we intend to round-trip.
    field_names = ("classification", "classification_flags",
                   "scanner_channel", "scan_dir_flag", "edge_flight_line",
                   "return_num", "num_returns")
    originals = {name: getattr(src, name) for name in field_names}
    File2 = File.File(self.output_tempfile, mode="w", header=src.header)
    # Write in the same order the original test used.
    for name in ("classification", "classification_flags", "scan_dir_flag",
                 "scanner_channel", "edge_flight_line", "return_num",
                 "num_returns"):
        setattr(File2, name, originals[name])
    # Every field must read back unchanged through its getter.
    for name in field_names:
        getter = getattr(File2, "get_" + name)
        self.assertTrue(all(originals[name] == getter()))
    File2.close(ignore_header_changes=True)
def displayInfo(inputfname):
    """Print the header and point-format spec names of a LAS file."""
    las_file = file.File(inputfname, mode="r")
    displayHeader(las_file.header)
    print("\r\nPoint format:")
    spec_names = [spec.name for spec in las_file.point_format]
    for name in spec_names:
        print(name)
    las_file.close()
def main():
    """Visualise a LAS file with PCL, coloured by RGB when the point
    format carries colour channels, monochrome otherwise.

    Blocks until the viewer window is closed.
    """
    # RGB : NG
    # f = file.File('28XXX10000075-18.las', mode='r')
    f = file.File('28W0608011101-1.las', mode='r')
    # f = file.File('28XXX00020001-1.las', mode='r')
    # f = file.File('simple1_4.las', mode='r')
    # check las file version
    # RGB contains
    # Point data formats 2, 3 and 5 carry red/green/blue channels.
    # NOTE(review): reaches into the private `_header` attribute.
    if f._header.data_format_id in (2, 3, 5):
        red = (f.red)
        green = (f.green)
        blue = (f.blue)
        # 16bit to convert 8bit data(data Storage First 8 bits case)
        red = np.right_shift(red, 8).astype(np.uint8)
        green = np.right_shift(green, 8).astype(np.uint8)
        blue = np.right_shift(blue, 8).astype(np.uint8)
        # (data Storage After 8 bits case)
        # red = red.astype(np.uint8)
        # green = green.astype(np.uint8)
        # blue = blue.astype(np.uint8)
        red = red.astype(np.uint32)
        green = green.astype(np.uint32)
        blue = blue.astype(np.uint32)
        # Pack the 8-bit channels into a single 0xRRGGBB integer per point.
        rgb = np.left_shift(red, 16) + np.left_shift(green, 8) + np.left_shift(
            blue, 0)
        ptcloud = np.vstack((f.x, f.y, f.z, rgb)).transpose()
        cloud = pcl.PointCloud_PointXYZRGBA()
        # set raw points
        # cloud.from_array(np.array(ptcloud, dtype=np.float32))
        # set point centered (the zero keeps the packed rgb column untouched)
        mean_param = np.concatenate([np.mean(ptcloud, 0)[0:3], np.zeros(1)])
        ptcloud_centred = ptcloud - mean_param
        # print(ptcloud_centred)
        cloud.from_array(np.array(ptcloud_centred, dtype=np.float32))
        # Visualization
        visual = pcl.pcl_visualization.CloudViewing()
        visual.ShowColorACloud(cloud, b'cloud')
    else:
        ptcloud = np.vstack((f.x, f.y, f.z)).transpose()
        mean_param = np.mean(ptcloud, 0)
        cloud = pcl.PointCloud()
        # set raw points
        # cloud.from_array(np.array(ptcloud, dtype=np.float32))
        # set point centered
        # mean_param = np.concatenate([np.mean(ptcloud, 0)[0:3], np.zeros(1)])
        ptcloud_centred = ptcloud - mean_param
        # print(ptcloud_centred)
        cloud.from_array(np.array(ptcloud_centred, dtype=np.float32))
        # Visualization
        visual = pcl.pcl_visualization.CloudViewing()
        visual.ShowMonochromeCloud(cloud, b'cloud')
    # Spin until the viewer window is closed.
    v = True
    while v:
        v = not (visual.WasStopped())
def test_evlr(self):
    """Testing v1.3 EVLR support."""
    out = File.File(self.output_tempfile, mode="w", header=self.File1.header)
    # The first EVLR must be copied byte-for-byte into the new file.
    expected = self.File1.header.evlrs[0].to_byte_string()
    actual = out.header.evlrs[0].to_byte_string()
    self.assertEqual(expected, actual)
    out.close(ignore_header_changes=True)
def test_vlr_defined_dimensions3(self):
    """Testing VLR defined dimensions (HL API)"""
    out = File.File(self.output_tempfile, mode="w", header=self.File1.header)
    out.define_new_dimension("Test_Dimension", 5, "This is a test.")
    out.X = self.File1.X
    # A freshly defined dimension must come back zero-initialized.
    self.assertEqual(out.Test_Dimension[500], 0)
    out.close(ignore_header_changes=True)
def f():
    """Filter the input LAS (argv[1]) to the classification codes listed in
    argv[3] (comma-separated ints) and write the result to argv[2]."""
    sys.path.append("../")
    from laspy import file as File

    source = File.File(sys.argv[1], mode="r")
    dest = File.File(sys.argv[2], mode="w", header=source.header)
    wanted = [int(code) for code in sys.argv[3].split(",")]
    keep = np.vectorize(lambda c: c in wanted)
    print("Writing")
    # Copy across only the points whose raw classification is wanted.
    selected = source.reader.get_points()[keep(source.raw_classification)]
    dest.writer.set_points(selected)
    print("Closing")
    source.close()
    dest.close(ignore_header_changes=True)
def splitCells(inputfname, cellsize=50, verbose=False):
    """Tile a LAS cloud into ``cellsize`` x ``cellsize`` square cells, saving
    each non-empty cell to ``<stepX>_<stepY>_<input-basename>``.

    Args:
        inputfname: path of the source LAS file.
        cellsize: edge length of each square cell (same units as x/y).
        verbose: print progress and per-cell warnings.
    """
    start = time.time()
    inFile = file.File(inputfname, mode="r")
    warningMsg = []  # messages for empty cells (verbose mode only)
    xmin = inFile.x.min()
    xmax = inFile.x.max()
    ymin = inFile.y.min()
    ymax = inFile.y.max()
    # Number of cells along X and Y covering the full extent.
    nx = int(math.ceil((xmax - xmin) / cellsize))
    ny = int(math.ceil((ymax - ymin) / cellsize))
    if verbose:
        print("The original cloud was divided in {0} by {1} cells.".format(nx, ny))
    n = 0
    for stepX in range(nx):  # Looping over the lines
        for stepY in range(ny):  # Looping over the columns
            # Step 1 - Filter data from the analyzed cell:
            # True for points inside the selected cell (half-open intervals).
            X_valid = np.logical_and((xmin + (stepX * cellsize) <= inFile.x),
                                     (xmin + ((stepX + 1) * cellsize) > inFile.x))
            Y_valid = np.logical_and((ymin + (stepY * cellsize) <= inFile.y),
                                     (ymin + ((stepY + 1) * cellsize) > inFile.y))
            logicXY = np.logical_and(X_valid, Y_valid)
            validXY = np.where(logicXY)
            # Show progress before any 'continue'.
            n += 1
            if verbose:
                percent = n / (nx * ny)
                hashes = '#' * int(round(percent * 20))
                spaces = ' ' * (20 - len(hashes))
                # Fix: the original used Python-2 print syntax (trailing
                # comma); use end="" to keep the progress bar on one line.
                print("\r[{0}] {1:.2f}%".format(hashes + spaces, percent * 100),
                      end="", flush=True)
            if len(validXY[0]) == 0:
                if verbose:
                    warningMsg.append(
                        "Cell {0},{1} has no points, corresponding file was not created.".format(stepX, stepY))
                continue
            fnametile = "{0}_{1}_{2}".format(stepX, stepY, os.path.basename(inputfname))
            saveCloud(fnametile, inFile.header, inFile.points[logicXY])
    if verbose:
        # Fix: bare `print` is a no-op expression in Python 3; call print().
        print()
        if len(warningMsg) > 0:
            print()
            print("Warnings:")
            print("{0}".format("\r\n".join(str(i) for i in warningMsg)))
        print()
        print("Done in {0}s.".format(int(time.time() - start)))
def test_z_t(self):
    """Testing z_t"""
    baseline = self.File1.z_t
    out = File.File(self.output_tempfile, mode="w", header=self.File1.header)
    out.points = self.File1.points
    # Copied points carry the original z_t values.
    self.assertTrue(all(baseline == out.z_t))
    # Shifting the dimension must make every value differ from baseline.
    out.z_t = out.z_t + 1
    self.assertTrue(all(baseline != out.z_t))
    out.close(ignore_header_changes=True)
def test_wave_packet_desc_index(self):
    """Testing wave_packet_desc_index."""
    baseline = self.File1.wave_packet_desc_index
    out = File.File(self.output_tempfile, mode="w", header=self.File1.header)
    out.points = self.File1.points
    # Copied points carry the original values.
    self.assertTrue(all(baseline == out.wave_packet_desc_index))
    # Shifting the dimension must make every value differ from baseline.
    out.wave_packet_desc_index = out.wave_packet_desc_index + 1
    self.assertTrue(all(baseline != out.wave_packet_desc_index))
    out.close(ignore_header_changes=True)
def test_byte_offset_to_waveform_data(self):
    """Testing byte_offset_to_waveform_data"""
    baseline = self.File1.byte_offset_to_waveform_data
    out = File.File(self.output_tempfile, mode="w", header=self.File1.header)
    out.points = self.File1.points
    # Copied points carry the original values.
    self.assertTrue(all(baseline == out.byte_offset_to_waveform_data))
    # Shifting the dimension must make every value differ from baseline.
    out.byte_offset_to_waveform_data = out.byte_offset_to_waveform_data + 1
    self.assertTrue(all(baseline != out.byte_offset_to_waveform_data))
    out.close(ignore_header_changes=True)
def test_waveform_packet_size(self):
    """Testing waveform_packet_size"""
    baseline = self.File1.waveform_packet_size
    out = File.File(self.output_tempfile, mode="w", header=self.File1.header)
    out.points = self.File1.points
    # Copied points carry the original values.
    self.assertTrue(all(baseline == out.waveform_packet_size))
    # Shifting the dimension must make every value differ from baseline.
    out.waveform_packet_size = out.waveform_packet_size + 1
    self.assertTrue(all(baseline != out.waveform_packet_size))
    out.close(ignore_header_changes=True)
def test_return_point_waveform_loc(self):
    """Testing return_point_waveform_loc"""
    baseline = self.File1.return_point_waveform_loc
    out = File.File(self.output_tempfile, mode="w", header=self.File1.header)
    out.points = self.File1.points
    # Copied points carry the original values.
    self.assertTrue(all(baseline == out.return_point_waveform_loc))
    # Shifting the dimension must make every value differ from baseline.
    out.return_point_waveform_loc = out.return_point_waveform_loc + 1
    self.assertTrue(all(baseline != out.return_point_waveform_loc))
    out.close(ignore_header_changes=True)
def test_vlr_defined_dimension_dtypes(self):
    """Testing v1.4 VLR defined dimension data types (LL API)

    For every extra-bytes data type (1..30) and both default tables,
    builds an ExtraBytesStruct VLR, widens the point record, writes a
    file with the new dimension filled with the type's default value,
    and asserts it reads back unchanged through the low-level writer.
    """
    from laspy.util import edim_fmt_dict, fmtLen, LEfmt,defaults,defaults_test
    for dfList in [defaults, defaults_test]:
        for i in range(1,31):
            print("...data format: "+ str(i))
            # Create a new header
            new_header = self.File1.header.copy()
            # Create new dimension
            dimname = "test_dimension_" + str(i)
            new_dimension = header.ExtraBytesStruct(
                name = dimname, data_type = i)
            # Collect bytes for new dimension specification
            new_dim_raw = new_dimension.to_byte_string()
            # Create a VLR defining our new dim
            new_VLR_rec = header.VLR(user_id = "LASF_Spec", record_id = 4,
                description = "Testing Extra Bytes.",
                VLR_body = (new_dim_raw))
            # Figure out how much we need to pad the point records
            new_dim_fmt = edim_fmt_dict[i]
            new_dim_num = new_dim_fmt[1]
            new_dim_bytelen = fmtLen[LEfmt[new_dim_fmt[0]]]
            new_total_bytes = new_dim_bytelen*new_dim_num
            new_header.data_record_length += (new_total_bytes)
            File2 = File.File(self.output_tempfile, mode = "w",
                header = new_header, vlrs = [new_VLR_rec],
                evlrs = self.File1.header.evlrs)
            File2.X = self.File1.X
            # Default fill value for this data type; strings are repeated,
            # multi-element numeric types become per-point lists.
            dim_default = dfList[LEfmt[edim_fmt_dict[i][0]]]
            is_str = type(dim_default) == type(" ")
            if new_dim_num == 1:
                new_dim_val = [dim_default]*len(self.File1)
            elif is_str:
                new_dim_val = [dim_default*new_dim_num]*len(self.File1)
            else:
                new_dim_val = [[dim_default]*new_dim_num]*len(self.File1)
            File2._writer.set_dimension(dimname, new_dim_val)
            current_dim_val = File2._writer.get_dimension(dimname)
            # String dimensions read back as bytes, so encode before comparing.
            if (is_str):
                self.assertEqual([x.encode() for x in new_dim_val],
                    current_dim_val.tolist(),
                    msg = "Problem with data format " + str(i))
            else:
                self.assertEqual(current_dim_val.tolist(), new_dim_val,
                    msg = "Problem with data format " + str(i))
            File2.close(ignore_header_changes = True)
def parse_las(filename, nbits):
    """Read las format point data and return header and points.

    Args:
        filename: path of the LAS file.
        nbits: when > 0, colour/attribute data is also built and returned.

    Returns:
        (bbox, points, pointsdata) where bbox is [[minx,miny,minz],
        [maxx,maxy,maxz]]; pointsdata is the colour/attribute matrix when
        nbits is truthy, else None. Returns ([], [], None) for an empty cloud.
    """
    pointfile = lasfile.File(filename, mode='r')
    header = pointfile.header
    maxheight = header.max[2]
    points = np.array(
        (pointfile.x, pointfile.y,
         pointfile.z)).transpose()  # get all points, change matrix orientation
    # Fix: np.int was removed in NumPy 1.24+; use the builtin int dtype.
    pointsdata = np.zeros((len(pointfile), 7), dtype=int)
    if nbits > 0:
        # if want to set other data, find in matrices
        try:
            red = pointfile.red
        except LaspyException:
            red = [0] * len(points)
        try:
            green = pointfile.green
        except LaspyException:
            green = [0] * len(points)
        try:
            blue = pointfile.blue
        except LaspyException:
            blue = [0] * len(points)
        # 16-bit colour channels scaled down to 8 bit.
        coldata = np.int64(np.array([red, green, blue]).transpose() / 256)
        scaleddata = np.array([
            pointfile.get_z(),
            pointfile.get_num_returns(), pointfile.intensity,
            pointfile.raw_classification
        ], dtype='int64').transpose()
        # Normalization bounds per column: height, returns, intensity, class.
        # Fix: renamed from `min`/`max`, which shadowed the builtins.
        norm_min = np.array([0, 1, 0, 0])
        norm_max = np.array([maxheight, 7, 1000, 31])
        normdata = np.int64(bu.normalize_np(scaleddata, norm_min, norm_max) * 255)
        coldata[(coldata[:, 0] == 0) & (coldata[:, 1] == 0) &
                (coldata[:, 2] == 0)] = 200  # if all three colours are 0, set to 200
        pointsdata = np.concatenate([coldata, normdata], axis=1)
    if len(points) == 0:
        return [], [], None
    bbox = [points.min(axis=0).tolist(), points.max(axis=0).tolist()]
    if nbits:
        return bbox, points, pointsdata
    else:
        return bbox, points, None
def topBottom(command, inputfname, outputfname="", percent=5.0, verbose=True):
    """Save the bottom (command 'b') or top (any other command) ``percent``
    elevation slice of a LAS cloud.

    Args:
        command: 'b' keeps the lowest slice; anything else keeps the highest.
        inputfname: path of the source LAS file.
        outputfname: output path; defaults to ``bottom_``/``top_`` plus the
            input basename.
        percent: thickness of the slice as a percentage of the z range.
        verbose: unused; kept for interface compatibility.
    """
    inFile = file.File(inputfname, mode="rw")
    # Elevation threshold: zmin + percent of the z range.
    r = (inFile.z.max() - inFile.z.min()) * percent / 100.0 + inFile.z.min()
    if command == 'b':
        # Simplified: np.logical_and(x, True) is just x.
        Z_valid = inFile.z <= r
        strFunc = 'bottom_'
    else:  # top
        r = inFile.z.max() - r
        Z_valid = inFile.z >= r
        strFunc = 'top_'
    validZ = np.where(Z_valid)
    # Fix: the default is "" (never None), so the original `== None` check
    # could never substitute the derived name and saveCloud was handed an
    # empty filename. Test truthiness instead.
    if not outputfname:
        outputfname = strFunc + os.path.basename(inputfname)
    saveCloud(outputfname, inFile.header, inFile.points[validZ])
def test_using_existing_header(self):
    """Test file creation using an existing header"""
    out = File.File(self.output_tempfile, mode="w", header=self.File1.header)
    # Snapshot the raw integer coordinates before writing.
    coords = {axis: list(getattr(self.File1, axis)) for axis in ("X", "Y", "Z")}
    out.Z = coords["Z"]
    out.Y = coords["Y"]
    out.X = coords["X"]
    # Each dimension must read back exactly as written.
    self.assertTrue(coords["X"] == list(out.get_x()))
    self.assertTrue(coords["Y"] == list(out.get_y()))
    self.assertTrue(coords["Z"] == list(out.get_z()))
    out.close(ignore_header_changes=True)
def test_glob_encode(self):
    """Testing v1.4 Global Encoding"""
    # The fixture starts with gps_time_type '0'; flip it and verify.
    self.assertEqual(self.File1.header.gps_time_type, '0')
    self.File1.header.gps_time_type = '1'
    self.assertEqual(self.File1.header.get_gps_time_type(), '1')
    dup = File.File(self.output_tempfile, mode="w", header=self.File1.header)
    # Each global-encoding flag must be copied into the new header.
    for flag in ("waveform_data_packets_internal",
                 "waveform_data_packets_external",
                 "synthetic_return_num"):
        self.assertEqual(getattr(self.File1.header, flag),
                         getattr(dup.header, flag))
    dup.close(ignore_header_changes=True)
def test_format_change_and_extra_bytes(self):
    """Testing format change and extra_bytes"""
    src = self.File1
    # Downgrade to format 1.2 / point format 0, padded for extra bytes.
    new_header = src.header.copy()
    new_header.format = 1.2
    new_header.data_format_id = 0
    new_header.data_record_length = 50
    dst = File.File(self.output_tempfile, mode="w", header=new_header)
    # Copy every dimension that survives the format change.
    for dim in src.point_format:
        values = src.reader.get_dimension(dim.name)
        if dim.name in dst.point_format.lookup:
            dst.writer.set_dimension(dim.name, values)
    dst.extra_bytes = [b"Test"] * len(dst)
    self.assertTrue(b"Test" in bytes(dst.get_extra_bytes()[14]))
    dst.close(ignore_header_changes=True)