def cgal_simplify(incld, outcld=None, method='grid', k=None):
    """
    Simplify a point cloud via CGAL methods.

    Parameters
    ----------
    incld: string
        input cloud
    outcld: string
        output cloud, if None, input will be overwritten
    method: string
        a choice of 'grid', 'hierarch', 'wlop'
    k: int
        k-neighbours for spacing, if None it is estimated from the data
    """
    points = Point_set_3(incld)

    if k is None:
        k = estimate_global_k_neighbor_scale(points)
        print("K-neighbor scale is", k)

    avg_space = compute_average_spacing(points, k)
    print("Average point spacing is", avg_space)

    print('Using', method)

    if outcld is None:
        outcld = incld

    if method == 'grid':
        grid_simplify_point_set(points, avg_space)
    elif method == 'hierarch':
        hierarchy_simplify_point_set(points)
    elif method == 'wlop':
        wlop = Point_set_3()
        # TODO wlop seems to lose rgb values - perhaps need to get the value
        # from the nearest point?
        wlop_simplify_and_regularize_point_set(points,  # input
                                               wlop)    # output
        wlop.write(outcld)

    if method == 'grid' or method == 'hierarch':
        print(points.size(), "point(s) remaining,", points.garbage_size(),
              "point(s) removed")
        points.collect_garbage()
        points.write(outcld)
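# Usage sketch for cgal_simplify (hypothetical file names; assumes the
# CGAL_Point_set_processing_3 functions called above are imported at
# module level):
#
#   cgal_simplify('scan.ply', outcld='scan_grid.ply', method='grid')
#   cgal_simplify('scan.ply', outcld='scan_wlop.ply', method='wlop', k=12)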
def cgal_outlier(incld, outcld=None, k=24, distance=0.1, percentage=100,
                 recursive=False):
    """
    Point cloud outlier removal via CGAL.

    Parameters
    ----------
    incld: string
        input cloud
    outcld: string
        output cloud, if None, input will be overwritten
    k: int
        k-neighbours
    distance: float
        min distance to outliers
    percentage: int
        max percentage of points to remove
    recursive: bool
        if True, repeat removal until no further points are discarded
    """
    points = Point_set_3(incld)

    if recursive is True:
        nopoints = 1
        while nopoints > 0:
            remove_outliers(points, k, neighbor_radius=0.0,
                            threshold_distance=distance,
                            threshold_percent=percentage)
            nopoints = points.garbage_size()
            points.collect_garbage()
    else:
        remove_outliers(points, k, neighbor_radius=0.0,
                        threshold_distance=distance)
        print(points.size(), "point(s) remaining,", points.garbage_size(),
              "point(s) removed")
        # bin it
        points.collect_garbage()

    if outcld is None:
        outcld = incld
    points.write(outcld)
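# Usage sketch (hypothetical file names): one conservative pass, then a
# recursive run that keeps stripping outliers until none are removed:
#
#   cgal_outlier('scan.ply', outcld='scan_clean.ply', k=24, distance=0.1)
#   cgal_outlier('scan.ply', k=24, distance=0.5, percentage=5, recursive=True)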
def cgal_normals(incld, outcld=None, k=24, method='jet'):
    """
    Normal estimation via CGAL.

    Parameters
    ----------
    incld: string
        input cloud
    outcld: string
        output cloud, if None, input will be overwritten
    k: int
        k-neighbours
    method: string
        method of estimating normals, one of: 'jet', 'mst' or 'pca'
    """
    points = Point_set_3(incld)
    points.add_normal_map()

    if method == 'jet':
        print("Running jet_estimate_normals...")
        jet_estimate_normals(points, k)
    elif method == 'mst':
        # NB mst_orient_normals orients existing normals rather than
        # estimating them from scratch, so is typically run after 'jet'/'pca'
        print("Running mst_orient_normals...")
        mst_orient_normals(points, k)
    elif method == 'pca':
        print("Running pca_estimate_normals...")
        pca_estimate_normals(points, k)

    if outcld is None:
        outcld = incld
    points.write(outcld)
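# Usage sketch (hypothetical file names): estimate normals with a jet fit,
# then run the mst pass to orient the estimated normals consistently:
#
#   cgal_normals('scan.ply', outcld='scan_n.ply', k=24, method='jet')
#   cgal_normals('scan_n.ply', k=24, method='mst')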
def cgal_edge_upsample(incld, outcld=None, nopoints=1000, sharpness=30.0,
                       edge=1.0, n_radius=-1.0):
    """
    Edge-aware upsampling.

    Parameters
    ----------
    incld: string
        input cloud
    outcld: string
        output cloud, if None, input will be overwritten
    nopoints: int
        number of output points
    sharpness: float
        sharpness angle
    edge: float
        edge sensitivity
    n_radius: float
        neighbor radius
    """
    points = Point_set_3(incld)

    edge_aware_upsample_point_set(points,
                                  number_of_output_points=nopoints,
                                  sharpness_angle=sharpness,
                                  edge_sensitivity=edge,
                                  neighbor_radius=n_radius)

    if outcld is None:
        outcld = incld
    points.write(outcld)
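# Usage sketch (hypothetical file names): upsample to 10000 points with a
# slightly sharper angle threshold than the default:
#
#   cgal_edge_upsample('scan.ply', outcld='scan_up.ply', nopoints=10000,
#                      sharpness=25.0)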
from __future__ import print_function
from CGAL.CGAL_Kernel import Point_3
from CGAL.CGAL_Kernel import Vector_3
from CGAL.CGAL_Point_set_3 import Point_set_3
from CGAL.CGAL_Classification import *
import sys
import os

datadir = os.environ.get('DATADIR', '../data')
datafile = datadir + '/b9_training.ply'

print("Reading input...")
points = Point_set_3(datafile)
print(points.size(), "points read")

labels = Label_set()
ground = labels.add("ground")
vegetation = labels.add("vegetation")
building = labels.add("building")

print("Computing features...")
features = Feature_set()
generator = Point_set_feature_generator(points, 5)

features.begin_parallel_additions()
generator.generate_point_based_features(features)
if points.has_normal_map():
    generator.generate_normal_based_features(features, points.normal_map())
if points.has_int_map("red") and points.has_int_map("green") \
        and points.has_int_map("blue"):
    generator.generate_color_based_features(features,
                                            points.int_map("red"),
                                            points.int_map("green"),
                                            points.int_map("blue"))
features.end_parallel_additions()
from __future__ import print_function
from CGAL.CGAL_Kernel import Point_3
from CGAL.CGAL_Kernel import Vector_3
from CGAL.CGAL_Point_set_3 import Point_set_3
from CGAL.CGAL_Point_set_processing_3 import *
import os

datadir = os.environ.get('DATADIR', '../data')
datafile = datadir + '/oni.xyz'

print("Running read_xyz_points...")
points = Point_set_3(datafile)
print(points.size(), "points read")

k = estimate_global_k_neighbor_scale(points)
print("K-neighbor scale is", k)

avg_space = compute_average_spacing(points, k)
print("Average point spacing is", avg_space)

print("Running bilateral_smooth_point_set...")
bilateral_smooth_point_set(points, 3 * k)

scale = estimate_global_range_scale(points)
print("Range scale is", scale)

print("Running jet_smooth_point_set...")
jet_smooth_point_set(points, 3 * k, neighbor_radius=scale * 2)

print("Running edge_aware_upsample_point_set...")
edge_aware_upsample_point_set(points, number_of_output_points=2000)
from __future__ import print_function
from CGAL.CGAL_Kernel import Point_3
from CGAL.CGAL_Kernel import Vector_3
from CGAL.CGAL_Point_set_3 import Point_set_3
from CGAL.CGAL_Shape_detection import *
import os

datadir = os.environ.get('DATADIR', '../data')
datafile = datadir + '/cube.pwn'

points = Point_set_3(datafile)
print(points.size(), "points read")

print("Detecting planes with region growing (sphere query)")
plane_map = points.add_int_map("plane_index")
nb_planes = region_growing(points, plane_map, min_points=20)
print(nb_planes, "plane(s) detected")

print("Detecting planes with region growing (k-neighbor query)")
nb_planes = region_growing(points, plane_map, min_points=20, k=12)
print(nb_planes, "plane(s) detected")

print("Detecting planes with efficient RANSAC")
planes = efficient_RANSAC(points, plane_map)
print(len(planes), "plane(s) detected, first 10 planes are:")
for s in range(min(len(planes), 10)):
    print(" *", s, ":", planes[s])

print("Detecting cylinders with efficient RANSAC")
cylinder_map = points.add_int_map("cylinder_index")
cylinders = efficient_RANSAC(points, cylinder_map, planes=False,
                             cylinders=True)
print(len(cylinders), "cylinder(s) detected")
def cgal_features(incld, outcld=None, k=5, rgb=True, parallel=True):
    """
    Calculate CGAL-based point cloud features and write to file.

    Files will be hefty!

    Parameters
    ----------
    incld: string
        the input point cloud
    outcld: string
        the output point cloud, if None then write to incld
    k: int
        the no of scales at which to calculate features
    rgb: bool
        whether to include RGB-based features
    parallel: bool
        if True, process multi-threaded
    """
    print("Reading pointcloud")
    points = Point_set_3(incld)

    print("Computing features")
    features = Feature_set()
    generator = Point_set_feature_generator(points, k)

    if parallel is True:
        features.begin_parallel_additions()

    generator.generate_point_based_features(features)
    if points.has_normal_map():
        generator.generate_normal_based_features(features, points.normal_map())

    if rgb is True:
        if points.has_int_map("red") and points.has_int_map(
                "green") and points.has_int_map("blue"):
            generator.generate_color_based_features(features,
                                                    points.int_map("red"),
                                                    points.int_map("green"),
                                                    points.int_map("blue"))
    if parallel is True:
        features.end_parallel_additions()
    print("Features calculated")

    names = _get_featnames(features)

    if rgb is True:
        rgbList = [points.add_float_map(n) for n in names[-3:]]
        for ftr, r in enumerate(tqdm(rgbList)):
            ftr += 50  # not ideal - assumes the colour features start at index 50
            # list comp is no quicker...
            # _ = [r.set(p, features.get(ftr).value(i))
            #      for i, p in enumerate(points.indices())]
            for i, p in enumerate(points.indices()):
                r.set(p, features.get(ftr).value(i))
        # scrub the hsv names - dealt with above, the rest go in the loop below
        del names[-3:]

    attribList = [points.add_float_map(n) for n in names]

    # This is of course unacceptably slow.
    # TODO C++ function and wrap
    for ft, a in enumerate(tqdm(attribList)):
        # a list comp is not really quicker -
        # ~11-12 minutes for 6.4 million points w/ lcomp:
        # _ = [a.set(p, features.get(ft).value(i))
        #      for i, p in enumerate(points.indices())]
        # ~10 minutes for 6.4 million points with a standard loop
        for i, p in enumerate(points.indices()):
            a.set(p, features.get(ft).value(i))

    if outcld is None:
        outcld = incld
    points.write(outcld)
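# Usage sketch (hypothetical file name): write point-based, normal-based and
# colour-based features at 5 scales back to the input cloud:
#
#   cgal_features('scan.ply', k=5, rgb=True, parallel=True)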
def cgal_features_mem(incld, k=5, rgb=True, parallel=True):
    """
    Calculate CGAL-based point cloud features and return as a pandas df.

    Saves on disk space but many of these will take longer.

    Parameters
    ----------
    incld: string
        the input point cloud
    k: int
        the no of scales at which to calculate features
    rgb: bool
        whether to include RGB-based features
    parallel: bool
        if True, process multi-threaded
    """
    print("Reading pointcloud")
    points = Point_set_3(incld)

    print("Computing features")
    features = Feature_set()
    generator = Point_set_feature_generator(points, k)

    # TODO not convinced this is actually running more than 1 thread
    if parallel is True:
        features.begin_parallel_additions()

    generator.generate_point_based_features(features)
    if points.has_normal_map():
        generator.generate_normal_based_features(features, points.normal_map())

    if rgb is True:
        if points.has_int_map("red") and points.has_int_map(
                "green") and points.has_int_map("blue"):
            generator.generate_color_based_features(features,
                                                    points.int_map("red"),
                                                    points.int_map("green"),
                                                    points.int_map("blue"))
    if parallel is True:
        features.end_parallel_additions()
    print("Features calculated")

    names = _get_featnames(features)

    # could return this or the feat names....
    featarray = np.zeros(shape=(points.size(), len(names)))

    # For reference in case resurrected:
    # df = pd.DataFrame(columns=names)
    # if we are pulling from a shared mem object, can we do this in parallel?
    # No - can't pickle a swig object, and inserting a list into a df is
    # much slower anyway
    # cnt = np.arange(0, points.size())
    # bigList = Parallel(n_jobs=nt, verbose=2)(
    #     delayed(_cgalfeat)(cnt, features, n, idx)
    #     for idx, n in enumerate(names))

    # ~8 minutes for 6.4 million points
    for ftr, _ in enumerate(tqdm(names)):
        # loops in loops, this is not ideal -
        # TODO need a C++ func to output a np array
        featarray[:, ftr] = [features.get(ftr).value(i)
                             for i, p in enumerate(points.indices())]

    df = pd.DataFrame(data=featarray, columns=names)

    return df
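# A plausible sketch of the _get_featnames helper referenced above - an
# assumption, not part of the original: it presumes the swig-bound
# Feature_set exposes size() and that features.get(i) has a name() method.
def _get_featnames(features):
    """
    Return the name of each feature in a Feature_set as a list.
    """
    return [features.get(i).name() for i in range(features.size())]

# Usage sketch (hypothetical file name):
#
#   df = cgal_features_mem('scan.ply', k=5, rgb=True)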
from CGAL.CGAL_Kernel import Point_3
from CGAL.CGAL_Kernel import Vector_3
from CGAL.CGAL_Point_set_3 import Point_set_3
import os

datadir = os.environ.get('DATADIR', '../data')
datafile = datadir + '/oni.xyz'

points = Point_set_3()

# Insertions
idx = points.insert()
print("Point", idx, "inserted =", points.point(idx))
idx = points.insert(Point_3(0, 1, 2))
print("Point", idx, "inserted =", points.point(idx))
points.insert_range([2., 4., 5., 2, 3, 4])

# Iterate and display points
print("Point set:")
for p in points.points():
    print(" *", p)

# With normal
points.add_normal_map()
idx = points.insert(Point_3(6, 7, 8), Vector_3(1, 1, 1))
print("Point", idx, "inserted = (", points.point(idx), "), (",
      points.normal(idx), ")")

# Access/modification through normal map
normal_map = points.normal_map()
if normal_map.is_valid:
    print("Normal map is valid")
from __future__ import print_function
from sys import exit
from sys import stderr

from CGAL.CGAL_Kernel import Point_3
from CGAL.CGAL_Polyhedron_3 import Polyhedron_3
from CGAL.CGAL_Point_set_3 import Point_set_3
from CGAL.CGAL_Advancing_front_surface_reconstruction import *

points = Point_set_3("../data/oni.xyz")
if points.empty():
    stderr.write("Error: cannot read file ../data/oni.xyz\n")
    exit()

P = Polyhedron_3()
advancing_front_surface_reconstruction(points, P)
P.write_to_file("oni.off")