def show_points_and_skeleton(point_file, skeleton_file):
    """Show a sampled point cloud together with skeleton vertices in polyscope.

    Args:
        point_file (str): text file; each line is "<tag> x y z" — column 0 is
            skipped, columns 1-3 are parsed as floats.
        skeleton_file (str): OBJ-like file; only 'v x y z' vertex lines are
            used (edge 'l' lines are ignored here).
    """
    ps.init()

    # Parse the sampled points. Building the (N, 3) array directly replaces
    # the original np.zeros + index-loop fill; reshape keeps an (0, 3) shape
    # even for an empty file.
    coords = []
    with open(point_file) as f_in:
        for line in f_in:
            cols = line.split(' ')
            coords.append([float(cols[1]), float(cols[2]), float(cols[3])])
    points = np.asarray(coords, dtype=float).reshape(-1, 3)

    ps.register_point_cloud("points", points)
    ps.get_point_cloud("points").set_radius(0.0015)

    # Parse skeleton vertices; shown as a red point cloud (edges unused).
    vertex_pos = []
    with open(skeleton_file) as f_in:
        for line in f_in:
            temp = line.split(' ')
            if temp[0] == 'v':
                vertex_pos.append([float(temp[1]), float(temp[2]), float(temp[3])])
    skeleton = ps.register_point_cloud(
        "skeleton curve", np.asarray(vertex_pos, dtype=float).reshape(-1, 3))
    skeleton.set_radius(0.005)
    skeleton.set_color((1, 0, 0))

    ps.set_autoscale_structures(True)
    ps.show()
def show_points_in_obj_file(obj_file):
    """Show the points listed in an OBJ-style file as a polyscope point cloud.

    Args:
        obj_file (str): text file; each line is "<tag> x y z" — column 0 is
            skipped, columns 1-3 are parsed as floats.
    """
    ps.init()

    # Idiom fix: build the (N, 3) array directly instead of filling a
    # preallocated np.zeros array row by row; reshape keeps (0, 3) shape
    # for an empty file.
    coords = []
    with open(obj_file) as f_in:
        for line in f_in:
            cols = line.split(' ')
            coords.append([float(cols[1]), float(cols[2]), float(cols[3])])
    points = np.asarray(coords, dtype=float).reshape(-1, 3)

    ps.register_point_cloud("points", points)
    ps.get_point_cloud("points").set_radius(0.0015)
    ps.set_autoscale_structures(True)
    ps.show()
def show_polyscope(self):
    # Render every mesh of this MeshSet in a polyscope window
    # (https://polyscope.run/py/). Point clouds and surface meshes are
    # registered accordingly, with any per-vertex / per-face color and
    # scalar attributes attached. Requires the polyscope package
    # (pip install polyscope).
    import polyscope
    import numpy
    polyscope.init()
    for m in self:
        is_enabled = m.is_visible()
        as_cloud = m.is_point_cloud()
        if as_cloud:
            psm = polyscope.register_point_cloud(
                m.label(), m.transformed_vertex_matrix(), enabled=is_enabled)
        else:
            psm = polyscope.register_surface_mesh(
                m.label(), m.transformed_vertex_matrix(), m.face_matrix(),
                enabled=is_enabled)
        if m.has_vertex_color():
            # Drop the alpha channel: polyscope expects RGB.
            psm.add_color_quantity(
                'vertex_color', numpy.delete(m.vertex_color_matrix(), 3, 1))
        if m.has_vertex_scalar():
            psm.add_scalar_quantity('vertex_scalar', m.vertex_scalar_array())
        if not as_cloud and m.has_face_color():
            psm.add_color_quantity(
                'face_color', numpy.delete(m.face_color_matrix(), 3, 1),
                defined_on='faces')
        if not as_cloud and m.has_face_scalar():
            psm.add_scalar_quantity(
                'face_scalar', m.face_scalar_array(), defined_on='faces')
    polyscope.show()
def add_point_cloud(points, name="my_point_cloud", color=(0.109, 0.388, 0.890), radius=0.001):
    """Register *points* with polyscope and style the resulting cloud.

    Args:
        points: (N, 3) array of positions.
        name (str): structure name shown in the polyscope UI.
        color (tuple[float]): RGB color of the cloud.
        radius (float): point radius.

    Returns:
        The polyscope point cloud structure.
    """
    cloud = ps.register_point_cloud(name, points)
    cloud.set_color(color)
    cloud.set_radius(radius)
    return cloud
def show_skeleton_in_obj_file(obj_file):
    """Display the skeleton stored in an OBJ file: the polyline as a curve
    network plus its vertices as a red point cloud.

    Args:
        obj_file (str): OBJ file with 'v x y z' vertex lines and 'l i j'
            (1-based) line-segment lines.
    """
    ps.init()

    verts = []
    segs = []
    with open(obj_file) as f_in:
        for raw in f_in:
            fields = raw.split(' ')
            tag = fields[0]
            if tag == 'v':
                verts.append(np.array([float(fields[1]), float(fields[2]), float(fields[3])]))
            if tag == 'l':
                # OBJ indices are 1-based; polyscope wants 0-based.
                segs.append(np.array([int(fields[1]) - 1, int(fields[2]) - 1]))

    points = np.array(verts)
    edges = np.array(segs)

    skeleton = ps.register_curve_network("skeleton curve", points, edges)
    point_cloud = ps.register_point_cloud("point cloud", points)
    skeleton.set_radius(0.001)
    point_cloud.set_radius(0.005)
    point_cloud.set_color((1, 0, 0))
    ps.set_autoscale_structures(True)
    ps.show()
def draw_points(self, points, cls_name, point_color=(0.5, 0.5, 0.5), radius=0.0001):
    """Draw points on visualizer.

    Args:
        points (numpy.array | torch.tensor, shape=[N, 3+C]):
            points to visualize.
        cls_name (str): name of the class.
        point_color (tuple[float]): color of points.
            Default: (0.5, 0.5, 0.5). NOTE: currently unused — the cloud
            color is taken from ``self.class2color``; kept for interface
            compatibility.
        radius (float): the size of points to show on visualizer.
            Default: 0.0001.

    Returns:
        ps_pcd: polyscope point cloud interactive object.
    """
    if isinstance(points, torch.Tensor):
        points = points.cpu().numpy()
    # BUGFIX: removed the unconditional `point_color = (0.5, 0.5, 0.5)`
    # reassignment that silently discarded the caller's argument.
    # Accumulate all points seen so far for this class so the registered
    # cloud always contains the full history.
    if self.points_by_class.get(cls_name, None) is None:
        self.points_by_class[cls_name] = points
    else:
        self.points_by_class[cls_name] = np.concatenate(
            [self.points_by_class[cls_name], points], axis=0)
    ps_pcd = ps.register_point_cloud(
        cls_name,
        self.points_by_class[cls_name],
        radius=radius,
        color=self.class2color[cls_name]
        if cls_name in self.class2color else self.class2color['Unknown'])
    return ps_pcd
from pyheliostools import outputToNumpy # Polyscope. import polyscope as ps print('Preparing data for polyscope plot...') # Create numpy Array with points from trajectory. measurement_points, trajectory_points = outputToNumpy(output) # Points to be plotted: # First three cols are x, y and z vals. t_points = trajectory_points[:, 0:3] m_points = measurement_points[:, :3] # Initialize polyscope. ps.init() # Set correct direction for visualization of point clouds. ps.set_up_dir("z_up") # Register both the trajectory and measurement point clouds seperately. measurement_cloud = ps.register_point_cloud("Measurements", m_points) trajectory_cloud = ps.register_point_cloud("Scanner Trajectory", t_points) # Set more visually appealing point radiuses. measurement_cloud.set_radius(0.00091, relative=True) trajectory_cloud.set_radius(0.00191, relative=True) ps.show()
def main():
    """CLI entry point: run a PointTriNet mesher on an input point set or
    mesh, optionally visualize the candidate/high-probability meshes in
    polyscope, and optionally write the results to disk.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('model_weights_path', type=str, help='path to the model checkpoint')
    parser.add_argument('input_path', type=str, help='path to the input')
    parser.add_argument('--disable_cuda', action='store_true', help='disable cuda')
    parser.add_argument('--sample_cloud', type=int, help='run on sampled points')
    parser.add_argument('--n_rounds', type=int, default=5, help='number of rounds')
    parser.add_argument('--prob_thresh', type=float, default=.9, help='threshold for final surface')
    parser.add_argument(
        '--output', type=str, help='path to save the resulting high prob mesh to. also disables viz')
    parser.add_argument('--output_trim_unused', action='store_true',
                        help='trim unused vertices when outputting')

    # Parse arguments
    args = parser.parse_args()
    set_args_defaults(args)

    viz = not args.output
    args.polyscope = False

    # Initialize polyscope
    if viz:
        polyscope.init()

    # === Load the input
    if args.input_path.endswith(".npz"):
        record = np.load(args.input_path)
        verts = torch.tensor(record['vert_pos'], dtype=args.dtype, device=args.device)
        surf_samples = torch.tensor(record['surf_pos'], dtype=args.dtype, device=args.device)
        samples = verts.clone()
        faces = torch.zeros((0, 3), dtype=torch.int64, device=args.device)
        polyscope.register_point_cloud("surf samples", toNP(surf_samples))
    elif args.input_path.endswith(".xyz"):
        # BUGFIX: this was a second independent `if`, so an .npz input also
        # fell through to the mesh-reading `else` branch below, clobbering
        # the already-loaded verts/faces/samples.
        raw_pts = np.loadtxt(args.input_path)
        verts = torch.tensor(raw_pts, dtype=args.dtype, device=args.device)
        samples = verts.clone()
        faces = torch.zeros((0, 3), dtype=torch.int64, device=args.device)
        polyscope.register_point_cloud("surf samples", toNP(verts))
    else:
        print("reading mesh")
        verts, faces = utils.read_mesh(args.input_path)
        print(" {} verts {} faces".format(verts.shape[0], faces.shape[0]))
        verts = torch.tensor(verts, dtype=args.dtype, device=args.device)
        faces = torch.tensor(faces, dtype=torch.long, device=args.device)
        # verts = verts[::10,:]

    if args.sample_cloud:
        samples = mesh_utils.sample_points_on_surface(verts, faces, args.sample_cloud)
    else:
        samples = verts.clone()

    # === Load the model
    print("loading model weights")
    model = PointTriNet_Mesher()
    model.load_state_dict(torch.load(args.model_weights_path))
    model.eval()

    with torch.no_grad():
        # Sample lots of faces from the vertices
        print("predicting")
        candidate_triangles, candidate_probs = model.predict_mesh(
            samples.unsqueeze(0), n_rounds=args.n_rounds)
        candidate_triangles = candidate_triangles.squeeze(0)
        candidate_probs = candidate_probs.squeeze(0)
        print("done predicting")

        # Keep only faces above the probability threshold, then greedily
        # close remaining holes.
        high_prob = args.prob_thresh
        high_faces = candidate_triangles[candidate_probs > high_prob]
        closed_faces = mesh_utils.fill_holes_greedy(high_faces)

        # Visualize
        if viz:
            polyscope.register_point_cloud("input points", toNP(samples))
            spmesh = polyscope.register_surface_mesh(
                "all faces", toNP(samples), toNP(candidate_triangles), enabled=False)
            spmesh.add_scalar_quantity("probs", toNP(candidate_probs), defined_on='faces')
            spmesh = polyscope.register_surface_mesh(
                "high prob mesh " + str(high_prob), toNP(samples), toNP(high_faces))
            spmesh.add_scalar_quantity(
                "probs", toNP(candidate_probs[candidate_probs > high_prob]), defined_on='faces')
            spmesh = polyscope.register_surface_mesh(
                "hole-closed mesh " + str(high_prob), toNP(samples), toNP(closed_faces),
                enabled=False)
            polyscope.show()

        # Save output
        if args.output:
            high_prob = args.prob_thresh
            out_verts = toNP(samples)
            out_faces = toNP(high_faces)
            out_faces_closed = toNP(closed_faces)

            if args.output_trim_unused:
                out_verts, out_faces, _, _ = igl.remove_unreferenced(out_verts, out_faces)

            igl.write_triangle_mesh(args.output + "_mesh.ply", out_verts, out_faces)
            write_ply_points(args.output + "_samples.ply", toNP(samples))
            igl.write_triangle_mesh(args.output + "_pred_mesh.ply", out_verts, out_faces)
            igl.write_triangle_mesh(args.output + "_pred_mesh_closed.ply", out_verts, out_faces_closed)
            # CLEANUP: removed a second, byte-identical
            # write_ply_points(args.output + "_samples.ply", ...) call that
            # just rewrote the same file.
# Build a small CSG tree: a union of a cylinder and a posed
# (translated + rotated) box.
tree = Union('u2', [
    Cylinder(1.0, 2.0, 's1'),
    Pose([1.0, 0.0, 0.0], [45.0, 45.0, 45.0], [Box([1.0, 1.0, 1.0], 'b1')], 'p1')
])

# Replace the hand-built tree with one loaded from the legacy JSON format.
with open('data/bobbin.json') as json_file:
    tree = node_from_old_json_format(json.load(json_file), False)
print('loaded tree: {}'.format(tree.to_dict()))
print(tree.to_dict())

# Round-trip through the dict representation to sanity-check serialization.
tree2 = CSGNode.from_dict(tree.to_dict())
print(tree2.to_dict())
print(json.dumps(tree2.to_dict()))

# uncomment the following line for stack test
# tree = node_from_stack_format('data/stack.txt')

# Sample a point cloud and extract a mesh from the CSG tree over the
# bounding box [-20, 20]^3, then display both in polyscope.
pc = point_cloud_from_node(tree, [-20, -20, -20], [20, 20, 20], 0.2, 0.2)
v, f, n = node_to_mesh(tree, [-20, -20, -20], [20, 20, 20], 0.8)
ps.init()
ps.register_surface_mesh("mesh 1", v, f)
ps.register_point_cloud("points 1", pc)
ps.show()
# Vector heat (transport vector) ext = solver.transport_tangent_vector(1, [6., 6.]) ext3D = ext[:,0,np.newaxis] * basisX + ext[:,1,np.newaxis] * basisY ps_mesh.add_vector_quantity("transport vec", ext3D) ext = solver.transport_tangent_vectors([1, 22], [[6., 6.], [3., 4.]]) ext3D = ext[:,0,np.newaxis] * basisX + ext[:,1,np.newaxis] * basisY ps_mesh.add_vector_quantity("transport vec2", ext3D) # Vector heat (log map) logmap = solver.compute_log_map(1) ps_mesh.add_parameterization_quantity("logmap", logmap) ## = Point cloud test P = V ps_cloud = ps.register_point_cloud("cloud", P) # == heat solver solver = pp3d.PointCloudHeatSolver(P) # distance dists = solver.compute_distance(4) dists2 = solver.compute_distance_multisource([4, 13, 784]) ps_cloud.add_scalar_quantity("dist", dists) ps_cloud.add_scalar_quantity("dist2", dists2) # scalar extension ext = solver.extend_scalar([1, 22], [0., 6.]) ps_cloud.add_scalar_quantity("ext", ext) # Vector heat (tangent frames)
def register_point_cloud(self, name, tensor, **kwargs):
    """Register a torch tensor as a polyscope point cloud and cache the
    structure in ``self.pcls`` under *name*.

    Extra keyword arguments are forwarded to ``ps.register_point_cloud``.
    """
    # Move GPU tensors to host memory (detached from autograd) before
    # handing the data to polyscope.
    if 'cpu' not in str(tensor.device):
        tensor = tensor.cpu().detach()
    flat = tensor.reshape(-1, 3).numpy()
    self.pcls[name] = ps.register_point_cloud(name, flat, **kwargs)
def show_points(points):
    """Open an interactive polyscope window showing *points* as a
    small-radius point cloud."""
    ps.init()
    ps.register_point_cloud("points", points)
    cloud = ps.get_point_cloud("points")
    cloud.set_radius(0.0015)
    ps.show()
# NOTE(review): fragment — `Vertex(pos)` appears to be the tail of an
# enclosing loop whose start is not visible here; `es`, `esChannel`, `show`,
# `data`, `Vertex` and `vertexToChannelCurves` are defined elsewhere.
Vertex(pos)
for i, e in enumerate(es):
    Edge(e[0], e[1], esChannel[i])

idsChannel = list(set(esChannel))
idsChannel.sort()

if show:
    # NOTE(review): original indentation lost — assuming the whole
    # visualization loop below sits under `if show:` since it requires
    # polyscope.init(); confirm against the original file.
    polyscope.init()
    data['channel'] = []
    for v in tqdm(Vertex.all):
        # `ps` here is a local array of positions, not the polyscope module.
        ps, es, ic, ans, rs, fs, fps, ics = vertexToChannelCurves(v)
        ps_cloud = polyscope.register_point_cloud("my points", ps)
        ps_net = polyscope.register_curve_network("my network", ps, es)
        # Remap channel ids to dense indices, then pick one of three colors.
        all_ic = list(set(ic))
        ic = [all_ic.index(i) for i in ic]
        cs = [
            np.array([0.7, 0.4, 0.2]) if i == 0 else
            np.array([0.4, 0.7, 0.2]) if i == 1 else
            np.array([0.4, 0.2, 0.7])
            for i in ic
        ]
        cs = np.array(cs)
        # print(cs)
        # data['channel'].append({'ps': ps.tolist(), 'es': es.tolist()})
def display_sketch_versions(self):
    """Visualize every stored sketch version as a polyscope curve network
    (one per version, best-scoring version enabled), with per-edge score
    quantities, plus a point cloud of the 3D intersections per version.
    """
    # remove old displays
    ps.remove_all_structures()
    #for i in self.sketch_version_counter:
    # display in total_score order
    sketch_versions = self.get_n_best_sketch_versions(len(self.sketch_versions_reference.keys()))
    #sketch_versions = list(self.sketch_versions_reference.keys())[:10]
    #sketch_versions = self.get_n_best_sketch_versions(1)
    for sketch_version_id, sketch_version in enumerate(sketch_versions):
        # collect 3D lines and their per-line score components
        lines = []
        scores = []
        line_coverages = []
        axis_alignments = []
        orthogonalities = []
        tangentialities = []
        planarities = []
        foreshortenings = []
        curve_geoms = []
        circularities = []
        is_assigned = []
        dep_node_ids = []
        for dep_node_id, (cand_id, _, score_container) in enumerate(self.sketch_versions_reference[sketch_version]):
            # Skip dependency nodes with no candidate geometry.
            if len(self.dependency_nodes[dep_node_id].candidate_nodes) == 0:
                continue
            dep_node_ids.append(dep_node_id)
            lines.append(self.dependency_nodes[dep_node_id].candidate_nodes[cand_id].geometry)
            #if dep_node_id == 26:
            #    print("dep_node_id: ", dep_node_id)
            #    print(lines[-1])
            scores.append(score_container.total_score)
            line_coverages.append(score_container.line_coverage)
            axis_alignments.append(score_container.axis_alignment)
            orthogonalities.append(score_container.orthogonality)
            tangentialities.append(score_container.tangentiality)
            planarities.append(score_container.planarity)
            foreshortenings.append(score_container.foreshortening)
            curve_geoms.append(score_container.curve_geom)
            circularities.append(score_container.circularity)
            is_assigned.append(self.dependency_nodes[dep_node_id].is_assigned)
        # Flatten the polylines into one node/edge list for a single curve
        # network; line_ids maps each edge back to its source line so the
        # per-line scores can be broadcast to edges below.
        nodes = []
        edge_counter = 0
        edges = []
        enabled = False
        if sketch_version_id == 0:
            # Only the best-scoring version starts out visible.
            enabled = True
        line_ids = []
        for line_id, line in enumerate(lines):
            for p in line:
                nodes.append(p)
            for p_id in range(len(line)-1):
                edges.append([edge_counter, edge_counter+1])
                line_ids.append(line_id)
                edge_counter += 1
            # Skip the gap between consecutive polylines.
            edge_counter += 1
            #edge_counter += 1
            #nodes.append(line[0])
            #nodes.append(line[-1])
            #edges.append([edge_counter, edge_counter+1])
            #edge_counter += 2
        sketch_3d = ps.register_curve_network("sketch_version: "+str(sketch_version), nodes=np.array(nodes), edges=np.array(edges), enabled=enabled)
        sketch_3d.add_scalar_quantity("dep_node_ids", np.array(dep_node_ids)[line_ids], defined_on="edges", enabled=False, cmap="reds")
        sketch_3d.add_scalar_quantity("line_coverage", np.array(line_coverages)[line_ids], defined_on="edges", enabled=True, cmap="jet", vminmax=(0., 1.))
        sketch_3d.add_scalar_quantity("axis_alignment", np.array(axis_alignments)[line_ids], defined_on="edges", enabled=True, cmap="jet", vminmax=(0., 1.))
        sketch_3d.add_scalar_quantity("orthogonality", np.array(orthogonalities)[line_ids], defined_on="edges", enabled=True, cmap="jet", vminmax=(0., 1.))
        sketch_3d.add_scalar_quantity("tangentiality", np.array(tangentialities)[line_ids], defined_on="edges", enabled=True, cmap="jet", vminmax=(0., 1.))
        sketch_3d.add_scalar_quantity("planarity", np.array(planarities)[line_ids], defined_on="edges", enabled=True, cmap="jet", vminmax=(0., 1.))
        sketch_3d.add_scalar_quantity("foreshortening", np.array(foreshortenings)[line_ids], defined_on="edges", enabled=True, cmap="jet", vminmax=(0., 1.))
        sketch_3d.add_scalar_quantity("curve_geom", np.array(curve_geoms)[line_ids], defined_on="edges", enabled=True, cmap="jet", vminmax=(0., 1.))
        sketch_3d.add_scalar_quantity("circularity", np.array(circularities)[line_ids], defined_on="edges", enabled=True, cmap="jet", vminmax=(0., 1.))
        sketch_3d.add_scalar_quantity("is_assigned", np.array(is_assigned)[line_ids], defined_on="edges", enabled=True, cmap="jet", vminmax=(0., 1.))
        sketch_3d.add_scalar_quantity("total_score", np.array(scores)[line_ids], defined_on="edges", enabled=True, cmap="jet", vminmax=(0., 1.))
        #print("sketch_version: "+str(sketch_version))
        #print("score: "+str(np.sum(scores)))
        #print(scores[-1])
        # collect 3D intersections
        points = []
        angles = []
        dep_node_ids = []
        distances = []
        curve_length = []
        for dep_node_id, (line_id, inter_set, _) in enumerate(self.sketch_versions_reference[sketch_version]):
            # NOTE(review): unlike the loop above, there is no guard here for
            # empty candidate_nodes — confirm inter_set is always empty in
            # that case.
            line = self.dependency_nodes[dep_node_id].candidate_nodes[line_id]
            line_length = tools_3d.line_3d_length(line.geometry)
            for inter in inter_set:
                points.append(inter.coords_3d)
                angles.append(inter.tangents_angle_3d)
                dep_node_ids.append(self.stroke_id_to_dep_node_id[inter.stroke_ids])
                dist = tools_3d.distance_point_to_polyline_vectorized(inter.coords_3d, line.geometry)
                distances.append(dist)
                curve_length.append(line_length)
        if len(points) > 0:
            inter_cloud = ps.register_point_cloud("sketch_version: "+str(sketch_version), points=np.array(points), enabled=enabled, radius=0.01)
            inter_cloud.add_scalar_quantity("tangents_angle_3d", np.array(angles), enabled=True, cmap="jet", vminmax=(0., 90.))
            inter_cloud.add_scalar_quantity("first_stroke", np.array(dep_node_ids)[:, 0])
            inter_cloud.add_scalar_quantity("snd_stroke", np.array(dep_node_ids)[:, 1])
            inter_cloud.add_scalar_quantity("distance", np.array(distances))
            inter_cloud.add_scalar_quantity("line_length", np.array(curve_length))
# NOTE(review): fragment — the start of this statement (most likely
# `points = np.vstack(`) is cut off above; `plydata`, `robust_laplacian`,
# `sla` and `ps` come from earlier in the file.
(plydata['vertex']['x'], plydata['vertex']['y'], plydata['vertex']['z'])).T

# for meshes
# tri_data = plydata['face'].data['vertex_indices']
# faces = np.vstack(tri_data)

# Build Laplacian
L, M = robust_laplacian.point_cloud_laplacian(points, mollify_factor=1e-5)
# for meshes
# L, M = robust_laplacian.mesh_laplacian(points, faces, mollify_factor=1e-5)

# Compute some eigenvectors of the generalized problem L x = lambda M x;
# sigma shifts near zero to get the smallest eigenpairs.
n_eig = 10
evals, evecs = sla.eigsh(L, n_eig, M, sigma=1e-8)

# Visualize
ps.init()
ps_cloud = ps.register_point_cloud("my cloud", points)
for i in range(n_eig):
    ps_cloud.add_scalar_quantity("eigenvector_" + str(i), evecs[:, i], enabled=True)
# for meshes
# ps_surf = ps.register_surface_mesh("my surf", points, faces)
# for i in range(n_eig):
#     ps_surf.add_scalar_quantity("eigenvector_"+str(i), evecs[:,i], enabled=True)
ps.show()
import numpy as np
from pygel3d import hmesh
# BUGFIX: polyscope is used below but was never imported in this script,
# which raised NameError at polyscope.init().
import polyscope

# Load a cube mesh and print its vertex positions.
m = hmesh.load("../cube.obj")
print(m.positions())
faces = m.faces()

## Old school: build the face-index lists with explicit loops.
allfaces = []
for f in faces:
    face = []
    for v in m.circulate_face(f):
        face.append(v)
    allfaces = allfaces + [face]

## Fancy version: same result via a nested comprehension.
allfaces2 = [[v for v in m.circulate_face(f)] for f in faces]

polyscope.init()
# Display the vertices as point cloud
polyscope.register_point_cloud("data", m.positions())
# Display the vertices as mesh
polyscope.register_surface_mesh("data mesh", m.positions(), allfaces)
# Display the vertices as mesh (comprehension-built faces)
polyscope.register_surface_mesh("data mesh2", m.positions(), allfaces2)
polyscope.show()
# Demo polyscope + python
#
# make sure to have `pip install polyscope numpy`
#
import polyscope
import numpy as np

# Eight corners of the unit cube, plus one scalar value per point.
pts = np.array([
    [0, 0, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0],
    [1, 1, 0], [0, 1, 1], [1, 0, 1], [1, 1, 1],
])
val = np.array([0.3, 3.4, 0.2, 0.4, 1.2, 4.0, 3.6, 5.0])

polyscope.init()
cloud = polyscope.register_point_cloud("My Points", pts)
cloud.add_scalar_quantity("Some values", val)
polyscope.show()
def draw_bboxes(self,
                bbox3d,
                bbox_color=(0, 1, 0),
                points_in_box_color=(1, 0, 0),
                rot_axis=2,
                center_mode='lidar_bottom',
                cls_names=None):
    """Draw bbox on visualizer and change the color of points inside bbox3d.

    Args:
        bbox3d (numpy.array | torch.tensor, shape=[M, 7]):
            3d bbox (x, y, z, dx, dy, dz, yaw) to visualize.
        bbox_color (tuple[float]): the color of bbox. Default: (0, 1, 0).
        points_in_box_color (tuple[float], or list[tuple[float]]):
            the color of points inside each bbox3d. Default: (1, 0, 0).
        rot_axis (int): rotation axis of bbox. Default: 2.
        center_mode (bool): indicate the center of bbox is bottom center
            or gravity center. available mode
            ['lidar_bottom', 'camera_bottom']. Default: 'lidar_bottom'.
        cls_names (list[str] | None): per-box class names.
            NOTE(review): indexed unconditionally below — the default of
            None raises TypeError; callers apparently always pass it.
    """
    if isinstance(bbox3d, torch.Tensor):
        bbox3d = bbox3d.cpu().numpy()
    # Copy so the center adjustments below don't mutate the caller's array.
    bbox3d = bbox3d.copy()
    for i in range(len(bbox3d)):
        # Per-box (or shared) color for points inside the box.
        # NOTE(review): in_box_color is computed but never used in the
        # visible code — possibly dead, or consumed by out-of-view logic.
        if isinstance(points_in_box_color, list):
            in_box_color = np.array(points_in_box_color[i])
        else:
            in_box_color = np.array(points_in_box_color)
        center = bbox3d[i, 0:3]
        dim = bbox3d[i, 3:6]
        yaw = np.zeros(3)
        yaw[rot_axis] = -bbox3d[i, 6]
        rot_mat = geometry.get_rotation_matrix_from_xyz(yaw)
        if center_mode == 'lidar_bottom':
            center[rot_axis] += dim[
                rot_axis] / 2  # bottom center to gravity center
        elif center_mode == 'camera_bottom':
            center[rot_axis] -= dim[
                rot_axis] / 2  # bottom center to gravity center
        box3d = geometry.OrientedBoundingBox(center, rot_mat, dim)
        line_set = geometry.LineSet.create_from_oriented_bounding_box(
            box3d)
        cls_name = cls_names[i]
        # Render the box wireframe as a curve network, uniquely named per box.
        ps_line_set = ps.register_curve_network(
            f'box{self.bbox_count}-{cls_names[i]}',
            np.array(line_set.points),
            np.array(line_set.lines),
            radius=0.0003,
            color=np.array(bbox_color))
        self.bbox_count += 1
        # change the color of points which are in box
        indices = box3d.get_point_indices_within_bounding_box(
            o3d.utility.Vector3dVector(self.bg_points))
        points_in_box = self.bg_points[indices]
        self.draw_points(points_in_box, cls_name)
        # Remove the claimed points from the background cloud.
        self.bg_points = np.delete(self.bg_points, indices, axis=0)
    # NOTE(review): original indentation lost — assuming the background
    # re-registration happens once after the loop; confirm against the
    # original file.
    ps.register_point_cloud('background', self.bg_points)
# edges to show horizontal = ps.register_curve_network("horizontal point to skeleton vertex", points, horizontal_edge) horizontal.set_radius(0.005) horizontal.set_color((0, 1, 0)) parent = ps.register_curve_network("parent vertex to skeleton vertex", points, parent_edge) parent.set_radius(0.005) parent.set_color((0, 0, 0)) point = ps.register_curve_network("point to skeleton vertex", points, point_edge) point.set_radius(0.005) point.set_color((1, 0, 0)) # input sampled points ps.register_point_cloud("points", points) ps_points = ps.get_point_cloud("points") ps_points.set_color((1, 0, 0)) ps_points.set_radius(0.015) ps.register_point_cloud("points network", points) # this skeleton vertex ps.register_point_cloud("skeleton point", skeleton_point) ps_skeleton_point = ps.get_point_cloud("skeleton point") ps_skeleton_point.set_color((0, 1, 0)) ps_skeleton_point.set_radius(0.025) ps.register_point_cloud("skeleton point", skeleton_point) # parent vertex of this skeleton vertex ps.register_point_cloud("parent of skeleton point", skeleton_point_parent) ps_parent_skeleton_point = ps.get_point_cloud("parent of skeleton point") ps_parent_skeleton_point.set_color((1, 0, 1)) ps_parent_skeleton_point.set_radius(0.025)