def test_poisson_disk_sampling(self):
    import point_cloud_utils as pcu
    import numpy as np

    # v is a nv by 3 NumPy array of vertices
    # f is an nf by 3 NumPy array of face indexes into v
    # n is a nv by 3 NumPy array of vertex normals
    v, f, n = pcu.read_obj(os.path.join(self.test_path, "cube_twist.obj"))
    bbox = np.max(v, axis=0) - np.min(v, axis=0)
    bbox_diag = np.linalg.norm(bbox)

    # Generate very dense random samples on the mesh (v, f, n).
    # Note that this function also works with no normals: just pass in an empty
    # array, np.array([], dtype=v.dtype).
    #
    # v_dense is an array with shape (100*v.shape[0], 3) where each row is a point on the mesh (v, f)
    # n_dense is an array with shape (100*v.shape[0], 3) where each row is the normal of the corresponding point in v_dense
    v_dense, n_dense = pcu.sample_mesh_random(v, f, n, num_samples=v.shape[0] * 100)

    # Downsample v_dense to be from a blue noise distribution:
    #
    # v_poisson is a downsampled version of v_dense where points are separated by approximately
    # `radius` distance; use_geodesic_distance indicates that the distance should be measured on the mesh.
    #
    # n_poisson are the corresponding normals of v_poisson
    v_poisson, n_poisson = pcu.sample_mesh_poisson_disk(
        v_dense, f, n_dense, radius=0.01 * bbox_diag, use_geodesic_distance=True)
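# Follow-up sanity-check sketch, assuming v_poisson and bbox_diag from the test
# above: Euclidean distance lower-bounds geodesic distance on the surface, so
# pairwise Euclidean spacing gives a quick, conservative look at the ~radius
# separation of the Poisson-disk samples.
import numpy as np

d = np.linalg.norm(v_poisson[:, None, :] - v_poisson[None, :, :], axis=-1)
np.fill_diagonal(d, np.inf)  # ignore self-distances
print("min pairwise spacing:", d.min(), "vs. target radius:", 0.01 * bbox_diag)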
def sample_pointcloud_mesh(obj_path, point_num):
    import point_cloud_utils as pcu
    import numpy as np

    off_v, off_f, off_n = pcu.read_obj(obj_path)
    # sample_mesh_random accepts an empty normal array when the OBJ carries no
    # per-vertex normals
    if off_n.shape[0] != off_v.shape[0]:
        off_n = np.array([], dtype=off_v.dtype)
    v_dense, n_dense = pcu.sample_mesh_random(off_v, off_f, off_n, num_samples=point_num)
    return v_dense
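# Hypothetical usage of the helper above ("bunny.obj" and the sample count are
# illustrative placeholders):
pts = sample_pointcloud_mesh("bunny.obj", point_num=4096)
print(pts.shape)  # (4096, 3)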
def test_estimate_point_cloud_normals(self):
    import point_cloud_utils as pcu
    import numpy as np

    # v is a nv by 3 NumPy array of vertices
    # f is an nf by 3 NumPy array of face indexes into v
    # n is a nv by 3 NumPy array of vertex normals if they are specified, otherwise an empty array
    v, f, n = pcu.read_obj(os.path.join(self.test_path, "cube_twist.obj"))

    # Estimate normals for the point set v using 12 nearest neighbors per point
    n = pcu.estimate_point_cloud_normals(v, k=12)
    self.assertEqual(n.shape, v.shape)
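# Quick check sketch, assuming n from the test above and assuming that
# estimate_point_cloud_normals returns unit-length normals:
import numpy as np

lengths = np.linalg.norm(n, axis=1)
print("normal length range:", lengths.min(), lengths.max())  # expect ~1.0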
def test_lloyd_relaxation(self):
    import point_cloud_utils as pcu

    # v is a nv by 3 NumPy array of vertices
    # f is an nf by 3 NumPy array of face indexes into v
    v, f, n = pcu.read_obj(os.path.join(self.test_path, "cube_twist.obj"))

    # Generate 1000 points on the mesh with Lloyd's algorithm
    samples = pcu.sample_mesh_lloyd(v, f, 1000)

    # Generate 100 points on the unit square with Lloyd's algorithm
    samples_2d = pcu.lloyd_2d(100)
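# Follow-up sketch, assuming samples and v from the test above: Lloyd samples
# lie on the mesh surface, so they must fall inside its axis-aligned bounding
# box (up to floating-point tolerance).
import numpy as np

eps = 1e-6
assert np.all(samples >= v.min(axis=0) - eps)
assert np.all(samples <= v.max(axis=0) + eps)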
def load_mesh_by_file_extension(file_name):
    """
    Load a mesh stored in an OBJ, OFF, or PLY file and return a NumPy array of
    the vertex positions, i.e. an array with shape [n, 3] where each row [i, :]
    is a vertex position.

    :param file_name: The name of the mesh file to load
    :return: An [n, 3] array of vertex positions
    """
    import point_cloud_utils as pcu
    import numpy as np

    if file_name.endswith(".obj"):
        v, f, n = pcu.read_obj(file_name, dtype=np.float32)
    elif file_name.endswith(".ply"):
        v, f, n, uv = pcu.read_ply(file_name, dtype=np.float32)
    elif file_name.endswith(".off"):
        v, f, n = pcu.read_off(file_name, dtype=np.float32)
    else:
        raise ValueError("Input mesh file must end in .ply, .obj, or .off")
    return v
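# Hypothetical usage ("model.ply" is an illustrative placeholder):
verts = load_mesh_by_file_extension("model.ply")
print(verts.shape)  # (n, 3), float32 vertex positions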
def load_point_cloud_by_file_extension(file_name, compute_normals=False):
    import point_cloud_utils as pcu
    import numpy as np

    if file_name.endswith(".obj"):
        v, f, n = pcu.read_obj(file_name, dtype=np.float32)
    elif file_name.endswith(".off"):
        v, f, n = pcu.read_off(file_name, dtype=np.float32)
    elif file_name.endswith(".ply"):
        v, f, n, _ = pcu.read_ply(file_name, dtype=np.float32)
    elif file_name.endswith(".npts"):
        v, n = load_srb_range_scan(file_name)
        # Range scans have no faces; use an empty index array so f.shape works below
        f = np.zeros((0, 3), dtype=np.int32)
    else:
        raise ValueError("Invalid file extension: must be one of .obj, .off, .ply, or .npts")

    if compute_normals and f.shape[0] > 0:
        n = pcu.per_vertex_normals(v, f)
    return v, n
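# Hypothetical usage ("scan.off" is an illustrative placeholder): load a mesh
# and, since it has faces, recompute per-vertex normals from them.
verts, normals = load_point_cloud_by_file_extension("scan.off", compute_normals=True)
print(verts.shape, normals.shape)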
def test_downsample_point_cloud_voxel_grid(self):
    import point_cloud_utils as pcu
    import numpy as np

    # v is a nv by 3 NumPy array of vertices
    # f is an nf by 3 NumPy array of face indexes into v
    # n is a nv by 3 NumPy array of vertex normals if they are specified, otherwise an empty array
    v, f, n = pcu.read_obj(os.path.join(self.test_path, "cube_twist.obj"))
    bbox = np.max(v, axis=0) - np.min(v, axis=0)
    bbox_diag = np.linalg.norm(bbox)
    vox_grid_size = 1.0 / 128.0

    # Make sure we have normals
    self.assertEqual(n.shape, v.shape)

    # Vanilla case
    pts, nms, clr = pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v)
    self.assertIsNone(nms)
    self.assertIsNone(clr)
    self.assertGreater(pts.shape[0], 0)
    self.assertEqual(pts.shape[1], 3)

    # With normals
    pts, nms, clr = pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n)
    self.assertIsNone(clr)
    self.assertEqual(nms.shape, pts.shape)
    self.assertGreater(pts.shape[0], 0)
    self.assertEqual(pts.shape[1], 3)

    # With RGB colors
    c = np.random.rand(v.shape[0], 3)
    pts, nms, clr = pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, None, c)
    self.assertIsNone(nms)
    self.assertEqual(clr.shape, pts.shape)
    self.assertGreater(pts.shape[0], 0)
    self.assertEqual(pts.shape[1], 3)

    # With RGBA colors
    c = np.random.rand(v.shape[0], 4)
    pts, nms, clr = pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, None, c)
    self.assertIsNone(nms)
    self.assertEqual(clr.shape[0], pts.shape[0])
    self.assertEqual(clr.shape[1], 4)
    self.assertGreater(pts.shape[0], 0)
    self.assertEqual(pts.shape[1], 3)

    # With normals and RGB colors
    c = np.random.rand(v.shape[0], 3)
    pts, nms, clr = pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n, c)
    self.assertEqual(nms.shape, pts.shape)
    self.assertEqual(clr.shape, pts.shape)
    self.assertGreater(pts.shape[0], 0)
    self.assertEqual(pts.shape[1], 3)

    # With normals and RGBA colors
    c = np.random.rand(v.shape[0], 4)
    pts, nms, clr = pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n, c)
    self.assertEqual(nms.shape, pts.shape)
    self.assertEqual(clr.shape[0], pts.shape[0])
    self.assertEqual(clr.shape[1], 4)
    self.assertGreater(pts.shape[0], 0)
    self.assertEqual(pts.shape[1], 3)

    # With a different voxel size per axis
    vox_grid_size = [1.0/128.0, 1.0/99.0, 1.0/222.0]
    c = np.random.rand(v.shape[0], 4)
    pts, nms, clr = pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n, c)
    self.assertEqual(nms.shape, pts.shape)
    self.assertEqual(clr.shape[0], pts.shape[0])
    self.assertEqual(clr.shape[1], 4)
    self.assertGreater(pts.shape[0], 0)
    self.assertEqual(pts.shape[1], 3)

    # With explicit bounding box dimensions
    vox_grid_size = np.array([1.0/128.0, 1.0/99.0, 1.0/222.0])
    min_bound = np.min(v, axis=0) - 0.5 * np.array(vox_grid_size)
    max_bound = np.max(v, axis=0) + 0.5 * np.array(vox_grid_size)
    c = np.random.rand(v.shape[0], 4)
    pts, nms, clr = pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n, c,
                                                          min_bound=min_bound, max_bound=max_bound)
    self.assertEqual(nms.shape, pts.shape)
    self.assertEqual(clr.shape[0], pts.shape[0])
    self.assertEqual(clr.shape[1], 4)
    self.assertGreater(pts.shape[0], 0)
    self.assertEqual(pts.shape[1], 3)

    # Should raise if the voxel size is too small
    with self.assertRaises(ValueError):
        vox_grid_size = [1e-16, 1.0/99.0, 1.0/222.0]
        c = np.random.rand(v.shape[0], 4)
        pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n, c)

    # Should raise if the voxel size is negative
    with self.assertRaises(ValueError):
        vox_grid_size = [1.0/100.0, -1.0/99.0, 1.0/222.0]
        c = np.random.rand(v.shape[0], 4)
        pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n, c)

    # Reset to a valid voxel size so the failures below come from the bad inputs
    vox_grid_size = 1.0 / 128.0

    # Invalid color dimension
    with self.assertRaises(ValueError):
        c = np.random.rand(v.shape[0], 2)
        pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n, c)

    # Invalid normal dimension
    with self.assertRaises(ValueError):
        c = np.random.rand(v.shape[0], 3)
        pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n[:, :1], c)

    # Invalid number of normals
    with self.assertRaises(ValueError):
        c = np.random.rand(v.shape[0], 3)
        pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n[1:, :], c)

    # Invalid number of colors
    with self.assertRaises(ValueError):
        c = np.random.rand(v.shape[0]//2, 3)
        pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n, c)

    # Bounding box with negative extent (min and max swapped)
    with self.assertRaises(ValueError):
        min_bound = np.min(v, axis=0) - 0.5 * np.array(vox_grid_size)
        max_bound = np.max(v, axis=0) + 0.5 * np.array(vox_grid_size)
        pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n, c,
                                              max_bound=min_bound, min_bound=max_bound)

    # Badly shaped grid size
    with self.assertRaises(ValueError):
        vox_grid_size = [1.0/100.0, 1.0/99.0]
        min_bound = np.min(v, axis=0) - 0.5 * np.array(vox_grid_size)
        max_bound = np.max(v, axis=0) + 0.5 * np.array(vox_grid_size)
        pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n, c,
                                              max_bound=max_bound, min_bound=min_bound)

    # Badly shaped max bound
    with self.assertRaises(ValueError):
        vox_grid_size = [1.0/100.0, 1.0/99.0, 1.0/77.0]
        min_bound = np.min(v, axis=0) - 0.5 * np.array(vox_grid_size)
        max_bound = np.max(v, axis=0) + 0.5 * np.array(vox_grid_size)
        pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n, c,
                                              max_bound=max_bound[:1], min_bound=min_bound)

    # Badly shaped min bound
    with self.assertRaises(ValueError):
        vox_grid_size = [1.0/100.0, 1.0/99.0, 1.0/77.0]
        min_bound = np.min(v, axis=0) - 0.5 * np.array(vox_grid_size)
        max_bound = np.max(v, axis=0) + 0.5 * np.array(vox_grid_size)
        pcu.downsample_point_cloud_voxel_grid(vox_grid_size, v, n, c,
                                              max_bound=max_bound[:1], min_bound=(1.0, 1.0))
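# Minimal standalone sketch of the voxel-grid downsampling exercised above:
# all points falling in a voxel are pooled into a single representative point,
# so a dense cloud shrinks substantially.
import numpy as np
import point_cloud_utils as pcu

p = np.random.rand(10000, 3)
pts, _, _ = pcu.downsample_point_cloud_voxel_grid(1.0 / 64.0, p)
print(p.shape[0], "->", pts.shape[0])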
def test_mesh_sampling(self):
    import point_cloud_utils as pcu
    import numpy as np

    # v is a nv by 3 NumPy array of vertices
    # f is an nf by 3 NumPy array of face indexes into v
    # n is a nv by 3 NumPy array of vertex normals if they are specified, otherwise an empty array
    v, f, n = pcu.read_obj(os.path.join(self.test_path, "cube_twist.obj"))
    bbox = np.max(v, axis=0) - np.min(v, axis=0)
    bbox_diag = np.linalg.norm(bbox)

    # The same random seed must reproduce the same samples; a different seed should not
    f_idx1, bc1 = pcu.sample_mesh_random(v, f, num_samples=1000, random_seed=1234567)
    f_idx2, bc2 = pcu.sample_mesh_random(v, f, num_samples=1000, random_seed=1234567)
    f_idx3, bc3 = pcu.sample_mesh_random(v, f, num_samples=1000, random_seed=7654321)
    self.assertTrue(np.all(f_idx1 == f_idx2))
    self.assertTrue(np.all(bc1 == bc2))
    self.assertFalse(np.all(f_idx1 == f_idx3))
    self.assertFalse(np.all(bc1 == bc3))

    # Generate very dense random samples on the mesh (v, f) and recover the
    # sample positions from the face indices and barycentric coordinates
    f_idx, bc = pcu.sample_mesh_random(v, f, num_samples=v.shape[0] * 4)
    v_dense = (v[f[f_idx]] * bc[:, :, np.newaxis]).sum(1)

    s_idx = pcu.downsample_point_cloud_poisson_disk(v_dense, 0, 0.1 * bbox_diag, random_seed=1234567)
    s_idx2 = pcu.downsample_point_cloud_poisson_disk(v_dense, 0, 0.1 * bbox_diag, random_seed=1234567)
    s_idx3 = pcu.downsample_point_cloud_poisson_disk(v_dense, 0, 0.1 * bbox_diag, random_seed=7654321)
    self.assertTrue(np.all(s_idx == s_idx2))
    if s_idx3.shape == s_idx.shape:
        self.assertFalse(np.all(s_idx == s_idx3))
    else:
        self.assertFalse(s_idx.shape == s_idx3.shape)

    # Ensure we can request more samples than vertices and get something reasonable
    s_idx_0 = pcu.downsample_point_cloud_poisson_disk(v_dense, 2 * v_dense.shape[0], random_seed=1234567)

    s_idx = pcu.downsample_point_cloud_poisson_disk(v_dense, 1000, random_seed=1234567)
    s_idx2 = pcu.downsample_point_cloud_poisson_disk(v_dense, 1000, random_seed=1234567)
    s_idx3 = pcu.downsample_point_cloud_poisson_disk(v_dense, 1000, random_seed=7654321)
    self.assertTrue(np.all(s_idx == s_idx2))
    if s_idx3.shape == s_idx.shape:
        self.assertFalse(np.all(s_idx == s_idx3))
    else:
        self.assertFalse(s_idx.shape == s_idx3.shape)

    f_idx1, bc1 = pcu.sample_mesh_poisson_disk(v, f, num_samples=1000, random_seed=1234567,
                                               use_geodesic_distance=True, oversampling_factor=5.0)
    f_idx2, bc2 = pcu.sample_mesh_poisson_disk(v, f, num_samples=1000, random_seed=1234567,
                                               use_geodesic_distance=True, oversampling_factor=5.0)
    f_idx3, bc3 = pcu.sample_mesh_poisson_disk(v, f, num_samples=1000, random_seed=7654321,
                                               use_geodesic_distance=True, oversampling_factor=5.0)
    self.assertTrue(np.all(f_idx1 == f_idx2))
    self.assertTrue(np.all(bc1 == bc2))
    if f_idx1.shape == f_idx3.shape:
        self.assertFalse(np.all(f_idx1 == f_idx3))
    if bc1.shape == bc3.shape:
        self.assertFalse(np.all(bc1 == bc3))

    # num_samples=-1: sample by radius instead of by count
    f_idx1, bc1 = pcu.sample_mesh_poisson_disk(v, f, num_samples=-1, radius=0.01 * bbox_diag,
                                               random_seed=1234567, oversampling_factor=5.0)
    f_idx2, bc2 = pcu.sample_mesh_poisson_disk(v, f, num_samples=-1, radius=0.01 * bbox_diag,
                                               random_seed=1234567, oversampling_factor=5.0)
    f_idx3, bc3 = pcu.sample_mesh_poisson_disk(v, f, num_samples=-1, radius=0.01 * bbox_diag,
                                               random_seed=7654321, oversampling_factor=5.0)
    self.assertTrue(np.all(f_idx1 == f_idx2))
    self.assertTrue(np.all(bc1 == bc2))
    if f_idx1.shape == f_idx3.shape:
        self.assertFalse(np.all(f_idx1 == f_idx3))
    if bc1.shape == bc3.shape:
        self.assertFalse(np.all(bc1 == bc3))
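# Hypothetical helper generalizing the interpolation step above: evaluate any
# per-vertex attribute (positions, normals, colors) at the (f_idx, bc) sample
# locations returned by the samplers.
import numpy as np

def interp_barycentric(attrib, f, f_idx, bc):
    # attrib: (nv, d) per-vertex values; returns (num_samples, d) interpolated values
    return (attrib[f[f_idx]] * bc[:, :, np.newaxis]).sum(axis=1)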
# Resample every mesh under args.dataset_path with Lloyd sampling and save the
# results as .npy point clouds (files_in_subdirs and args come from the
# surrounding script).
filenames = list(files_in_subdirs(args.dataset_path, args.termination))
for i, fi in enumerate(filenames):
    path = os.path.split(fi)[0]
    foldername = path.replace(args.dataset_path + '/', '')
    name = os.path.split(fi)[-1].split('.')[0]

    if args.save_path == 'None':
        args.save_path = os.path.split(args.dataset_path)[0] + '/' + \
            os.path.split(args.dataset_path)[1] + '_resampled'
    if not os.path.exists(args.save_path):
        os.makedirs(args.save_path)

    if os.path.split(foldername)[-1] == args.dataset_path.split('/')[-1]:
        # Single-folder structure
        destination_filename = args.save_path + '/' + name
    else:
        if not os.path.exists(args.save_path + '/' + foldername):
            os.makedirs(args.save_path + '/' + foldername)
        destination_filename = args.save_path + '/' + foldername + '/' + name

    if args.termination == '.off':
        v, f, n = pcu.read_off(fi)
    elif args.termination == '.obj':
        v, f, n = pcu.read_obj(fi)
    else:
        print('Invalid termination')
        sys.exit(1)

    if len(f) != 0:
        samples = pcu.sample_mesh_lloyd(v, f, args.n_points)  # normals inside v, poorly saved
        np.save(destination_filename + '.npy', samples)
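# Sketch of the argparse setup the fragment above assumes (argument names come
# from the args.* fields it reads; the defaults here are illustrative guesses):
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--dataset_path', type=str, required=True)
parser.add_argument('--termination', type=str, default='.obj')  # '.obj' or '.off'
parser.add_argument('--save_path', type=str, default='None')
parser.add_argument('--n_points', type=int, default=2048)
args = parser.parse_args()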
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 25 11:21:05 2020

@author: tamir
"""
import os

import numpy as np
import point_cloud_utils as pcu

THIS_DIR = os.path.dirname(os.path.abspath(__file__))
points = os.path.join(THIS_DIR, "data/point_cloud.obj")

# v is a nv by 3 NumPy array of vertices
v, f, n = pcu.read_obj(points)

# Estimate a normal at each point (row of v) using its 5 nearest neighbors
n = pcu.estimate_normals(v, k=5)

np.testing.assert_allclose(n[0], np.asarray([0.96283305, 0.11186423, 0.24584327]))