import numpy as np
import open3d as o3d
import trimesh
from trimesh.sample import sample_surface


def sample_pc_from_mesh_file(mesh_file, texture_file, npoints=1024):
    # texture_file is currently unused; a resolver-based load was disabled:
    # resolver = FilePathResolver(texture_file) if os.path.exists(texture_file) else None
    # scene = trimesh.load(mesh_file, resolver=resolver)
    scene = trimesh.load(mesh_file)
    mesh = scene.geometry[list(scene.geometry.keys())[0]]
    faces = mesh.faces
    vertex_colors = mesh.visual.to_color().vertex_colors
    # A single RGBA row means the mesh is uniformly colored; broadcast it to
    # one color per vertex before converting to per-face colors.
    if len(vertex_colors.shape) == 1:
        vertex_colors = np.tile(vertex_colors, (mesh.vertices.shape[0], 1))
    face_colors = trimesh.visual.color.vertex_to_face_color(vertex_colors, faces)
    # Sample points on the surface and color each point by its source face.
    pc, face_idx = sample_surface(mesh, npoints)
    pcolors = np.array(face_colors[face_idx], dtype=np.float32) / 255
    points = o3d.utility.Vector3dVector(pc.reshape([-1, 3]))
    colors = o3d.utility.Vector3dVector(pcolors[:, :3].reshape([-1, 3]))
    pcd = o3d.geometry.PointCloud()
    pcd.points = points
    pcd.colors = colors
    pcd.estimate_normals(
        o3d.geometry.KDTreeSearchParamHybrid(radius=0.01, max_nn=30))
    return pcd
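# A minimal usage sketch for sample_pc_from_mesh_file(); the mesh and
# texture paths are hypothetical, and writing the result out with
# o3d.io.write_point_cloud is one option, not part of the function itself.
if __name__ == '__main__':
    pcd = sample_pc_from_mesh_file('model.obj', 'model_texture.png', npoints=4096)
    o3d.io.write_point_cloud('model_pc.ply', pcd)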
import glob
import os

import docopt
import numpy as np
import trimesh
from trimesh.sample import sample_surface


def main():
    """Passes command-line arguments into the sampling utility."""
    arguments = docopt.docopt(__doc__)
    input_path = arguments['--data']
    output_dir = arguments['--out']
    n = int(arguments['--n'])
    scale = float(arguments['--scale'])

    os.makedirs(output_dir, exist_ok=True)
    paths = list(glob.iglob(input_path))
    if not paths:
        raise UserWarning('No files found at %s' % input_path)

    for path in paths:
        mesh = trimesh.load(path)
        name = os.path.basename(path).split('.')[0]
        # Per-name hooks let callers patch specific meshes before sampling.
        if name in mesh_hooks:
            mesh = mesh_hooks[name](mesh)
        # sample_surface returns (points, face_index); keep only the points.
        points = sample_surface(mesh, n)[0] * scale
        points -= points.mean(axis=0)
        if name in array_hooks:
            for function in array_hooks[name]:
                points = function(points)
        output_path = os.path.join(output_dir, name)
        np.save(output_path, points)
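# A sketch of the docopt usage string that main() expects to find in the
# module docstring (__doc__). The option names mirror the lookups above;
# the wording is an assumption, not the original text.
"""Sample point clouds from a glob of mesh files.

Usage:
    sample.py --data=<glob> --out=<dir> --n=<n> --scale=<s>

Options:
    --data=<glob>   Glob pattern matching the input meshes.
    --out=<dir>     Directory that receives the .npy point arrays.
    --n=<n>         Number of surface samples per mesh.
    --scale=<s>     Uniform scale applied to the sampled points.
"""

if __name__ == '__main__':
    main()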
import numpy as np
import trimesh
from scipy.spatial import cKDTree
from tqdm import tqdm
from trimesh.sample import sample_surface

import utils  # project-local helpers (provides as_mesh)


def __init__(self, split, dataset_path, dist_file_name):
    # Load the shape; fall back to raw vertices when the file is already a
    # point cloud or the loaded mesh has no faces.
    model = trimesh.load(dataset_path)
    if not isinstance(model, trimesh.PointCloud):
        model = utils.as_mesh(trimesh.load(dataset_path))
        if model.faces.shape[0] > 0:
            self.points = sample_surface(model, 250000)[0]
        else:
            self.points = model.vertices
    else:
        self.points = model.vertices

    # Center at the origin and scale into the unit cube.
    self.points = self.points - self.points.mean(0, keepdims=True)
    scale = np.abs(self.points).max()
    self.points = self.points / scale

    # Per-point sigma: distance to the 50th nearest neighbour, queried in
    # ten batches to bound memory.
    sigmas = []
    ptree = cKDTree(self.points)
    for p in tqdm(np.array_split(self.points, 10, axis=0)):
        d = ptree.query(p, 51)
        sigmas.append(d[0][:, -1])
    sigmas = np.concatenate(sigmas)
    sigmas = np.tile(sigmas, [250000 // sigmas.shape[0]])
    sigmas_big = 1.0 * np.ones_like(sigmas)

    # Perturb every surface point twice: once with its local sigma and once
    # with the large fixed sigma, covering near- and far-field space.
    pnts = np.tile(self.points, [250000 // self.points.shape[0], 1])
    sample = np.concatenate([
        pnts + np.expand_dims(sigmas, -1) * np.random.normal(0.0, 1.0, size=pnts.shape),
        pnts + np.expand_dims(sigmas_big, -1) * np.random.normal(0.0, 1.0, size=pnts.shape)
    ], axis=0)

    # Unsigned distance from each perturbed sample back to the point set.
    dists = []
    for np_query in tqdm(sample):
        dist = ptree.query(np_query)[0]
        dists.append(dist)
    dists = np.array(dists)
    self.dists = np.concatenate(
        [sample, np.expand_dims(dists, axis=-1)], axis=-1)
    self.npyfiles_mnfld = [dataset_path]
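# Hedged sketch of how the packed array built above is typically split
# downstream; `dataset` is a hypothetical instance of the class this
# __init__ belongs to, and the consuming training loop is not shown here.
xyz = dataset.dists[:, :3]   # perturbed query points
dist = dataset.dists[:, 3]   # unsigned distance to the surface samples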
import numpy as np
from scipy.spatial import cKDTree
from trimesh.sample import sample_surface


def generate_points(mesh, count=250000, sigma_2=0.3):
    pts = sample_surface(mesh, count)[0]
    tree = cKDTree(pts)

    # Batched KD-tree lookup over sets of 5k points; count must be a
    # multiple of 5000 for np.split.
    sigmas = []
    for p in np.split(pts, count // 5000):
        d, i = tree.query(p, [51])
        sigmas.append(d)
    # Concatenate batch lookups into one (count, 1) array.
    sigmas = np.concatenate(sigmas)

    # Generate Gaussian noise.
    norm1 = np.random.normal(0.0, 1.0, size=pts.shape)
    norm2 = np.random.normal(0.0, 1.0, size=pts.shape)

    # Add Gaussian noise to pts:
    # - sigmas uses the 50th-nearest-neighbour distance as the SD,
    # - sigma_2 is a fixed SD.
    # Multiplying a standard-normal sample by an SD rescales the
    # distribution to that SD.
    pts_1 = norm1 * sigmas + pts
    pts_2 = norm2 * sigma_2 + pts
    return np.concatenate([pts_1, pts_2])
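# A minimal usage sketch for generate_points(); 'chair.obj' is a
# hypothetical path, and force='mesh' simply asks trimesh to merge a
# multi-geometry scene into a single mesh.
import trimesh

mesh = trimesh.load('chair.obj', force='mesh')
noisy = generate_points(mesh, count=250000, sigma_2=0.3)
# Shape is (2 * count, 3): the first half is perturbed by the local
# per-point sigma, the second half by the fixed sigma_2.
print(noisy.shape)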
print("cat {0} : b{1}".format(cat,counterb)) counterb = counterb + 1 source = os.path.abspath(os.path.join(args.src_path, 'scans', ds, cat)) output = os.path.abspath(os.path.join(args.out_path, 'dfaust_processed')) utils.mkdir_ifnotexists(output) utils.mkdir_ifnotexists(os.path.join(output, ds)) utils.mkdir_ifnotexists(os.path.join(output, ds, cat)) counterc = 0 for item,shape in enumerate(shapes): print("item {0} : c{1}".format(cat, counterc)) counterc = counterc + 1 output_file = os.path.join(output,ds,cat,shape) print (output_file) if not (args.skip and os.path.isfile(output_file + '.npy')): print ('loading : {0}'.format(os.path.join(source,shape))) mesh = trimesh.load(os.path.join(source,shape) + '.ply') sample = sample_surface(mesh,SAMPLES) pnts = sample[0] normals = mesh.face_normals[sample[1]] center = np.mean(pnts, axis=0) pnts = pnts - np.expand_dims(center, axis=0) point_set = np.hstack([pnts, normals]) np.save(output_file + '.npy', point_set) np.save(output_file + '_normalization.npy', {"center":center,"scale":scale}) print ("end!")
# Fragment: shapes, shapeindex, global_shape_index, output, human, pose,
# source, and opt come from the enclosing script; Point_3 is presumably
# CGAL's 3D point type from its Python bindings.
for shape in shapes:
    if shapeindex == global_shape_index or shapeindex == -1:
        print("found!")
        output_file = os.path.join(output, human, pose, shape)
        print(output_file)
        if not opt.skip or not os.path.isfile(output_file + '_dist_triangle.npy'):
            print('loading : {0}'.format(os.path.join(source, shape)))
            mesh = trimesh.load(os.path.join(source, shape) + '.ply')
            sample = sample_surface(mesh, 250000)
            center = np.mean(sample[0], axis=0)
            pnts = sample[0]
            pnts = pnts - np.expand_dims(center, axis=0)
            scale = 1
            pnts = pnts / scale

            # Re-express each mesh triangle in the centered, scaled frame
            # as CGAL points.
            triangles = []
            for tri in mesh.triangles:
                a = Point_3((tri[0][0] - center[0]) / scale,
                            (tri[0][1] - center[1]) / scale,
                            (tri[0][2] - center[2]) / scale)
                b = Point_3((tri[1][0] - center[0]) / scale,
                            (tri[1][1] - center[1]) / scale,
                            (tri[1][2] - center[2]) / scale)