def getMeshCurvature(mesh, gaussian_curvature=True, mean_curvature=True, shape_index=True, remove_outliers=True):
    try:
        import igl
    except ModuleNotFoundError:
        raise ModuleNotFoundError("The dependency 'igl' is required for this functionality!")
    
    # principal curvature directions (v1, v2) and values (k1, k2) at each vertex
    v1, v2, k1, k2 = igl.principal_curvature(mesh.vertices, mesh.faces)
    if remove_outliers:
        k1 = clipOutliers(k1)
        k2 = clipOutliers(k2)
    
    feature_names = []
    if gaussian_curvature:
        feature_names.append('gaussian_curvature')
        mesh.vertex_attributes['gaussian_curvature'] = k1*k2
    
    if mean_curvature:
        feature_names.append('mean_curvature')
        mesh.vertex_attributes['mean_curvature'] = (k1 + k2)/2
    
    if shape_index:
        shape_index = -2*np.arctan( (k1 + k2)/(k1 - k2) )/np.pi
        feature_names.append('shape_index')
        mesh.vertex_attributes['shape_index'] = shape_index
    
    return feature_names
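# Illustrative usage sketch (an addition, not part of the original module): assumes a mesh
# object exposing `vertices` (N x 3 floats), `faces` (M x 3 ints), and a dict-like
# `vertex_attributes`, matching the attributes accessed above. The helper name is hypothetical.
def _exampleMeshCurvature(mesh):
    names = getMeshCurvature(mesh, gaussian_curvature=True, mean_curvature=True, shape_index=True)
    # each returned name now maps to a per-vertex array stored on the mesh;
    # shape_index values lie in [-1, 1]
    return {name: mesh.vertex_attributes[name] for name in names}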
def getHKS(mesh, num_samples=3, num_components=50, feature_name='hks', tau=1, eps=1e-5, normalize=True, **kwargs):
    try:
        from scipy.sparse.linalg import eigsh as sp_eigs
        from scipy.sparse import diags
    except ModuleNotFoundError:
        raise ModuleNotFoundError("The dependency 'SciPy' is required for this functionality!")
    
    # compute eigenvalues and eigenvectors of the Laplace-Beltrami operator
    L = -mesh.cot_matrix
    M = mesh.mass_matrix
    try:
        evals, evecs = sp_eigs(L, k=num_components, M=M, which='LM', sigma=0, **kwargs)
    except RuntimeError:
        # add a small diagonal shift to the cot matrix to try and make it invertible
        D = diags(eps*np.ones(L.shape[0]))
        L = L + D
        evals, evecs = sp_eigs(L, k=num_components, M=M, which='LM', sigma=0, **kwargs)
    
    if normalize:
        evals = evals/evals.sum()
        scale = mesh.area
    else:
        scale = 1
    
    # determine logarithmically spaced time samples
    tmin = tau/min(evals.max(), 1e+1)
    tmax = tau/max(evals.min(), 1e-3)
    tsamps = np.exp(np.linspace(np.log(tmin), np.log(tmax), num_samples))
    
    # compute heat kernel signatures
    evecs = evecs**2
    feature_names = []
    for i, t in enumerate(tsamps):
        fn = "{}{}".format(feature_name, i+1)
        HKS = scale*np.sum(np.exp(-t*evals)*evecs, axis=1)
        mesh.vertex_attributes[fn] = clipOutliers(HKS)
        feature_names.append(fn)
    
    return feature_names
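# Illustrative usage sketch (an addition, not part of the original module): assumes a mesh
# object exposing `cot_matrix`, `mass_matrix`, `area`, and a dict-like `vertex_attributes`,
# matching the attributes accessed above. The helper name and parameter choices are assumptions.
def _exampleHKS(mesh):
    names = getHKS(mesh, num_samples=3, num_components=50, tau=1)
    # names == ['hks1', 'hks2', 'hks3']; smaller time samples emphasize local geometry,
    # larger ones emphasize more global shape
    return np.stack([mesh.vertex_attributes[n] for n in names], axis=1)  # (num_vertices, num_samples)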
def mapPointFeaturesToMesh(mesh, points, features, distance_cutoff=3.0, offset=None, map_to='neighborhood', weight_method='inverse_distance', clip_values=False, laplace_smooth=False, **kwargs):
    if clip_values:
        features = clipOutliers(features, axis=0)
    
    # decide how to map point features to vertices
    if map_to == 'neighborhood':
        X = np.zeros((mesh.num_vertices, features.shape[1])) # store the mapped features
        W = np.zeros(mesh.num_vertices) # weights determined by distance from points to vertices
        if offset is None:
            offset = np.zeros(len(points))
        assert len(points) == len(features) and len(points) == len(offset)
        
        for i in range(len(points)):
            p = points[i]
            f = features[i]
            o = offset[i]
            
            # map features to all vertices within a neighborhood, weighted by distance
            t = distance_cutoff + o
            v, d = mesh.verticesInBall(p, t)
            
            if len(v) > 0:
                w = wfn(d, distance_cutoff, o, weight_method, **kwargs)
                X[v] += np.outer(w, f)
                W[v] += w
        
        # set zero weights to 1
        wi = (W == 0)
        W[wi] = 1.0
        
        # scale by weights
        X /= W.reshape(-1, 1)
    elif map_to == 'nearest':
        # get the nearest point to each vertex
        assert len(points) == len(features)
        try:
            from scipy.spatial import cKDTree
        except ModuleNotFoundError:
            raise ModuleNotFoundError("The dependency 'SciPy' is required for this functionality!")
        kdt = cKDTree(points)
        d, ind = kdt.query(mesh.vertices)
        X = features[ind]
    else:
        raise ValueError("Unknown value of argument `map_to`: {}".format(map_to))
    
    if laplace_smooth:
        X = laplacianSmoothing(mesh, X)
    
    return X
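# Illustrative usage sketch (an addition, not part of the original module): assumes a mesh
# object exposing `num_vertices`, `vertices`, and `verticesInBall`, as used above. The arrays
# `atom_xyz` (num_atoms x 3) and `atom_features` (num_atoms x num_features) are hypothetical inputs.
def _exampleMapPointFeatures(mesh, atom_xyz, atom_features, atom_radii=None):
    # neighborhood mapping: each point contributes to vertices within distance_cutoff (+ offset),
    # weighted by inverse distance and normalized by the accumulated weights
    return mapPointFeaturesToMesh(
        mesh, atom_xyz, atom_features,
        distance_cutoff=3.0,
        offset=atom_radii,              # e.g. per-atom radii; None disables the offset
        map_to='neighborhood',
        weight_method='inverse_distance'
    )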
def mapElectrostaticPotentialToMesh(mesh, phi, acc, sphere_average=True, npts=50, sphere_radius=1.0, efield=False, diff_method='symmetric_difference', h=None, laplace_smooth=False):
    feature_names = []
    features = []
    V = mesh.vertices
    N = mesh.vertex_normals
    nV = len(V)
    
    # Determine what points to sample
    if sphere_average:
        # compute point cloud
        kernel = generateUniformSpherePoints(npts, r=sphere_radius) # unit sphere at the origin
        
        # sample over kernel
        points = (V[:, np.newaxis] + kernel).reshape(-1, 3) # V*K x 3 array of points
        
        # accessibility mask
        pts_mask = acc(points).reshape(nV, -1) # V x K accessibility samples
        pts_msum = pts_mask.sum(axis=1) # V array of summed mask
    else:
        points = V
    
    # Map electrostatic potential
    if sphere_average:
        phi_s = phi(points).reshape(nV, -1) # V x K potential samples
        phi_s = phi_s*pts_mask # masking inaccessible potential values
        phi_s = phi_s.sum(axis=1)/pts_msum # V array of averaged potential
        features.append(clipOutliers(phi_s))
    else:
        features.append(clipOutliers(phi(V)))
    feature_names.append('averaged_potential')
    
    if efield:
        # Map electric field to vertex normals
        if h is None:
            h = phi.grid.delta/5
        elif isinstance(h, float):
            h = np.array([h, h, h])
        dx = h[0]*np.array([1, 0, 0])
        dy = h[1]*np.array([0, 1, 0])
        dz = h[2]*np.array([0, 0, 1])
        
        if diff_method == 'symmetric_difference':
            Ex = (phi(V + dx) - phi(V - dx))/(2*h[0])
            Ey = (phi(V + dy) - phi(V - dy))/(2*h[1])
            Ez = (phi(V + dz) - phi(V - dz))/(2*h[2])
        elif diff_method == 'five_point_stencil':
            Ex = (-phi(V + 2*dx) + 8*phi(V + dx) - 8*phi(V - dx) + phi(V - 2*dx))/(12*h[0])
            Ey = (-phi(V + 2*dy) + 8*phi(V + dy) - 8*phi(V - dy) + phi(V - 2*dy))/(12*h[1])
            Ez = (-phi(V + 2*dz) + 8*phi(V + dz) - 8*phi(V - dz) + phi(V - 2*dz))/(12*h[2])
        else:
            raise ValueError("Unknown value of parameter `diff_method`: '{}'".format(diff_method))
        
        #if sphere_average:
        #    Ex = (Ex.reshape(nV, -1)*pts_mask).sum(axis=1)/pts_msum
        #    Ey = (Ey.reshape(nV, -1)*pts_mask).sum(axis=1)/pts_msum
        #    Ez = (Ez.reshape(nV, -1)*pts_mask).sum(axis=1)/pts_msum
        
        sig = -N[:, 0]*Ex - N[:, 1]*Ey - N[:, 2]*Ez
        sig = clipOutliers(sig)
        if laplace_smooth:
            sig = laplacianSmoothing(mesh, sig, iterations=2)
        features.append(sig)
        feature_names.append('efield_projection')
    
    return np.array(features).T, feature_names
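# Illustrative usage sketch (an addition, not part of the original module): assumes `phi` and
# `acc` are callables that evaluate the electrostatic potential and solvent accessibility at
# arbitrary xyz coordinates, with `phi.grid.delta` giving the grid spacing when `h` is not
# supplied, matching how they are used above. The helper name is hypothetical.
def _exampleMapPotential(mesh, phi, acc):
    X, names = mapElectrostaticPotentialToMesh(
        mesh, phi, acc,
        sphere_average=True,     # average phi over a small sphere around each vertex
        npts=50, sphere_radius=1.0,
        efield=True,             # also project the electric field onto vertex normals
        diff_method='symmetric_difference'
    )
    # X has shape (num_vertices, len(names)); names == ['averaged_potential', 'efield_projection']
    return dict(zip(names, X.T))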