def mode(self, values, weights=None):
    """compute the mode within each group.

    Parameters
    ----------
    values : array_like, [keys, ...]
        values to compute the mode of per group
    weights : array_like, [keys], float, optional
        optional weight associated with each entry in values

    Returns
    -------
    unique : ndarray, [groups]
        unique keys
    reduced : ndarray, [groups, ...]
        value array, reduced over groups
    """
    if weights is None:
        unique, weights = npi.count(
            (self.index.sorted_group_rank_per_key, values))
    else:
        unique, weights = npi.group_by(
            (self.index.sorted_group_rank_per_key, values)).sum(weights)

    x, bin = npi.group_by(unique[0]).argmax(weights)
    return x, unique[1][bin]

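# A minimal usage sketch of the grouped mode above (assuming, as the self.index
# attribute suggests, that this method lives on numpy_indexed's GroupBy object;
# the sample arrays are illustrative only).
import numpy as np
import numpy_indexed as npi

keys = np.array([0, 0, 0, 1, 1, 1])
vals = np.array([3, 3, 5, 7, 9, 9])
groups, modes = npi.group_by(keys).mode(vals)
# groups -> [0, 1]; modes -> [3, 9], the most frequent value within each group
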
def test_full():
    di = dihedral.DihedralFull(3)
    print(di.complex.topology.elements[2])
    print(di.factors)
    print(di.table)
    print(npi.count(di.element_order))
    print()
    print(di.multiply([1, 2], 2))

def free_faces(coord, connect):
    """ Gets vertices of external faces of the mesh.

    Args:
        coord (:obj:`numpy.array`): Coordinates of the element.
        connect (:obj:`numpy.array`): Element connectivity.

    Returns:
        ind_faces (:obj:`numpy.array`): Row indices into ``coord`` of the four
            nodes of each external face.
    """
    nodes_per_face = np.array([connect[:, [1, 2, 3, 4]], connect[:, [5, 6, 7, 8]],
                               connect[:, [6, 7, 3, 2]], connect[:, [7, 8, 4, 3]],
                               connect[:, [6, 5, 1, 2]], connect[:, [5, 8, 4, 1]]]).reshape(-1, 4)
    unique, counts = npi.count(nodes_per_face)
    unique = unique[counts < 2]
    ind_faces = npi.indices(coord[:, 0], unique.flatten()).reshape(-1, 4)
    return ind_faces

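# A small illustrative sketch of the counting idea used in free_faces (the face
# rows below are made up, not taken from a real mesh): interior faces are listed
# by two neighbouring elements and therefore appear twice in nodes_per_face, so
# keeping rows with counts < 2 isolates the exterior faces.
import numpy as np
import numpy_indexed as npi

faces = np.array([[1, 2, 3, 4],    # exterior, occurs once
                  [2, 3, 7, 6],    # interior, occurs twice
                  [2, 3, 7, 6],
                  [5, 6, 7, 8]])   # exterior, occurs once
unique, counts = npi.count(faces)  # rows of a 2D array act as composite keys
exterior = unique[counts < 2]      # -> [[1, 2, 3, 4], [5, 6, 7, 8]]
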
def calcContagion(self):
    '''
    Contagion or edge density
    index 0 = maximum spatial complexity
    index 1 = no spatial complexity
    '''
    x = self.vsClasses
    fractions = self.getFractions()
    fracCount = 0
    index = 0

    # concatenate shifts to compare neighbours
    neighbours = np.concatenate([
        x[:, :-1].flatten(), x[:, +1:].flatten(),
        x[+1:, :].flatten(), x[:-1, :].flatten(),
        x[:-1, :-1].flatten(), x[+1:, +1:].flatten(),
        x[:-1, +1:].flatten(), x[+1:, :-1].flatten()
    ])
    centers = np.concatenate([
        x[:, +1:].flatten(), x[:, :-1].flatten(),
        x[:-1, :].flatten(), x[+1:, :].flatten(),
        x[+1:, +1:].flatten(), x[:-1, :-1].flatten(),
        x[+1:, :-1].flatten(), x[:-1, +1:].flatten()
    ])

    # extract classes and count differences i.e. adjacencies
    classes, neighboursPerClass = npi.group_by(centers, neighbours)
    for classID, neigh in zip(classes, neighboursPerClass):
        # skip class 0
        if classID != 0:
            # count adjacencies, except those next to 0
            uniqueNeighbours, neighbourCounts = npi.count(neigh[neigh != 0])
            for i in range(len(neighbourCounts)):
                indTemp = fractions[fracCount] * (neighbourCounts[i] / np.sum(neighbourCounts))
                index += indTemp * math.log(indTemp)
            fracCount += 1

    try:
        index /= (2 * math.log(self.getAbs()))
        index += 1
    except ZeroDivisionError:
        index = 1.

    return index

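# A toy sketch of the grouping step that calcContagion builds on (hypothetical
# centre/neighbour class pairs, not a real raster): npi.group_by splits the
# neighbour classes per centre class, after which the adjacencies of each class
# can be counted and weighted by the class fractions.
import numpy as np
import numpy_indexed as npi

centers    = np.array([1, 1, 2, 2, 1, 2])
neighbours = np.array([1, 2, 1, 2, 2, 1])
classes, per_class = npi.group_by(centers, neighbours)
# classes -> [1, 2]; per_class -> [array([1, 2, 2]), array([1, 2, 1])]
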
def plot_mean_and_CI(ax, _x, _y, confidence=True):
    """This helper function plots the mean and the standard error of the mean
    for _y grouped by index _x.

    Args:
        ax (matplotlib axes): Axes to draw the error bars on.
        _x (numpy array): Drug concentrations which group different observations _y.
        _y (numpy array): The data that we would like to find the mean and error of.
        confidence (bool): If False, only the means are plotted, without error bars.

    Returns:
        None
    """
    # Group _y by _x and find the mean and standard error of the mean of _y at each _x
    x_unique, y_mean = npi.group_by(_x).mean(_y)
    sample_size = npi.count(_x)[1]
    y_sem = npi.group_by(_x).std(_y)[1] / np.sqrt(sample_size)
    if not confidence:
        y_sem = None
    ax.errorbar(x=x_unique, y=y_mean, yerr=y_sem, fmt=".", color="black")

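# A possible usage sketch of plot_mean_and_CI (synthetic data; assumes a standard
# matplotlib figure/axes setup): repeated observations per concentration are
# averaged and drawn with +/- 1 SEM error bars.
import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
conc = np.repeat([0.1, 1.0, 10.0], 5)                 # three concentrations, five replicates each
resp = 2 * conc + rng.normal(0, 0.5, size=conc.size)  # noisy responses
fig, ax = plt.subplots()
plot_mean_and_CI(ax, conc, resp)
plt.show()
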
def __calculate_form_factors(self):
    # calculate the rotation matrix between the face normal and the isocell unit sphere's
    # original position and rotate the rays accordingly to match the face normal direction
    face_normals = self.mesh.face_normals
    # # test1 = r.vvrotvec(self.mesh.face_normals[1,:], [0, 0, 1])
    # test1 = r.vvrotvec(face_normals[591:593,:], [0, 0, 1])
    # test = r.vvrotvec(face_normals, [0, 0, 1])
    # rotation_matrices = r.vrrotvec2mat(face_normals, [0, 0, 1])
    rotation_matrices = r.rotation_matrices_from_vectors(face_normals, [0, 0, 1])
    self.__form_factor_properties['rotation_matrices'] = rotation_matrices

    # drays = np.einsum('ijk,ak->iak', rotation_matrices, self.isocell.points)
    # drays = np.einsum('ijj,aj->iaj', rotation_matrices, self.isocell.points)
    drays = np.einsum('ijk,aj->iak', rotation_matrices, self.isocell.points)
    self.__form_factor_properties['drays'] = drays

    # get the centroid of the face/patch and shift it a bit so that rays do not stop at the self face thrown from
    start_points = self.mesh.triangles_center  # the face/patch center points
    offset = np.sign(face_normals)
    offset = offset * 1e-3
    origins = start_points + offset

    # intersects_location requires origins to be the same shape as vectors
    origins = np.repeat(origins, self.__n_rays, axis=0)
    self.__form_factor_properties['origins'] = origins.reshape(drays.shape[0], drays.shape[1], -1)
    # origins = np.tile(np.expand_dims(start_points, 0), (drays.shape[0], 1)) + offset

    # tree = ot.PyOctree(self.mesh.vertices.copy(order='C'), self.mesh.faces.copy(order='C').astype(np.int32))
    # rayList = np.array([origins, drays.reshape(-1, 3)], dtype=np.float32)
    # startPoint = [0.0, 0.0, 0.0]
    # endPoint = [0.0, 0.0, 1.0]
    # rayList1 = np.array([[startPoint, endPoint]], dtype=np.float32)
    # intersectionFound = tree.rayIntersection(rayList)

    # start casting and find intersection points, rays and faces
    start = time.time()
    intersection_points, index_ray, index_tri = self.mesh.ray.intersects_location(
        origins, drays.reshape(-1, 3), multiple_hits=False)
    end = time.time()
    print('Ray casting in: {} sec'.format(end - start))

    # tree = ot.PyOctree(vertices.copy(order='C'), faces.copy(order='C').astype(np.int32))

    # check whether there were / print intersection points
    print('Intersections: {}/{}'.format(len(intersection_points), len(origins)))

    # check whether the extracted intersection output size is correct and fits the input
    if intersection_points.shape[0] != index_ray.shape[0] != index_tri.shape[0]:
        raise Exception('bad size alignment to the intersection output matrices')

    # find the indices of rays that did not intersect any face and recover the size of the total casted rays
    no_intersection_rays = np.arange(origins.shape[0])
    idxs_of_no_intersection_rays = no_intersection_rays[~np.isin(np.arange(no_intersection_rays.size), index_ray)]

    # check whether there are no_intersection_rays, and if yes adjust sizes in the output
    if idxs_of_no_intersection_rays.any():
        # first apply backface culling and filter intersections from rays hitting faces from the back side
        # TODO: this could be addressed optimally by embree if it gets compiled with the corresponding parameter
        start = time.time()
        front_facing = self.__isFacing(
            np.delete(origins, idxs_of_no_intersection_rays, axis=0),
            np.delete(np.repeat(face_normals, self.__n_rays, axis=0),
                      idxs_of_no_intersection_rays, axis=0),
            index_tri)
        end = time.time()
        print('Backface culling in: {} sec'.format(end - start))

        index_ray[np.where(front_facing == False)] = -1
        index_tri[np.where(front_facing == False)] = -1
        intersection_points[np.where(front_facing == False)] = -np.inf

        # index_ray = np.insert(index_ray, idxs_of_no_intersection_rays, -1)
        # simple insert does not work properly. See: https://stackoverflow.com/questions/47442115/insert-values-at-specific-locations-in-numpy-array-np-insert-done-right
        index_ray = np.insert(
            index_ray,
            idxs_of_no_intersection_rays - np.arange(len(idxs_of_no_intersection_rays)),
            -1)
        # index_tri = np.insert(index_tri, idxs_of_no_intersection_rays, -1)
        index_tri = np.insert(
            index_tri,
            idxs_of_no_intersection_rays - np.arange(len(idxs_of_no_intersection_rays)),
            -1)
        # intersection_points = np.insert(intersection_points, idxs_of_no_intersection_rays, -np.inf, axis=0)
        intersection_points = np.insert(
            intersection_points,
            idxs_of_no_intersection_rays - np.arange(len(idxs_of_no_intersection_rays)),
            -np.inf, axis=0)
    else:
        front_facing = self.__isFacing(
            origins, np.repeat(face_normals, self.__n_rays, axis=0), index_tri)

        index_ray[np.where(front_facing == False)] = -1
        index_tri[np.where(front_facing == False)] = -1
        intersection_points[np.where(front_facing == False)] = -np.inf

    index_ray = index_ray.reshape(self.__patch_count, -1)
    index_tri = index_tri.reshape(self.__patch_count, -1)
    intersection_points = intersection_points.reshape(self.__patch_count, self.__n_rays, -1)

    self.__form_factor_properties['index_rays'] = index_ray
    self.__form_factor_properties['index_tri'] = index_tri
    self.__form_factor_properties['intersection_points'] = intersection_points

    # Bin elements per row (this means to find how many times each face is intersected
    # by the thrown rays) from the intersected triangles matrix, i.e. index_tri.
    # See:
    # https://stackoverflow.com/questions/62662346/map-amount-of-repeated-elements-row-wise-from-a-numpy-array-to-another?noredirect=1#comment110814113_62662346
    # https://stackoverflow.com/questions/46256279/bin-elements-per-row-vectorized-2d-bincount-for-numpy and
    # https://stackoverflow.com/a/40593110/1476932
    # solution adapted from the last link
    rowidx, colidx = np.indices(index_tri.shape)
    (cols, rows), B = npi.count((index_tri.flatten(), rowidx.flatten()))

    # remove the negative indexing that we introduced for the missing intersections
    negative_idxs = np.where(cols < 0)
    cols = np.delete(cols, negative_idxs)
    rows = np.delete(rows, negative_idxs)
    B = np.delete(B, negative_idxs)

    # assign values to the corresponding positions of the form factors matrix
    self.ffs[rows, cols] = B
    self.ffs /= self.__n_rays

    # # # check whether there are no_intersection_rays, and if yes adjust sizes in the output
    # # if idxs_of_no_intersection_rays.any():
    # #     # first apply backface culling and filter intersections from rays hitting faces from the back side
    # #     # TODO: this could be addressed optimally by embree if it gets compiled with the corresponding parameter
    # #     front_facing = self.__isFacing(np.delete(origins, idxs_of_no_intersection_rays, axis=0), np.delete(np.repeat(face_normals, self.isocell.points.shape[0], axis=0), idxs_of_no_intersection_rays, axis=0), index_tri)
    # #
    # #     index_ray = np.delete(index_ray, np.where(front_facing == False))
    # #     index_tri = np.delete(index_tri, np.where(front_facing == False))
    # #     intersection_points = np.delete(intersection_points, np.where(front_facing == False), axis=0)
    # #
    # #     eq = npi.group_by(origins[index_ray])
    # # # locs = trimesh.points.PointCloud(intersection_points)

    # # render the result with vtkplotter
    # axes = vp.addons.buildAxes(vp.trimesh2vtk(self.mesh), c='k', zxGrid2=True)
    # rays = vp.Lines(origins[0:1083, :], drays[0, 0:1083, :].reshape(-1, 3) + origins[0:1083, :], c='b', scale=200)
    # locs = vp.Points(intersection_points[0:1083, :], c='r')
    # # rays = vp.Arrows(origins, drays+start_point, c='b', scale=1000)
    # normal = vp.Arrows(start_points[0, :].reshape(-1, 3), (face_normals[0, :]+start_points[0, :]).reshape(-1, 3), c='g', scale=250)
    # vp.show(vp.trimesh2vtk(self.mesh).alpha(0.1).lw(0.1), locs, rays, normal, axes, axes=4)

    # # # for each hit, find the distance along its vector
    # # # you could also do this against the single camera Z vector
    # # depth = trimesh.util.diagonal_dot(intersection_points - start_point, drays[index_ray])

    return self.ffs

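# A minimal sketch of the per-row counting trick used to fill self.ffs above
# (illustrative index_tri with -1 marking rays that hit nothing): counting
# (face index, patch row) pairs with npi.count gives, per patch, how many rays
# hit each face; dividing by the number of rays then gives the hit fraction.
import numpy as np
import numpy_indexed as npi

index_tri = np.array([[0, 2, 2, -1],
                      [1, 1, 0, 3]])                 # 2 patches x 4 rays
rowidx, colidx = np.indices(index_tri.shape)
(cols, rows), counts = npi.count((index_tri.flatten(), rowidx.flatten()))
valid = cols >= 0                                    # drop the misses marked with -1
ffs = np.zeros((index_tri.shape[0], index_tri.max() + 1))
ffs[rows[valid], cols[valid]] = counts[valid]
ffs /= index_tri.shape[1]                            # hit fraction per (patch, face) pair
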
def test_basic():
    # tet_group = tetrahedral.Tetrahedral()
    # orbits = np.arange(24) % 2
    # domains = tet_group.from_orbits(orbits)
    # basis = tet_group.basis_from_domains(domains)
    # transforms = tet_group.transforms_from_basis(basis)

    group = octahedral.Octahedral()
    npt.assert_allclose(np.linalg.norm(group.complex.vertices, axis=-1), 1.0)
    print(group.fundamental_domains.shape)
    print([tables.shape for tables in group.vertices])

    # domains = group.fundamental_domains
    orbits = np.arange(48)
    domains = group.domains_from_orbits(orbits)
    basis = group.basis_from_domains(domains)
    npt.assert_allclose(np.linalg.norm(basis, axis=-1), 1.0)
    # transforms = group.transforms_from_basis(basis)
    # transforms = group.representation_from_basis(basis)
    # orientations = group.orientation_from_basis(basis)
    # print(orientations)
    # representation, relative = group.relative_transforms(transforms)

    # orbits = np.zeros(24, dtype=np.uint8)   # null group
    orbits = np.arange(48) % 2   # chiral group; index 2, order 24
    orbits = octahedral.Pyritohedral().orbits()
    # print(orbits.shape)
    # quit()
    domains = group.domains_from_orbits(orbits)
    basis = group.basis_from_domains(domains)
    npt.assert_allclose(np.linalg.norm(basis, axis=-1), 1.0)
    transforms = group.transforms_from_basis(basis)
    # orientations = group.orientation_from_basis(basis)
    # print(orientations)
    # representation, relative = group.relative_transforms(transforms)

    # print(group.from_orbits(np.zeros(120)).shape)
    # print(group.match_domains(group.fundamental_domains))
    print()

    # E, T = group._edges(group.fundamental_domains)
    # for e in E:
    #     print(e)
    # for e in T:
    #     print(e)
    #
    # E, T = group._vertices(group.fundamental_domains)
    # for v in E:
    #     print(v)
    # for v in T:
    #     print(v)

    # print(group.fundamental_vertices(representation))
    # print(group.fundamental_edges(representation).shape)

    tables = group.elements_tables(transforms)
    for q in np.split(tables[0], np.cumsum(group.complex.topology.n_elements[:-1]), axis=1):
        assert npi.all_unique(q)
        print(q)

    print(tables[0])
    print(tables[1])
    print(tables[2])
    print(orbits[tables[2]])
    print(npi.count(orbits[tables[2]]))

def test_table():
    ico = icosahedral.IcosahedralFull()
    print(ico.factors)
    print(ico.table)
    print(npi.count(ico.element_order))