def _to_delaunay(data_dict):
    for name, data in data_dict.items():
        mesh = Delaunay(data['coord'])
        mesh.simplices = data['connect'].astype(np.int32)
        data_dict[name]['mesh'] = mesh
    return data_dict
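# A minimal usage sketch for _to_delaunay (illustrative only: the 'coord' and
# 'connect' arrays below are made-up stand-ins for the expected dict layout).
import numpy as np
from scipy.spatial import Delaunay

data = {
    'square': {
        'coord': np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]]),
        'connect': np.array([[0, 1, 2], [0, 2, 3]]),
    }
}
data = _to_delaunay(data)
print(data['square']['mesh'].simplices)  # prints the user-supplied connectivity, not qhull's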
def generateMesh(self):
    points = self.generatePointCloud()
    tri = Delaunay(points)
    tri.simplices = self.filterTriangles(tri)
    self.plotMesh(points, tri)
    print("Generate Mesh Done")
    return Mesh2D(tri.points, tri.simplices)
def wet_circles(A, B, thetaA, thetaB):
    """Generates a mesh that wets the surface of circles A and B.

    Parameters
    ----------
    A, B : Circle
    thetaA, thetaB : list
        The number of radians that the wetting covers and the number of
        points on the surface range.
    """
    vector = B.center - A.center
    if vector.x > 0:
        angleA = np.arctan(vector.y / vector.x)
        angleB = PI + angleA
    else:
        angleB = np.arctan(vector.y / vector.x)
        angleA = PI + angleB
    # print(vector)

    rA = A.radius
    rB = B.radius

    points = []
    for t in ((np.arange(0, thetaA[1]) / (thetaA[1] - 1) - 0.5) * thetaA[0] + angleA):
        x = rA * np.cos(t) + A.center.x
        y = rA * np.sin(t) + A.center.y
        points.append([x, y])
    mid = len(points)
    for t in ((np.arange(0, thetaB[1]) / (thetaB[1] - 1) - 0.5) * thetaB[0] + angleB):
        x = rB * np.cos(t) + B.center.x
        y = rB * np.sin(t) + B.center.y
        points.append([x, y])
    points = np.array(points)

    # Triangulate the polygon
    tri = Delaunay(points)

    # Remove extra triangles: keep only triangles that connect both circles
    # print(tri.simplices)
    mask = np.sum(tri.simplices < mid, 1)
    mask = np.logical_and(mask < 3, mask > 0)
    tri.simplices = tri.simplices[mask, :]
    # print(tri.simplices)

    m = Mesh()
    for t in tri.simplices:
        m.append(Triangle(Point([points[t[0], 0], points[t[0], 1]]),
                          Point([points[t[1], 0], points[t[1], 1]]),
                          Point([points[t[2], 0], points[t[2], 1]])))
    return m
def _mesh(self):
    """Create mesh of all the atoms in the cluster.

    :return: Mesh
    :rtype: scipy.spatial.Delaunay
    """
    from scipy.spatial import Delaunay
    points = self.cluster.get_positions()
    delaunay = Delaunay(points)
    simplices = self._filter_max_dist_in_element(delaunay.simplices)
    delaunay.simplices = simplices
    return delaunay
def Omega_mesh(self, dx, landmark):
    x, y, z = np.meshgrid(np.arange(-1, 1 + dx, dx),
                          np.arange(-1, 1 + dx, dx),
                          np.arange(-1, 1 + dx, dx))
    v = np.hstack((x.reshape(-1, 1), y.reshape(-1, 1), z.reshape(-1, 1)))  # v is of size l-by-3
    l = v.shape[0]
    ll = landmark.ndim
    if not ll == 1:
        num_landmark = landmark.shape[0]
    else:
        num_landmark = 1
    landmark_index = np.zeros(num_landmark)
    for i in range(num_landmark):
        if not ll == 1:
            check = landmark[i, :] == v
        else:
            check = landmark == v
        result = np.matmul(check, np.ones((3, 1)))
        if np.isin(3, result):
            landmark_index[i] = np.argmax(result)
        else:
            v = np.vstack((v, landmark))
            landmark_index[i] = l
            l += 1

    tri = Delaunay(v)
    faces = tri.simplices
    numf = np.shape(faces)[0]
    planar = np.zeros(numf)
    for i in range(numf):
        facet = faces[i, :]
        diff = v[facet[0:3], :] - np.tile(v[facet[3], :], (3, 1))
        if not np.linalg.matrix_rank(diff) == args.dimension:
            planar[i] = 1
        # else:
        #     print(np.linalg.det(diff))
    new_face = faces[planar == 0, :]
    tri.simplices = new_face

    file2 = open('mesh.txt', 'w')
    file2.write('triangulation: \n')
    for i in range(new_face.shape[0]):
        for j in range(4):
            file2.write(str(new_face[i, j]))
            file2.write(' ')
        file2.write('\n')
    file2.write('vertices: \n')
    for i in range(v.shape[0]):
        for j in range(3):
            file2.write(str(v[i, j]))
            file2.write(' ')
        file2.write('\n')
    file2.close()
    return v, new_face, landmark_index
def delaunayTriangulation(points):
    # https://stackoverflow.com/questions/36604172/difference-between-matlab-delaunayn-and-scipy-delaunay
    N = points.shape[1]  # The spatial dimension of the points
    options = 'Qt Qbb Qc' if N <= 3 else 'Qt Qbb Qc Qx'  # Set the QHull options
    DT = Delaunay(points, qhull_options=options)
    tri = DT.simplices
    keep = np.ones(len(tri), dtype=bool)
    for i, t in enumerate(tri):
        if abs(np.linalg.det(np.hstack((points[t], np.ones([1, N + 1]).T)))) < 1E-15:
            keep[i] = False  # Simplex is degenerate (coplanar points), we don't want to keep it
    tri = tri[keep]
    DT.simplices = tri
    return DT
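# A minimal usage sketch for delaunayTriangulation, assuming numpy and
# scipy.spatial.Delaunay are imported as above; the random 2D point set is
# purely illustrative.
import numpy as np

rng = np.random.default_rng(0)
pts = rng.random((20, 2))            # 20 random points in the unit square
dt = delaunayTriangulation(pts)      # Delaunay object with degenerate simplices removed
print(dt.simplices.shape)            # (n_kept_simplices, 3)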
def create_triangulation(self, ures=4, vres=4, lres=1, **kwargs):
    """Compute the triangulation of the volume using scipy's `Delaunay`
    function.

    Parameters
    ----------
    ures, vres : int
        Specifies the oversampling of the original volume in u and v
        directions. For example: if `ures` = 2, and `self.u` = [0, 1, 2, 3],
        then the surface will be resampled at [0, 0.5, 1, 1.5, 2, 2.5, 3]
        prior to plotting.

    kwargs : dict
        See scipy docs for `scipy.spatial.Delaunay()`

    Returns
    -------
    tri : scipy.spatial.Delaunay
        The (cached) triangulation of the resampled volume points.
    """
    from scipy.spatial import Delaunay

    if self.tri is not None:
        return self.tri

    # Make new u and v values of (possibly) higher resolution than
    # the original ones.
    hru, hrv, hrl = self._resample_uvl(ures, vres, lres)
    N = 3
    volpts = self.ev(hru, hrv, hrl).reshape(3, -1).T
    qhull_options = 'QJ'
    tri = Delaunay(volpts, qhull_options=qhull_options)
    keep = np.ones(len(tri.simplices), dtype=bool)
    for i, t in enumerate(tri.simplices):
        if abs(np.linalg.det(np.hstack((volpts[t], np.ones([1, N + 1]).T)))) < 1E-12:
            keep[i] = False  # Simplex is degenerate (coplanar points), we don't want to keep it
    tri.simplices = tri.simplices[keep]
    self.tri = tri
    return tri
def plot_model_grid():
    '''
    Plot the grid of models in each metallicity. Internal use.
    '''
    grid_kurucz = pd.read_csv('files/grid_points_kurucz.csv')
    for m_h in grid_kurucz.groupby('m_h').size().index:
        plt.figure(figsize=(13, 4))
        index = grid_kurucz['m_h'] == m_h
        grid_matrix = np.array(grid_kurucz.loc[index, ['Teff', 'logg']])
        tri = Delaunay(grid_matrix)
        for i in range(len(tri.simplices) - 1, -1, -1):
            if min(grid_matrix[tri.simplices[i]][:, 0]) >= 35000:
                teff_gap = 5000
            else:
                teff_gap = 1500
            if np.ptp(grid_matrix[tri.simplices[i]][:, 0]) >= teff_gap or np.ptp(grid_matrix[tri.simplices[i]][:, 1]) > 0.5:
                tri.simplices = np.concatenate([tri.simplices[:i], tri.simplices[i+1:]])
        plt.triplot(grid_matrix[:, 0], grid_matrix[:, 1], tri.simplices,
                    zorder=0, lw=1, color='gray', alpha=0.5)
        if m_h < 0.5:
            plt.plot([50000, 42500], [5, 5], color='gray', zorder=0, alpha=0.5, lw=1)
        elif m_h == 0.5:
            plt.plot([45000, 40000], [5, 5], color='gray', zorder=0, alpha=0.5, lw=1)
        elif m_h == 1:
            plt.plot([40000, 37500], [5, 5], color='gray', zorder=0, alpha=0.5, lw=1)
        plt.scatter(grid_kurucz.loc[index & (grid_kurucz['length'] == 72), 'Teff'],
                    grid_kurucz.loc[index & (grid_kurucz['length'] == 72), 'logg'],
                    s=5, label='Model length: 72')
        plt.scatter(grid_kurucz.loc[index & (grid_kurucz['length'] == 64), 'Teff'],
                    grid_kurucz.loc[index & (grid_kurucz['length'] == 64), 'logg'],
                    s=5, c='C3', label='Model length: 64')
        plt.legend()
        plt.xlim((1175, 52325))
        plt.title('[Fe/H] = {:.1f}'.format(m_h))
        plt.xlabel(r'$T_\mathrm{{eff}}$')
        plt.ylabel('logg')
        plt.gca().invert_xaxis()
        plt.gca().invert_yaxis()
        plt.tight_layout()
        plt.savefig('../docs/img/grid_points_kurucz/m_h{:+.1f}.png'.format(m_h), dpi=250)
        plt.close()
def find_neighbours(x, triang):
    # Indices of all points sharing a simplex with point x.
    return list(
        set(indx for simplex in triang.simplices if x in simplex
            for indx in simplex if indx != x))


### remove convex hull for more sensible graph
hull = ConvexHull(points)
delete_indices = []
for tidx in range(tri.simplices.shape[0]):
    for hidx in range(hull.simplices.shape[0]):
        is_hull = (int((tri.simplices[tidx] == hull.simplices[hidx][0]).any())
                   + int((tri.simplices[tidx] == hull.simplices[hidx][1]).any())) == 2
        if is_hull:
            delete_indices.append(tidx)
delete_indices = sorted(np.sort(delete_indices), reverse=True)
for idx in delete_indices:
    tri.simplices = np.delete(tri.simplices, idx, 0)

### write graph file
graphfile = open('./graph.graph', 'w+')
for pidx in range(points.size // 2):
    neighbours = find_neighbours(pidx, tri)
    if neighbours != []:
        graphfile.write("%d : %d %s\n" % (pidx, len(neighbours),
                                          ' '.join([str(neighbour) for neighbour in neighbours])))

mappingfile = open('./graph.mapping', 'w+')
for pidx in range(points.size // 2):
    neighbours = find_neighbours(pidx, tri)
    if neighbours != []:
        mappingfile.write("(%f , %f) : %d\n" % (points[pidx][0], points[pidx][1], pidx))
def preprocessingDataReceivers(mesh_file, receivers_file, out_dir, rank):
    '''Preprocess conductivity model associated to a given mesh in Gmsh
    format. Here, dofs are defined for edge finite element computations.

    :param str mesh_file: mesh file name to be preprocessed.
    :param str receivers_file: receiver positions file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: None
    '''
    from scipy.spatial import Delaunay

    if rank == 0:
        PETSc.Sys.Print(' Receiver positions (receivers.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)
    if rank == 0:
        if not success:
            msg = (' preprocessingDataReceivers(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Check if receivers_file exists
    success = checkFilePath(receivers_file)
    if rank == 0:
        if not success:
            msg = (' preprocessingDataReceivers(): file ' + receivers_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read receivers_file
    receivers = np.loadtxt(receivers_file)

    # Number of receivers
    if receivers.ndim == 1:
        nReceivers = 1
    else:
        dim = receivers.shape
        nReceivers = dim[0]

    # Read nodes
    nodes, nNodes = readGmshNodes(mesh_file)

    # Build Delaunay triangulation with nodes
    tri = Delaunay(nodes)

    # Delete unnecessary arrays
    del nodes

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute dofs
    dofs, _ = computeDofs(elemsN, nElems)

    # Overwrite Delaunay structure with mesh_file connectivity
    tri.simplices = elemsN.astype(np.int32)

    # Delete unnecessary arrays
    del elemsN

    # Find out which tetrahedral element the points are in
    recvElems = tri.find_simplex(receivers)

    # Determine if all receiver points were found
    idx = np.where(recvElems < 0)[0]

    # If idx is not empty, there are receivers outside the domain
    if idx.size != 0:
        PETSc.Sys.Print(' Some receivers were not located')
        PETSc.Sys.Print(' The following ID-receivers will not be taken ' +
                        'into account: ')
        PETSc.Sys.Print(idx)

        # Update number of receivers
        nReceivers = nReceivers - len(idx)

        if nReceivers == 0:
            PETSc.Sys.Print(' No receiver has been found. Nothing to do.'
                            ' Aborting')
            exit(-1)

        # Remove idx from receivers matrix
        receivers = np.delete(receivers, idx, axis=0)

    # Create new file with located points coordinates
    # Build path to save the file
    out_path = out_dir + 'receiversPETGEM.txt'
    PETSc.Sys.Print(' Saving file with localized receiver positions ' +
                    '(receiversPETGEM.txt)')
    np.savetxt(out_path, receivers, fmt='%1.8e')

    # Allocate space for receivers in PETGEM format
    numDimensions = 3
    nodalOrder = 4
    edgeOrder = 6
    allocate = numDimensions + nodalOrder*numDimensions + nodalOrder + edgeOrder
    tmp = np.zeros((nReceivers, allocate), dtype=np.float)

    # Fill tmp matrix with receiver positions, element coordinates and
    # nodal indexes
    for iReceiver in np.arange(nReceivers):
        # If there is one receiver
        if nReceivers == 1:
            # Get receiver coordinates
            coordiReceiver = receivers[0:]
            # Get element coordinates (container element)
            coordElement = tri.points[tri.simplices[recvElems, :]]
            coordElement = coordElement.flatten()
            # Get nodal indexes (container element)
            nodesElement = tri.simplices[recvElems, :]
            # Get element-dofs indices (container element)
            dofsElement = dofs[recvElems, :]
        # If there is more than one receiver
        else:
            # Get receiver coordinates
            coordiReceiver = receivers[iReceiver, :]
            # Get element coordinates (container element)
            coordElement = tri.points[tri.simplices[recvElems[iReceiver], :]]
            coordElement = coordElement.flatten()
            # Get nodal indexes (container element)
            nodesElement = tri.simplices[recvElems[iReceiver], :]
            # Get element-dofs indices (container element)
            dofsElement = dofs[recvElems[iReceiver], :]

        # Insert data for iReceiver
        tmp[iReceiver, 0:3] = coordiReceiver
        tmp[iReceiver, 3:15] = coordElement
        tmp[iReceiver, 15:19] = nodesElement
        tmp[iReceiver, 19:] = dofsElement

    # Delete unnecessary arrays
    del tri
    del dofs

    # Get matrix dimensions
    size = tmp.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], tmp)

    # Delete unnecessary arrays
    del tmp

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'receivers.dat'

    # Write PETGEM receivers in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    return nReceivers
def ComputeTriangularMesh(vertices, segments):
    from scipy.spatial import Delaunay
    from copy import deepcopy
    nVertices = len(vertices)
    tri = Delaunay(np.array(vertices))
    trigs = deepcopy(tri.simplices)

    #+++++++++++++++++++++++++++++++++
    #compute vertices2simplices list:
    vertices2simplices = [[]]*nVertices
    cnt = 0
    for trig in trigs:
        for i in trig:
            alist = list(vertices2simplices[i])
            alist.append(cnt)
            vertices2simplices[i] = alist
        cnt += 1  # trig counter
    #print(trigs)
    #print(vertices2simplices)

    #+++++++++++++++++++++++++++++++++
    #compute neighbors:
    trigNeighbors = 0*trigs  # -1 means no neighbor trig!
    trigNeighbors[:, :] = -1
    #run over all triangles
    for i in range(len(trigs)):
        for j in range(3):
            i0 = trigs[i, j]
            i1 = trigs[i, (j+1) % 3]
            actSeg = [i0, i1]
            listTest = vertices2simplices[i0] + vertices2simplices[i1]
            for trigIndex in listTest:
                if trigIndex < i:
                    for k in range(3):
                        t0 = trigs[trigIndex, k]
                        t1 = trigs[trigIndex, (k+1) % 3]
                        if (i0 == t1) and (i1 == t0):  # opposite trig orientation is reversed ...
                            trigNeighbors[i, j] = trigIndex
                            trigNeighbors[trigIndex, k] = i
    #print("neighbors=", trigNeighbors)

    #+++++++++++++++++++++++++++++++++
    #compute inside triangles:
    trianglesInside = [-1]*len(trigs)  # -1 is undefined, 0=outside, 1=inside
    for seg in segments:
        #triangles left to segment are inside
        listTest = vertices2simplices[seg[0]] + vertices2simplices[seg[1]]
        for trigIndex in listTest:
            for k in range(3):
                t0 = trigs[trigIndex, k]
                t1 = trigs[trigIndex, (k+1) % 3]
                if (seg[0] == t0) and (seg[1] == t1):  # inside triangle
                    trianglesInside[trigIndex] = 1
                elif (seg[0] == t1) and (seg[1] == t0):  # outside triangle
                    trianglesInside[trigIndex] = 0
    #print(trianglesInside)

    #finally find remaining triangles (usually all triangles are on boundary, so nothing remains):
    undefinedTrigs = True
    while undefinedTrigs:  # iterate as long as there are undefined triangles; usually only few iterations necessary
        undefinedTrigs = False
        #print("iterate neighbors")
        for i in range(len(trigs)):
            if trianglesInside[i] == -1:  # still undefined
                found = False
                for j in range(3):  # look at all neighbors
                    tn = trigNeighbors[i, j]
                    if trianglesInside[tn] != -1:
                        trianglesInside[i] = trianglesInside[tn]
                        found = True
                if not found:
                    undefinedTrigs = True

    #now create new list of interior triangles
    interiorTrigs = []
    for i in range(len(trigs)):
        if trianglesInside[i] == 1:
            interiorTrigs += [list(trigs[i])]
    #print("interiorTrigs=", interiorTrigs)

    tri.simplices = np.array(interiorTrigs)
    return tri
def _make_delaunay_mesh(self, coords, connectivity):
    mesh = Delaunay(coords)
    mesh.simplices = connectivity.astype(np.int32)
    return mesh
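# A standalone sketch of the same pattern _make_delaunay_mesh uses: build a
# Delaunay object from the coordinates, then overwrite its `simplices` with an
# externally supplied connectivity (the arrays below are illustrative only).
import numpy as np
from scipy.spatial import Delaunay

coords = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0],
                   [0.0, 1.0, 0.0], [0.0, 0.0, 1.0], [1.0, 1.0, 1.0]])
connectivity = np.array([[0, 1, 2, 3], [1, 2, 3, 4]])
mesh = Delaunay(coords)
mesh.simplices = connectivity.astype(np.int32)
print(mesh.simplices)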
def run(self, setup):
    """Run a preprocessing task.

    :param obj setup: inputSetup object.
    :return: None
    """
    # ---------------------------------------------------------------
    # Initialization
    # ---------------------------------------------------------------
    # Start timer
    Timers()["Preprocessing"].start()

    # Parameters shortcut (for code legibility)
    model = setup.model
    output = setup.output

    # Obtain the MPI environment
    parEnv = MPIEnvironment()

    # ---------------------------------------------------------------
    # Import mesh file (gmsh format)
    # ---------------------------------------------------------------
    # Read nodes
    nodes, _ = readGmshNodes(model.mesh_file)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(model.mesh_file)

    # ---------------------------------------------------------------
    # Preprocessing nodal coordinates
    # ---------------------------------------------------------------
    Print.master(' Nodal coordinates')

    # Build coordinates in PETGEM format where each row represents the
    # xyz coordinates of the 4 nodes of a tetrahedral element
    num_dimensions = 3
    num_nodes_per_element = 4
    data = np.array((nodes[elemsN[:], :]), dtype=np.float)
    data = data.reshape(nElems, num_dimensions*num_nodes_per_element)

    # Get matrix dimensions
    size = data.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], data)

    # Build path to save the file
    out_path = output.directory_scratch + '/nodes.dat'

    if parEnv.rank == 0:
        # Write PETGEM nodes in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------------------------------------------------------------
    # Preprocessing mesh connectivity
    # ---------------------------------------------------------------
    Print.master(' Mesh connectivity')

    # Get matrix dimensions
    size = elemsN.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], elemsN)

    # Build path to save the file
    out_path = output.directory_scratch + '/meshConnectivity.dat'

    if parEnv.rank == 0:
        # Write PETGEM connectivity in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------------------------------------------------------------
    # Preprocessing edges connectivity
    # ---------------------------------------------------------------
    Print.master(' Edges connectivity')

    # Compute edges
    elemsE, edgesNodes = computeEdges(elemsN, nElems)
    nEdges = edgesNodes.shape[0]

    # Get matrix dimensions
    size = elemsE.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], elemsE)

    # Build path to save the file
    out_path = output.directory_scratch + '/edges.dat'

    if parEnv.rank == 0:
        # Write PETGEM edges in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # Reshape edgesNodes and save
    num_nodes_per_edge = 2
    num_edges_per_element = 6
    data = np.array((edgesNodes[elemsE[:], :]), dtype=np.float)
    data = data.reshape(nElems, num_nodes_per_edge*num_edges_per_element)

    # Get matrix dimensions
    size = data.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], data)

    # Build path to save the file
    out_path = output.directory_scratch + '/edgesNodes.dat'

    if parEnv.rank == 0:
        # Write PETGEM edgesNodes in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------------------------------------------------------------
    # Preprocessing faces connectivity
    # ---------------------------------------------------------------
    Print.master(' Faces connectivity')

    # Compute faces
    elemsF, facesN = computeFaces(elemsN, nElems)
    nFaces = facesN.shape[0]

    # Get matrix dimensions
    size = elemsF.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], elemsF)

    # Build path to save the file
    out_path = output.directory_scratch + '/faces.dat'

    if parEnv.rank == 0:
        # Write PETGEM faces in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------------------------------------------------------------
    # Preprocessing faces-edges connectivity
    # ---------------------------------------------------------------
    Print.master(' Faces-edges connectivity')

    N = invConnectivity(elemsF, nFaces)

    if nElems != 1:
        N = np.delete(N, 0, axis=1)

    # Allocate
    facesE = np.zeros((nFaces, 3), dtype=np.int)

    # Compute edges list for each face
    for i in np.arange(nFaces):
        iEle = N[i, 0]
        edgesEle = elemsE[iEle, :]
        facesEle = elemsF[iEle, :]
        kFace = np.where(facesEle == i)[0]
        if kFace == 0:  # Face 1
            facesE[facesEle[kFace], :] = [edgesEle[0], edgesEle[1], edgesEle[2]]
        elif kFace == 1:  # Face 2
            facesE[facesEle[kFace], :] = [edgesEle[0], edgesEle[4], edgesEle[3]]
        elif kFace == 2:  # Face 3
            facesE[facesEle[kFace], :] = [edgesEle[1], edgesEle[5], edgesEle[4]]
        elif kFace == 3:  # Face 4
            facesE[facesEle[kFace], :] = [edgesEle[2], edgesEle[5], edgesEle[3]]

    num_faces_per_element = 4
    num_edges_per_face = 3
    data = np.array((facesE[elemsF[:], :]), dtype=np.float)
    data = data.reshape(nElems, num_faces_per_element*num_edges_per_face)

    # Get matrix dimensions
    size = data.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], data)

    # Build path to save the file
    out_path = output.directory_scratch + '/facesEdges.dat'

    if parEnv.rank == 0:
        # Write PETGEM faces-edges in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------------------------------------------------------------
    # Preprocessing dofs connectivity
    # ---------------------------------------------------------------
    Print.master(' DOFs connectivity')

    # Compute degrees of freedom connectivity
    dofs, dof_edges, dof_faces, _, total_num_dofs = computeConnectivityDOFS(
        elemsE, elemsF, model.basis_order)

    # Get matrix dimensions
    size = dofs.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], dofs)

    # Build path to save the file
    out_path = output.directory_scratch + '/dofs.dat'

    if parEnv.rank == 0:
        # Write PETGEM dofs in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------------------------------------------------------------
    # Preprocessing boundaries
    # ---------------------------------------------------------------
    Print.master(' Boundaries')

    # Compute boundary faces
    bFacesN, bFaces = computeBoundaryFaces(elemsF, facesN)

    # Compute boundary edges
    bEdges = computeBoundaryEdges(edgesNodes, bFacesN)

    # Compute dofs on boundaries
    _, indx_boundary_dofs = computeBoundaries(dofs, dof_edges, dof_faces,
                                              bEdges, bFaces, model.basis_order)

    # Build PETSc structures
    vector = createSequentialVectorWithArray(indx_boundary_dofs)

    # Build path to save the file
    out_path = output.directory_scratch + '/boundaries.dat'

    if parEnv.rank == 0:
        # Write PETGEM boundaries in PETSc format
        writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    # ---------------------------------------------------------------
    # Preprocessing sigma model
    # ---------------------------------------------------------------
    Print.master(' Conductivity model')

    # Read element's tag
    elemsS, nElems = readGmshPhysicalGroups(model.mesh_file)

    # Build conductivity arrays
    conductivityModel = np.zeros((nElems, 2), dtype=np.float)
    for i in np.arange(nElems):
        # Set horizontal sigma
        conductivityModel[i, 0] = model.sigma_horizontal[np.int(elemsS[i])]
        # Set vertical sigma
        conductivityModel[i, 1] = model.sigma_vertical[np.int(elemsS[i])]

    # Get matrix dimensions
    size = conductivityModel.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], conductivityModel)

    # Build path to save the file
    out_path = output.directory_scratch + '/conductivityModel.dat'

    if parEnv.rank == 0:
        # Write PETGEM conductivity model in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------------------------------------------------------------
    # Preprocessing receivers
    # ---------------------------------------------------------------
    Print.master(' Receivers')

    # Open receivers_file
    fileID = h5py.File(model.receivers_file, 'r')

    # Read receivers
    receivers = fileID.get('data')[()]

    # Number of receivers
    if receivers.ndim == 1:
        nReceivers = 1
    else:
        dim = receivers.shape
        nReceivers = dim[0]

    # Build Delaunay triangulation with nodes
    tri = Delaunay(nodes)

    # Overwrite Delaunay structure with mesh_file connectivity and points
    tri.simplices = elemsN.astype(np.int32)
    tri.vertices = elemsN.astype(np.int32)

    # Find out which tetrahedral element the receiver points are in
    recvElems = tri.find_simplex(receivers, bruteforce=True, tol=1.e-12)

    # Find out which tetrahedral element the source point is in
    srcElem = tri.find_simplex(model.src_position, bruteforce=True, tol=1.e-12)

    # Determine if all receiver points were found
    idx = np.where(np.logical_or(recvElems > nElems, recvElems < 0))[0]

    # If idx is not empty, there are receivers outside the domain
    if idx.size != 0:
        Print.master(' The following receivers were not located and will not '
                     'be taken into account ' + str(idx))
        # Update number of receivers
        nReceivers = nReceivers - len(idx)

        if nReceivers == 0:
            Print.master(' No receiver has been found. Nothing to do. Aborting')
            exit(-1)

        # Remove idx from receivers matrix
        receivers = np.delete(receivers, idx, axis=0)

        # Remove idx from recvElems
        recvElems = np.delete(recvElems, idx, axis=0)

    # If srcElem is negative, the source was not located
    if srcElem < 0:
        Print.master(' Source not located in the computational domain. '
                     'Please, improve the mesh quality')
        exit(-1)

    # Compute number of dofs per element
    num_dof_in_element = np.int(model.basis_order*(model.basis_order+2)*(model.basis_order+3)/2)

    # Allocate
    data_receiver = np.zeros((nReceivers, 53+num_dof_in_element), dtype=np.float)

    # Fill tmp matrix with receiver positions, element coordinates and
    # nodal indexes
    for i in np.arange(nReceivers):
        # If there is one receiver
        if nReceivers == 1:
            # Get index of tetrahedral element (receiver container)
            iEle = recvElems
            # Get dofs of element container
            dofsElement = dofs[iEle]
        # If there is more than one receiver
        else:
            # Get index of tetrahedral element (receiver container)
            iEle = recvElems[i]
            # Get dofs of element container
            dofsElement = dofs[iEle, :]

        # Get indexes of nodes for i and insert
        nodesReceiver = elemsN[iEle, :]
        data_receiver[i, 0:4] = nodesReceiver
        # Get nodes coordinates for i and insert
        coordEle = nodes[nodesReceiver, :]
        coordEle = coordEle.flatten()
        data_receiver[i, 4:16] = coordEle
        # Get indexes of faces for i and insert
        facesReceiver = elemsF[iEle, :]
        data_receiver[i, 16:20] = facesReceiver
        # Get edges indexes for faces in i and insert
        edgesReceiver = facesE[facesReceiver, :]
        edgesReceiver = edgesReceiver.flatten()
        data_receiver[i, 20:32] = edgesReceiver
        # Get indexes of edges for i and insert
        edgesReceiver = elemsE[iEle, :]
        data_receiver[i, 32:38] = edgesReceiver
        # Get node indexes for edges in i and insert
        edgesNodesReceiver = edgesNodes[edgesReceiver, :]
        edgesNodesReceiver = edgesNodesReceiver.flatten()
        data_receiver[i, 38:50] = edgesNodesReceiver
        # Get receiver coordinates
        coordReceiver = receivers[i, :]
        data_receiver[i, 50:53] = coordReceiver
        # Get dofs for the receiver element and insert
        dofsReceiver = dofsElement
        data_receiver[i, 53::] = dofsReceiver

    # Get matrix dimensions
    size = data_receiver.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], data_receiver)

    # Build path to save the file
    out_path = output.directory_scratch + '/receivers.dat'

    if parEnv.rank == 0:
        # Write PETGEM receivers in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # Build data for source insertion
    vector = np.zeros(50+num_dof_in_element, dtype=np.float)

    # Get indexes of nodes for srcElem and insert
    nodesSource = elemsN[srcElem, :]
    vector[0:4] = nodesSource
    # Get nodes coordinates for srcElem and insert
    coordSource = nodes[nodesSource, :]
    coordSource = coordSource.flatten()
    vector[4:16] = coordSource
    # Get indexes of faces for srcElem and insert
    facesSource = elemsF[srcElem, :]
    vector[16:20] = facesSource
    # Get edges indexes for faces in srcElem and insert
    edgesFace = facesE[facesSource, :]
    edgesFace = edgesFace.flatten()
    vector[20:32] = edgesFace
    # Get indexes of edges for srcElem and insert
    edgesSource = elemsE[srcElem, :]
    vector[32:38] = edgesSource
    # Get node indexes for edges in srcElem and insert
    edgesNodesSource = edgesNodes[edgesSource, :]
    edgesNodesSource = edgesNodesSource.flatten()
    vector[38:50] = edgesNodesSource
    # Get dofs for srcElem and insert
    dofsSource = dofs[srcElem, :]
    vector[50::] = dofsSource

    # Build PETSc structures
    vector = createSequentialVectorWithArray(vector)

    # Build path to save the file
    out_path = output.directory_scratch + '/source.dat'

    if parEnv.rank == 0:
        # Write PETGEM source in PETSc format
        writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    # ---------------------------------------------------------------
    # Sparsity pattern
    # ---------------------------------------------------------------
    # Setup valence for each basis order (adding a small percentage to keep safe)
    valence = np.array([50, 200, 400, 800, 1400, 2500])

    # Build nnz pattern for each row
    nnz = np.full((total_num_dofs), valence[model.basis_order-1], dtype=np.int)

    # Build PETSc structures
    vector = createSequentialVectorWithArray(nnz)

    # Build path to save the file
    out_path = output.directory_scratch + '/nnz.dat'

    if parEnv.rank == 0:
        # Write PETGEM nnz in PETSc format
        writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    # ---------------------------------------------------------------
    # Print mesh statistics
    # ---------------------------------------------------------------
    Print.master(' ')
    Print.master(' Mesh statistics')
    Print.master(' Number of elements: {0:12}'.format(str(nElems)))
    Print.master(' Number of faces: {0:12}'.format(str(nFaces)))
    Print.master(' Number of edges: {0:12}'.format(str(nEdges)))
    Print.master(' Number of dofs: {0:12}'.format(str(total_num_dofs)))
    Print.master(' Number of boundaries: {0:12}'.format(str(len(indx_boundary_dofs))))

    # ---------------------------------------------------------------
    # Print data model
    # ---------------------------------------------------------------
    Print.master(' ')
    Print.master(' Model data')
    Print.master(' Number of materials: {0:12}'.format(str(np.max(elemsS)+1)))
    Print.master(' Vector basis order: {0:12}'.format(str(model.basis_order)))
    Print.master(' Frequency (Hz): {0:12}'.format(str(model.frequency)))
    Print.master(' Source position (xyz): {0:12}'.format(str(model.src_position)))
    Print.master(' Source azimuth: {0:12}'.format(str(model.src_azimuth)))
    Print.master(' Source dip: {0:12}'.format(str(model.src_dip)))
    Print.master(' Source current: {0:12}'.format(str(model.src_current)))
    Print.master(' Source length: {0:12}'.format(str(model.src_length)))
    Print.master(' Sigma horizontal: {0:12}'.format(str(model.sigma_horizontal)))
    Print.master(' Sigma vertical: {0:12}'.format(str(model.sigma_vertical)))
    Print.master(' Number of receivers: {0:12}'.format(str(nReceivers)))

    # Apply barrier for MPI tasks alignment
    parEnv.comm.barrier()

    # Stop timer
    Timers()["Preprocessing"].stop()
    b = points_3d[simple[1]]
    c = points_3d[simple[2]]
    mesh.add_facet((a, b, c))
    # print(a, b, c)

for simple in tri.simplices[mask]:
    a1 = [points_3d[simple[0]][0], points_3d[simple[0]][1], points_3d[simple[0]][2]]
    a1[2] = zscale * zmin + args.base
    b1 = [points_3d[simple[1]][0], points_3d[simple[1]][1], points_3d[simple[1]][2]]
    b1[2] = zscale * zmin + args.base
    c1 = [points_3d[simple[2]][0], points_3d[simple[2]][1], points_3d[simple[2]][2]]
    c1[2] = zscale * zmin + args.base
    mesh.add_facet((a1, b1, c1))
    # print(a1, b1, c1)

tri.simplices = tri.simplices[mask]

ch = ConcaveHull()
ch.loadpoints(points_2d)
ch.calculatehull()
hull_points = np.vstack(ch.boundary.exterior.coords.xy).T.tolist()
# print(hull_points[:2])

points_2d = points_2d.tolist()
# print(points_2d[:2])

for i, hp in enumerate(hull_points):
    if i == len(hull_points) - 1:
        continue
    # print(hp)
    ind = points_2d.index(hp)
# ---------------------------------------------------------------
# Preprocessing receivers
# ---------------------------------------------------------------
Print.master(' Receivers')

# Setup receivers
receivers = np.vstack((inline, broadside))

# Number of receivers
nReceivers = receivers.shape[0]

# Build Delaunay triangulation with nodes
tri = Delaunay(nodes)

# Overwrite Delaunay structure with mesh_file connectivity and points
tri.simplices = elemsN.astype(np.int32)
tri.vertices = elemsN.astype(np.int32)

# Find out which tetrahedral element the receiver points are in
recvElems = tri.find_simplex(receivers, bruteforce=True, tol=1.e-12)

# Determine if all receiver points were found
idx = np.where(np.logical_or(recvElems > nElems, recvElems < 0))[0]

# If idx is not empty, there are receivers outside the domain
if idx.size != 0:
    Print.master(' The following receivers were not located and will not '
                 'be taken into account ' + str(idx))
    # Update number of receivers
    nReceivers = nReceivers - len(idx)
x_res = 141.7/512
y_res = x_res
z_res = .8
res_arr = np.array([x_res, y_res, z_res])

x_pix = 512
y_pix = x_pix
z_pix = 151

mesh = meshio.read('ProcessedMSHs/Gel4I.msh')
points_pix = mesh.points/res_arr
mins = np.floor(points_pix.min(0)).astype(int)
maxs = np.ceil(points_pix.max(0)).astype(int)
test_points = np.array(np.meshgrid(np.arange(mins[0], maxs[0]+1),
                                   np.arange(mins[1], maxs[1]+1),
                                   np.arange(mins[2], maxs[2]+1))).T.reshape(-1, 3)

tri = Delaunay(points_pix)
tri.simplices = mesh.cells['triangle'].astype(np.int32)
output = np.asarray([tri.find_simplex(point) for point in test_points])
ind = np.argwhere(output >= 0).flatten()
inside_points = test_points[ind]

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(inside_points[:, 0], inside_points[:, 1], inside_points[:, 2], s=3, c='g')
plt.show()
def run(self, inputSetup):
    """Run a preprocessing task.

    :param obj inputSetup: inputSetup object.
    :return: None
    """
    # ---------------------------------------------------------------
    # Obtain the MPI environment
    # ---------------------------------------------------------------
    parEnv = MPIEnvironment()

    # Start timer
    Timers()["Preprocessing"].start()

    # ---------------------------------------------------------------
    # Preprocessing (sequential task)
    # ---------------------------------------------------------------
    if (parEnv.rank == 0):
        # Parameters shortcut (for code legibility)
        model = inputSetup.model
        run = inputSetup.run
        output = inputSetup.output
        out_dir = output.get('directory_scratch')

        # Compute number of dofs per element
        basis_order = run.get('nord')
        num_dof_in_element = np.int(basis_order * (basis_order + 2) * (basis_order + 3) / 2)

        if (model.get('mode') == 'csem'):
            mode = 'csem'
        elif (model.get('mode') == 'mt'):
            mode = 'mt'

        # Get data model
        data_model = model.get(mode)

        # ---------------------------------------------------------------
        # Import mesh file
        # ---------------------------------------------------------------
        mesh_file = model.get('mesh')

        # Import mesh
        mesh = meshio.read(mesh_file)

        # Number of elements
        size = mesh.cells[0][1][:].shape
        nElems = size[0]

        # ---------------------------------------------------------------
        # Preprocessing nodal coordinates
        # ---------------------------------------------------------------
        Print.master(' Nodal coordinates')

        # Build coordinates in PETGEM format where each row represents the
        # xyz coordinates of the 4 nodes of a tetrahedral element
        num_dimensions = 3
        num_nodes_per_element = 4
        data = mesh.points[mesh.cells[0][1][:], :]
        data = data.reshape(nElems, num_dimensions * num_nodes_per_element)

        # Get matrix dimensions
        size = data.shape

        # Build PETSc structures
        matrix = createSequentialDenseMatrixWithArray(size[0], size[1], data)

        # Build path to save the file
        out_path = out_dir + '/nodes.dat'

        # Write PETGEM nodes in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

        # Remove temporary matrix
        del matrix

        # ---------------------------------------------------------------
        # Preprocessing mesh connectivity
        # ---------------------------------------------------------------
        Print.master(' Mesh connectivity')

        # Get matrix dimensions
        size = mesh.cells[0][1][:].shape

        # Build PETSc structures
        matrix = createSequentialDenseMatrixWithArray(size[0], size[1], mesh.cells[0][1][:])

        # Build path to save the file
        out_path = out_dir + '/meshConnectivity.dat'

        # Write PETGEM connectivity in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

        # Remove temporary matrix
        del matrix

        # ---------------------------------------------------------------
        # Preprocessing edges connectivity
        # ---------------------------------------------------------------
        Print.master(' Edges connectivity')

        # Compute edges
        elemsE, edgesNodes = computeEdges(mesh.cells[0][1][:], nElems)
        nEdges = edgesNodes.shape[0]

        # Get matrix dimensions
        size = elemsE.shape

        # Build PETSc structures
        matrix = createSequentialDenseMatrixWithArray(size[0], size[1], elemsE)

        # Build path to save the file
        out_path = out_dir + '/edges.dat'

        # Write PETGEM edges in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

        # Remove temporary matrix
        del matrix

        # Reshape edgesNodes and save
        num_nodes_per_edge = 2
        num_edges_per_element = 6
        data = np.array((edgesNodes[elemsE[:], :]), dtype=np.float)
        data = data.reshape(nElems, num_nodes_per_edge * num_edges_per_element)

        # Get matrix dimensions
        size = data.shape

        # Build PETSc structures
        matrix = createSequentialDenseMatrixWithArray(size[0], size[1], data)

        # Build path to save the file
        out_path = out_dir + '/edgesNodes.dat'

        # Write PETGEM edgesNodes in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

        # Remove temporary matrix
        del matrix

        # ---------------------------------------------------------------
        # Preprocessing faces connectivity
        # ---------------------------------------------------------------
        Print.master(' Faces connectivity')

        # Compute faces
        elemsF, facesN = computeFaces(mesh.cells[0][1][:], nElems)
        nFaces = facesN.shape[0]

        # Get matrix dimensions
        size = elemsF.shape

        # Build PETSc structures
        matrix = createSequentialDenseMatrixWithArray(size[0], size[1], elemsF)

        # Build path to save the file
        out_path = out_dir + '/faces.dat'

        # Write PETGEM faces in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

        # Remove temporary matrix
        del matrix

        # ---------------------------------------------------------------
        # Preprocessing faces-edges connectivity
        # ---------------------------------------------------------------
        Print.master(' Faces-edges connectivity')

        facesE = computeFacesEdges(elemsF, elemsE, nFaces, nElems)

        num_faces_per_element = 4
        num_edges_per_face = 3
        data = np.array((facesE[elemsF[:], :]), dtype=np.float)
        data = data.reshape(nElems, num_faces_per_element * num_edges_per_face)

        # Get matrix dimensions
        size = data.shape

        # Build PETSc structures
        matrix = createSequentialDenseMatrixWithArray(size[0], size[1], data)

        # Build path to save the file
        out_path = out_dir + '/facesEdges.dat'

        # Write PETGEM faces-edges in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)
        del matrix

        # ---------------------------------------------------------------
        # Preprocessing dofs connectivity
        # ---------------------------------------------------------------
        Print.master(' DOFs connectivity')

        # Compute degrees of freedom connectivity
        basis_order = run.get('nord')
        dofs, dof_edges, dof_faces, _, total_num_dofs = computeConnectivityDOFS(
            elemsE, elemsF, basis_order)

        # Get matrix dimensions
        size = dofs.shape

        # Build PETSc structures
        matrix = createSequentialDenseMatrixWithArray(size[0], size[1], dofs)

        # Build path to save the file
        out_path = out_dir + '/dofs.dat'

        # Write PETGEM dofs in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)
        del matrix

        # ---------------------------------------------------------------
        # Preprocessing sigma model
        # ---------------------------------------------------------------
        Print.master(' Conductivity model')

        i_model = data_model.get('sigma')

        if (run.get('conductivity_from_file')):
            # Open sigma file
            sigma_file = i_model.get('file')
            fileID = h5py.File(sigma_file, 'r')

            # Read sigma file
            conductivityModel = fileID.get('data')[()]
        else:
            # Get physical groups
            elemsS = mesh.cell_data['gmsh:physical'][0]
            elemsS -= np.int(1)  # 0-based indexing

            # Get horizontal and vertical sigma
            horizontal_sigma = i_model.get('horizontal')
            vertical_sigma = i_model.get('vertical')

            # Allocate conductivity array
            conductivityModel = np.zeros((nElems, 2), dtype=np.float)

            for i in np.arange(nElems):
                # Set horizontal sigma
                conductivityModel[i, 0] = horizontal_sigma[np.int(elemsS[i])]
                # Set vertical sigma
                conductivityModel[i, 1] = vertical_sigma[np.int(elemsS[i])]

        # Get matrix dimensions
        size = conductivityModel.shape

        # Build PETSc structures
        matrix = createSequentialDenseMatrixWithArray(size[0], size[1], conductivityModel)

        # Build path to save the file
        out_path = out_dir + '/conductivityModel.dat'

        # Write PETGEM conductivity model in PETSc format
        writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)
        del matrix

        # ---------------------------------------------------------------
        # Preprocessing boundaries
        # ---------------------------------------------------------------
        Print.master(' Boundaries')

        # Compute boundary faces
        bFacesN, bFaces, nbFaces = computeBoundaryFaces(elemsF, facesN)

        # Build array with boundary dofs for csem mode (dirichlet BC)
        if (mode == 'csem'):
            # Compute boundary edges
            bEdges = computeBoundaryEdges(edgesNodes, bFacesN)

            # Compute dofs on boundaries
            _, indx_boundary_dofs = computeBoundaries(dofs, dof_edges, dof_faces,
                                                      bEdges, bFaces, basis_order)

            # Build PETSc structures
            vector = createSequentialVectorWithArray(indx_boundary_dofs)

            # Build path to save the file
            out_path = out_dir + '/boundaries.dat'

            # Write PETGEM boundaries in PETSc format
            writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)
            del vector
        elif (mode == 'mt'):
            # Compute to what plane the boundary face belongs
            planeFace = computeFacePlane(mesh.points, bFaces, bFacesN)

            # Compute boundary elements
            bElems, numbElems = computeBoundaryElements(elemsF, bFaces, nFaces)

            if (nbFaces != numbElems):
                Print.master(' Number of boundary faces is not consistent.')
                exit(-1)

            # Allocate
            data_boundaries = np.zeros((nbFaces, 53 + num_dof_in_element), dtype=np.float)

            # Fill tmp matrix with data for boundary faces
            for i in np.arange(nbFaces):
                # Get index of tetrahedral element (boundary element)
                iEle = bElems[i]
                # Get dofs of element container
                dofsElement = dofs[iEle, :]
                # Get indexes of nodes for i-boundary element and insert
                nodesBoundaryElement = mesh.cells[0][1][iEle, :]
                data_boundaries[i, 0:4] = nodesBoundaryElement
                # Get nodes coordinates for i-boundary element and insert
                coordEle = mesh.points[nodesBoundaryElement, :]
                coordEle = coordEle.flatten()
                data_boundaries[i, 4:16] = coordEle
                # Get indexes of faces for i-boundary element and insert
                facesBoundaryElement = elemsF[iEle, :]
                data_boundaries[i, 16:20] = facesBoundaryElement
                # Get edges indexes for faces in i-boundary element and insert
                edgesBoundaryFace = facesE[facesBoundaryElement, :]
                edgesBoundaryFace = edgesBoundaryFace.flatten()
                data_boundaries[i, 20:32] = edgesBoundaryFace
                # Get indexes of edges for i-boundary and insert
                edgesBoundaryElement = elemsE[iEle, :]
                data_boundaries[i, 32:38] = edgesBoundaryElement
                # Get node indexes for edges in i-boundary and insert
                edgesNodesBoundaryElement = edgesNodes[edgesBoundaryElement, :]
                edgesNodesBoundaryElement = edgesNodesBoundaryElement.flatten()
                data_boundaries[i, 38:50] = edgesNodesBoundaryElement
                # Get plane face
                ifacetype = planeFace[i]
                data_boundaries[i, 50] = ifacetype
                # Get global face index
                localFaceIndex = bFaces[i]
                data_boundaries[i, 51] = localFaceIndex
                # Get sigma value
                sigmaEle = conductivityModel[iEle, 0]
                data_boundaries[i, 52] = sigmaEle
                # Get dofs for boundary element and insert
                dofsBoundaryElement = dofsElement
                data_boundaries[i, 53::] = dofsBoundaryElement

            # Get matrix dimensions
            size = data_boundaries.shape

            # Build PETSc structures
            matrix = createSequentialDenseMatrixWithArray(size[0], size[1], data_boundaries)

            # Build path to save the file
            out_path = out_dir + '/boundaryElements.dat'

            # Write PETGEM boundary elements in PETSc format
            writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)
            del matrix
            del data_boundaries

        # ---------------------------------------------------------------
        # Preprocessing receivers
        # ---------------------------------------------------------------
        Print.master(' Receivers')

        # Open receivers_file
        receivers_file = model.get('receivers')
        fileID = h5py.File(receivers_file, 'r')

        # Read receivers
        receivers = fileID.get('data')[()]

        # Number of receivers
        if receivers.ndim == 1:
            nReceivers = 1
        else:
            dim = receivers.shape
            nReceivers = dim[0]

        # Find out which tetrahedral element the source point is in (only for csem mode)
        if (mode == 'csem'):
            # Allocate vector to save source data
            data_source = np.zeros(50 + num_dof_in_element, dtype=np.float)

            i_model = data_model.get('source')

            # Get source position
            i_source_position = np.asarray(i_model.get('position'), dtype=np.float)

            # Build Delaunay triangulation with nodes
            tri = Delaunay(mesh.points)

            # Overwrite Delaunay structure with mesh_file connectivity and points
            tri.simplices = mesh.cells[0][1][:].astype(np.int32)
            tri.vertices = mesh.cells[0][1][:].astype(np.int32)

            srcElem = tri.find_simplex(i_source_position, bruteforce=True, tol=1.e-12)

            # If srcElem=-1, source not located
            if srcElem < 0:
                Print.master(' Source not located in the computational domain. '
                             'Please, verify source position or improve the mesh quality.')
                exit(-1)

            # Build data for source insertion
            # Get indexes of nodes for srcElem and insert
            nodesSource = mesh.cells[0][1][srcElem, :]
            data_source[0:4] = nodesSource
            # Get nodes coordinates for srcElem and insert
            coordSource = mesh.points[nodesSource, :]
            coordSource = coordSource.flatten()
            data_source[4:16] = coordSource
            # Get indexes of faces for srcElem and insert
            facesSource = elemsF[srcElem, :]
            data_source[16:20] = facesSource
            # Get edges indexes for faces in srcElem and insert
            edgesFace = facesE[facesSource, :]
            edgesFace = edgesFace.flatten()
            data_source[20:32] = edgesFace
            # Get indexes of edges for srcElem and insert
            edgesSource = elemsE[srcElem, :]
            data_source[32:38] = edgesSource
            # Get node indexes for edges in srcElem and insert
            edgesNodesSource = edgesNodes[edgesSource, :]
            edgesNodesSource = edgesNodesSource.flatten()
            data_source[38:50] = edgesNodesSource
            # Get dofs for srcElem and insert
            dofsSource = dofs[srcElem, :]
            data_source[50::] = dofsSource

            # Get matrix dimensions
            size = data_source.shape

            # Build PETSc structures
            vector = createSequentialVectorWithArray(data_source)

            # Build path to save the file
            out_path = out_dir + '/source.dat'

            # Write PETGEM source in PETSc format
            writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)
            del vector

        # ---------------------------------------------------------------
        # Sparsity pattern
        # ---------------------------------------------------------------
        # Setup valence for each basis order (adding a small percentage to keep safe)
        valence = np.array([50, 200, 400, 800, 1400, 2500])

        # Build nnz pattern for each row
        nnz = np.full((total_num_dofs), valence[basis_order - 1], dtype=np.int)

        # Build PETSc structures
        vector = createSequentialVectorWithArray(nnz)

        # Build path to save the file
        out_path = out_dir + '/nnz.dat'

        # Write PETGEM nnz in PETSc format
        writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

        # ---------------------------------------------------------------
        # Print mesh statistics
        # ---------------------------------------------------------------
        Print.master(' ')
        Print.master(' Mesh statistics')
        Print.master(' Mesh file: {0:12}'.format(str(model.get('mesh'))))
        Print.master(' Number of elements: {0:12}'.format(str(nElems)))
        Print.master(' Number of faces: {0:12}'.format(str(nFaces)))
        Print.master(' Number of edges: {0:12}'.format(str(nEdges)))
        Print.master(' Number of dofs: {0:12}'.format(str(total_num_dofs)))
        if (mode == 'csem'):
            Print.master(' Number of boundaries: {0:12}'.format(str(len(indx_boundary_dofs))))

        # ---------------------------------------------------------------
        # Print data model
        # ---------------------------------------------------------------
        Print.master(' ')
        Print.master(' Model data')
        Print.master(' Modeling mode: {0:12}'.format(str(mode)))
        i_sigma = data_model.get('sigma')
        if (run.get('conductivity_from_file')):
            Print.master(' Conductivity file: {0:12}'.format(i_sigma.get('file')))
        else:
            Print.master(' Horizontal conductivity: {0:12}'.format(str(i_sigma.get('horizontal'))))
            Print.master(' Vertical conductivity: {0:12}'.format(str(i_sigma.get('vertical'))))
        if (mode == 'csem'):
            i_source = data_model.get('source')
            Print.master(' Source:')
            Print.master(' - Frequency (Hz): {0:12}'.format(str(i_source.get('frequency'))))
            Print.master(' - Position (xyz): {0:12}'.format(str(i_source.get('position'))))
            Print.master(' - Azimuth: {0:12}'.format(str(i_source.get('azimuth'))))
            Print.master(' - Dip: {0:12}'.format(str(i_source.get('dip'))))
            Print.master(' - Current: {0:12}'.format(str(i_source.get('current'))))
            Print.master(' - Length: {0:12}'.format(str(i_source.get('length'))))
        else:
            Print.master(' Frequency (Hz): {0:12}'.format(str(data_model.get('frequency'))))
            Print.master(' Polarization: {0:12}'.format(str(data_model.get('polarization'))))
        Print.master(' Vector basis order: {0:12}'.format(str(basis_order)))
        Print.master(' Receivers file: {0:12}'.format(str(model.get('receivers'))))
        Print.master(' Number of receivers: {0:12}'.format(str(nReceivers)))
        Print.master(' VTK output: {0:12}'.format(str(output.get('vtk'))))
        Print.master(' Cuda support: {0:12}'.format(str(run.get('cuda'))))
        Print.master(' Output directory: {0:12}'.format(str(output.get('directory'))))
        Print.master(' Scratch directory: {0:12}'.format(str(output.get('directory_scratch'))))

    # Stop timer
    Timers()["Preprocessing"].stop()

    # Apply barrier for MPI tasks alignment
    parEnv.comm.barrier()

    return
def create_alpha_mask(points, distance_limit, resolution_x=1000,
                      resolution_y=1000, visualization=True):
    """
    Creates interpolation grid, then masks over the alpha shape spanned up by
    points and defined by distance_limit.

    @params:
        points          - Required : points spanning up the alpha shape
        distance_limit  - Required : distance threshold for removing Delaunay simplices
        resolution_x    - Optional : resolution for grid in x, default is 1000
        resolution_y    - Optional : resolution for grid in y, default is 1000
        visualization   - Optional : boolean for visualizing the result, default is True

    Returns:
        grid_mask : An array containing 1 for cells inside, and 0 for cells outside
    """
    import numpy as np
    from scipy.spatial import Delaunay
    from matplotlib.collections import LineCollection
    import matplotlib.path as mplPath

    #----------------------------------------------------------------------
    # Create Grid
    #----------------------------------------------------------------------
    # Create meshgrid
    xi = np.transpose(np.linspace(min(points[:, 0]), max(points[:, 0]), resolution_x))
    yi = np.transpose(np.linspace(min(points[:, 1]), max(points[:, 1]), resolution_y))
    X, Y = np.meshgrid(xi, yi)

    # Reshape into vectors
    gridpoints_x = np.reshape(X, resolution_x * resolution_y)
    gridpoints_y = np.reshape(Y, resolution_x * resolution_y)

    # Combine into gridpoints array
    gridpoints = np.transpose(np.asarray((gridpoints_x, gridpoints_y)))

    #----------------------------------------------------------------------
    # Create Alpha Shape
    #----------------------------------------------------------------------
    # Start Delaunay triangulation
    tri = Delaunay(points)

    # Auxiliary function for plotting, if required
    if visualization == True:
        import matplotlib.pyplot as plt
        edges = set()
        edge_points = []

        def add_edge(i, j):
            """Add a line between the i-th and j-th points, if not in the list already"""
            if (i, j) in edges or (j, i) in edges:
                # already added
                return
            edges.add((i, j))
            edge_points.append(points[[i, j]])

    # Remove simplices outside of distance_limit
    simplex_flag = np.zeros(len(tri.simplices[:, 0]))  # Flags bad simplices
    counter = 0
    for ia, ib, ic in tri.vertices:
        # ia, ib, ic = indices of corner points of the triangle
        if np.sqrt((points[ia, 0]-points[ib, 0])**2+(points[ia, 1]-points[ib, 1])**2) < distance_limit and \
           np.sqrt((points[ia, 0]-points[ic, 0])**2+(points[ia, 1]-points[ic, 1])**2) < distance_limit and \
           np.sqrt((points[ib, 0]-points[ic, 0])**2+(points[ib, 1]-points[ic, 1])**2) < distance_limit:
            # do nothing
            simplex_flag[counter] = 0
        else:
            # simplex has at least one side larger than threshold, flag it
            simplex_flag[counter] = 1
        counter += 1
    tri.simplices = tri.simplices[simplex_flag == 0, :]  # Remove bad simplices
    tri.vertices = tri.vertices[simplex_flag == 0, :]    # Remove bad simplices

    # Visualize, if requested
    if visualization == True:
        # Mark all remaining simplices
        for ia, ib, ic in tri.vertices:
            add_edge(ia, ib)
            add_edge(ib, ic)
            add_edge(ic, ia)
        # Draw them
        lines = LineCollection(edge_points)
        plt.figure()
        plt.gca().add_collection(lines)
        plt.plot(points[:, 0], points[:, 1], 'o')

    #----------------------------------------------------------------------
    # Mask over Alpha Shape
    #----------------------------------------------------------------------
    # Prepare point flag
    flag_gridpoints = np.zeros(len(gridpoints[:, 0]), dtype=np.int)

    # Evaluate gridpoints
    for sim in range(len(tri.simplices[:, 0])):
        # Print progress bar
        cv = sim
        mv = len(tri.simplices[:, 0]) - 1
        print('\r%s |%s| %s%% %s' % ('Masking: ',
                                     '\033[33m' + '█' * int(50 * cv // mv) +
                                     '-' * (50 - int(50 * cv // mv)) + '\033[0m',
                                     ("{0:." + str(1) + "f}").format(100 * (cv / float(mv))),
                                     ' Complete'), end='\r')

        # Create simplex path
        bbPath = mplPath.Path(np.array([points[tri.simplices[sim, 0], :],
                                        points[tri.simplices[sim, 1], :],
                                        points[tri.simplices[sim, 2], :],
                                        points[tri.simplices[sim, 0], :]]))

        # Flag points that are inside this simplex
        for gridpts in range(len(gridpoints[:, 0])):
            if flag_gridpoints[gridpts] == 0:  # only process points not already allocated
                if bbPath.contains_point((gridpoints[gridpts, 0], gridpoints[gridpts, 1])) == True:
                    flag_gridpoints[gridpts] = 1

    # Plot, if required
    if visualization == True:
        plt.scatter(gridpoints[flag_gridpoints == 1, 0],
                    gridpoints[flag_gridpoints == 1, 1], color='g')
        plt.scatter(gridpoints[flag_gridpoints == 0, 0],
                    gridpoints[flag_gridpoints == 0, 1], color='r')

    # Reshape flag_gridpoints into a 2D array
    global grid_mask
    grid_mask = np.reshape(flag_gridpoints, (resolution_y, resolution_x))

    # Return result
    return grid_mask
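# A minimal usage sketch for create_alpha_mask (illustrative values only: the
# random point cloud, distance threshold and grid resolution are made up).
import numpy as np

pts = np.random.rand(500, 2) * 10.0                  # scattered 2D points
mask = create_alpha_mask(pts, distance_limit=1.5,
                         resolution_x=200, resolution_y=200,
                         visualization=False)
print(mask.shape)                                    # (200, 200): 1 inside the alpha shape, 0 outside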
def morph_face(src_face, src_pts, tgt_pts, tgt_width, tgt_height):
    src_y, src_x, _ = src_face.shape
    if not src_x == tgt_width and not src_y == tgt_height:
        adjusted_face = cv2.resize(src_face, (tgt_width, tgt_height),
                                   interpolation=cv2.INTER_NEAREST)
        adjusted_pts = np.copy(src_pts)
        adjusted_pts[:, 0] = adjusted_pts[:, 0] * float(tgt_width) / float(src_x)
        adjusted_pts[:, 1] = adjusted_pts[:, 1] * float(tgt_height) / float(src_y)
        adjusted_pts = np.rint(adjusted_pts).astype(dtype=int)
    else:
        adjusted_face = np.copy(src_face)
        adjusted_pts = np.copy(src_pts)

    s_pts = np.append(adjusted_pts, [[0, 0], [tgt_width - 1, 0],
                                     [tgt_width - 1, tgt_height - 1], [0, tgt_height - 1]], axis=0)
    t_pts = np.append(tgt_pts, [[0, 0], [tgt_width - 1, 0],
                                [tgt_width - 1, tgt_height - 1], [0, tgt_height - 1]], axis=0)
    avg_shape = (s_pts + t_pts) / 2.0

    tri = Delaunay(avg_shape)
    src_tri = Delaunay(s_pts)
    src_tri.simplices = tri.simplices.copy()
    tgt_tri = Delaunay(t_pts)
    tgt_tri.simplices = tri.simplices.copy()

    morphed_face = np.empty(shape=(tgt_height, tgt_width, 3))

    src_mat = []
    for t in tri.simplices:
        p1 = src_tri.points[t[0]]
        p2 = src_tri.points[t[1]]
        p3 = src_tri.points[t[2]]
        A = np.matrix([[p1[0], p2[0], p3[0]],
                       [p1[1], p2[1], p3[1]],
                       [1, 1, 1]])
        src_mat.append(A)

    tri_mat = []
    for t in tri.simplices:
        p1 = tgt_tri.points[t[0]]
        p2 = tgt_tri.points[t[1]]
        p3 = tgt_tri.points[t[2]]
        A = np.matrix([[p1[0], p2[0], p3[0]],
                       [p1[1], p2[1], p3[1]],
                       [1, 1, 1]])
        tri_mat.append((A, A.I))

    y = np.arange(0, tgt_height)
    x = np.arange(0, tgt_width)
    points = np.array(list(itertools.product(x, y)))
    one_column = np.ones(points.shape[0])[np.newaxis].T
    points_homo = np.transpose(np.hstack((points, one_column)), axes=(1, 0))
    inTri = Delaunay.find_simplex(tgt_tri, points, bruteforce=True)

    count = 0
    for t in tri.simplices:
        indices = np.where(inTri == count)[0]
        tri_bary = np.dot(tri_mat[count][1], np.take(points_homo, indices=indices, axis=1))
        src_pixels = np.array(np.rint(np.dot(src_mat[count], tri_bary)[0:2, :]).astype(dtype=int).T)
        tgt_pixels = np.take(points, indices=indices, axis=0)
        morphed_face[tgt_pixels[:, 1], tgt_pixels[:, 0]] = adjusted_face[src_pixels[:, 1], src_pixels[:, 0]]
        count = count + 1

    # plt.imshow(morphed_face)
    # plt.show()
    # misc.imsave("morphed_face.png", morphed_face)
    return morphed_face

# morph_face(np.asarray(Image.open("./Faces/Marq.png")), np.asarray(Image.open("./Faces/Marq.png")),
#            np.loadtxt("./Faces/Marq.csv"), np.loadtxt("./Faces/Smile.csv"), 1000, 667)
def fieldInterpolator(solution_vector, nodes, elemsN, elemsE, edgesN, elemsF,
                      facesE, dof_connectivity, points, inputSetup):
    """Interpolate electromagnetic field for a set of 3D points.

    :param ndarray-petsc solution_vector: vector field to be interpolated
    :param ndarray nodes: nodal coordinates
    :param ndarray elemsN: elements-node connectivity with dimensions = (number_elements, 4)
    :param ndarray elemsE: elements-edge connectivity with dimensions = (number_elements, 6)
    :param ndarray edgesN: edge-node connectivity with dimensions = (number_edges, 2)
    :param ndarray elemsF: element-faces connectivity with dimensions = (number_elements, 4)
    :param ndarray facesE: face-edges connectivity with dimensions = (number_faces, 3)
    :param ndarray dof_connectivity: local/global dofs list for elements, dofs index on edges, dofs index on faces, dofs index on volumes, total number of dofs
    :param ndarray points: point coordinates
    :param obj inputSetup: inputSetup object.
    :return: electromagnetic fields for a set of 3D points
    :rtype: ndarray and int
    """
    # ---------------------------------------------------------------
    # Initialization
    # ---------------------------------------------------------------
    # Parameters shortcut (for code legibility)
    model = inputSetup.model
    run = inputSetup.run
    basis_order = run.get('nord')

    if (model.get('mode') == 'csem'):
        mode = 'csem'
        data_model = model.get(mode)  # Get data model
        frequency = data_model.get('source').get('frequency')
    elif (model.get('mode') == 'mt'):
        mode = 'mt'
        data_model = model.get(mode)  # Get data model
        frequency = data_model.get('frequency')

    omega = frequency * 2. * np.pi
    mu = 4. * np.pi * 1e-7
    Const = np.sqrt(-1. + 0.j) * omega * mu

    # Number of elements
    size = elemsN.shape
    nElems = size[0]

    # Number of nodes
    #size = nodes.shape
    #nNodes = size[0]

    # Number of dofs per element
    num_dof_in_element = np.int(basis_order * (basis_order + 2) * (basis_order + 3) / 2)

    # Number of points
    if points.ndim == 1:
        nPoints = 1
    else:
        dim = points.shape
        nPoints = dim[0]

    # Find where receivers are located
    tri = Delaunay(nodes)

    # Overwrite Delaunay structure with mesh_file connectivity and points
    tri.simplices = elemsN.astype(np.int32)
    tri.vertices = elemsN.astype(np.int32)

    # Find out which tetrahedral element the points are in
    points_in_elements = tri.find_simplex(points, bruteforce=True, tol=1.e-12)

    # Determine if all points were found
    idx = np.where(np.logical_or(points_in_elements > nElems,
                                 points_in_elements < 0))[0]

    # If idx is not empty, there are points outside the domain
    if idx.size != 0:
        Print.master(' The following receivers were not located and will not '
                     'be taken into account ' + str(idx))
        # Update number of receivers
        nPoints = nPoints - len(idx)

        if nPoints == 0:
            Print.master(' No point has been found. Nothing to do. Aborting')
            exit(-1)

        # Remove idx from points matrix
        points = np.delete(points, idx, axis=0)

        # Remove idx from points_in_elements
        points_in_elements = np.delete(points_in_elements, idx, axis=0)

    indx_ele = points_in_elements

    # Allocate array
    fields = np.zeros((nPoints, 6), dtype=np.complex)

    # Interpolate electromagnetic field for all points
    for i in np.arange(nPoints):
        # Get element index
        iEle = indx_ele[i]
        # Get dofs of element container
        dofsEle = dof_connectivity[iEle, :]
        # Get receiver coordinates
        coordPoints = points[i, :]
        # Get indexes of nodes for iEle
        nodesEle = elemsN[iEle, :]
        # Get nodes coordinates for iEle
        coordEle = nodes[nodesEle, :]
        # Get indexes of faces for iEle
        facesEle = elemsF[iEle, :]
        # Get edges indexes for faces
        edgesFace = facesE[facesEle, :]
        # Get indexes of edges for iEle
        edgesEle = elemsE[iEle, :]
        # Get node indexes for edges in i and insert
        edgesNodesEle = edgesN[edgesEle, :]
        # Compute jacobian for iEle
        jacobian, invjacobian = computeJacobian(coordEle)
        # Compute global orientation for iEle
        edge_orientation, face_orientation = computeElementOrientation(
            edgesEle, nodesEle, edgesNodesEle, edgesFace)
        # Transform xyz source position to XiEtaZeta coordinates (reference tetrahedral element)
        XiEtaZeta = tetrahedronXYZToXiEtaZeta(coordEle, coordPoints)
        # Compute basis for i
        basis, curl_basis = computeBasisFunctions(edge_orientation, face_orientation,
                                                  jacobian, invjacobian,
                                                  basis_order, XiEtaZeta)
        # Get global dofs from x vector
        realField = np.real(solution_vector.getValues(dofsEle.astype(PETSc.IntType)))
        imagField = np.imag(solution_vector.getValues(dofsEle.astype(PETSc.IntType)))

        for j in np.arange(num_dof_in_element):
            # Exyz[k] = Exyz[k] + real_part*basis + imag_part*basis
            fields[i, 0] += realField[j] * basis[0, j] + np.sqrt(-1. + 0.j) * imagField[j] * basis[0, j]
            fields[i, 1] += realField[j] * basis[1, j] + np.sqrt(-1. + 0.j) * imagField[j] * basis[1, j]
            fields[i, 2] += realField[j] * basis[2, j] + np.sqrt(-1. + 0.j) * imagField[j] * basis[2, j]
            # Hxyz[k] = Hxyz[k] + real_part*curl_basis + imag_part*curl_basis
            fields[i, 3] += realField[j] * curl_basis[0, j] + np.sqrt(-1. + 0.j) * imagField[j] * curl_basis[0, j]
            fields[i, 4] += realField[j] * curl_basis[1, j] + np.sqrt(-1. + 0.j) * imagField[j] * curl_basis[1, j]
            fields[i, 5] += realField[j] * curl_basis[2, j] + np.sqrt(-1. + 0.j) * imagField[j] * curl_basis[2, j]

    # Following Maxwell equations, apply constant factor to compute the magnetic field
    fields[:, 3::] /= Const

    return fields