def mark_inlet(markers):
    from itertools import chain
    from random import random
    # Get all outer boundary elements tagged 40 and 10
    marked_cells40 = df.SubsetIterator(markers, 40)
    marked_cells10 = df.SubsetIterator(markers, 10)
    # Randomly mark roughly 1% of them as IN cells
    for cell in chain(marked_cells10, marked_cells40):
        if random() < 0.01:
            markers[cell] = 1
def mark(self, edge_meshfun, value):
    edge_array = edge_meshfun.array()
    self.ensure_initialised(2, 1)
    self.ensure_initialised(1, 0)
    for face in dolfin.SubsetIterator(self.boundary_facefun,
                                      self.boundary_value):
        edge_array[face.entities(1)] = value
def volumes(mesh, cell_f=None):
    '''All volumes or those of cell_f == 1'''
    if cell_f is None:
        cell_f = df.MeshFunction('size_t', mesh, mesh.topology().dim(), 1)
    for c in df.SubsetIterator(cell_f, 1):
        yield df.Cell(mesh, c.index()).volume()
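# A minimal usage sketch of volumes(), assuming legacy FEniCS/dolfin is
# imported as df; the demo function name is ours. The left half of a unit
# square is tagged and the tagged cell volumes are summed.
def _demo_volumes():
    mesh = df.UnitSquareMesh(8, 8)
    cell_f = df.MeshFunction('size_t', mesh, mesh.topology().dim(), 0)
    df.CompiledSubDomain('x[0] < 0.5 + DOLFIN_EPS').mark(cell_f, 1)
    # Half of the unit square is tagged, so the volumes sum to 0.5
    assert df.near(sum(volumes(mesh, cell_f)), 0.5, 1E-10)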
def get_normal(self, mesh):
    facet_iter = df.SubsetIterator(self.boundaries, self.id)
    normal = np.empty((self.num_facets, self.g_dim))
    for i, facet in enumerate(facet_iter):
        fs = df.Facet(mesh, facet.index())
        normal[i, :] = -1 * np.array([fs.normal()[j]
                                      for j in range(self.g_dim)])
    return normal
def get_vertices(self):
    facet_iter = df.SubsetIterator(self.boundaries, self.id)
    vertices = np.empty((self.num_facets * self.g_dim, self.g_dim))
    for i, facet in enumerate(facet_iter):
        for j, v in enumerate(df.vertices(facet)):
            vertices[i * self.g_dim + j, :] = v.point().array()[:self.g_dim]
    return vertices
def get_area(self, mesh):
    facet_iter = df.SubsetIterator(self.boundaries, self.id)
    area = np.empty(self.num_facets)
    mesh.init(self.t_dim - 1, self.t_dim)
    for i, facet in enumerate(facet_iter):
        cell = df.Cell(mesh, facet.entities(self.t_dim)[0])
        facet_id = list(cell.entities(self.t_dim - 1)).index(facet.index())
        area[i] = cell.facet_area(facet_id)
    return area
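# The facet-wise methods get_normal, get_vertices, get_area (and get_basis
# below) all hang off one object. A minimal sketch of the attributes they
# assume; the class name and constructor are hypothetical, while the
# attribute names are taken from the methods themselves.
class _BoundaryData(object):
    def __init__(self, mesh, boundaries, bnd_id):
        self.boundaries = boundaries      # facet MeshFunction on mesh
        self.id = bnd_id                  # tag of this boundary piece
        self.g_dim = mesh.geometry().dim()
        self.t_dim = mesh.topology().dim()
        self.num_facets = sum(
            1 for _ in df.SubsetIterator(boundaries, bnd_id))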
def test_mark(self):
    edge_meshfunc = dolfin.EdgeFunction('uint', self.mesh)
    edge_meshfunc.set_all(0)
    test_val = 11
    self.DUT.mark(edge_meshfunc, test_val)
    no_edges = len(edge_meshfunc.array())
    self.assertTrue(
        sum(edge_meshfunc.array() == test_val) == self.no_boundary_edges)
    for edge in dolfin.SubsetIterator(edge_meshfunc, test_val):
        self.assertTrue(self._check_on_boundary(edge))
def edge_lengths(mesh, cell_f=None):
    '''Shortest edge length of every cell, or of cells with cell_f == 1'''
    if cell_f is None:
        cell_f = df.MeshFunction('size_t', mesh, mesh.topology().dim(), 1)
    mesh.init(mesh.topology().dim(), 0)
    x = mesh.coordinates()
    for c in df.SubsetIterator(cell_f, 1):
        vertices = c.entities(0)
        yield min(np.linalg.norm(x[v0] - x[v1])
                  for v0, v1 in combinations(vertices, 2))
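# A minimal usage sketch of edge_lengths(), assuming dolfin as df with
# numpy as np and itertools.combinations in scope: the global shortest
# edge is a crude resolution estimate, comparable to mesh.hmin().
def _demo_edge_lengths():
    mesh = df.UnitSquareMesh(16, 16)
    h_min = min(edge_lengths(mesh))
    print('Shortest edge %g (mesh.hmin() = %g)' % (h_min, mesh.hmin()))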
def cells(self, facet_func, id):
    """
    Returns the cells adjacent to the surface of the object.

    This information might be useful for calculating the current density
    into the object surface.
    """
    mesh = self.V.mesh()
    D = mesh.topology().dim()
    mesh.init(D - 1, D)  # Build connectivity between facets and cells

    itr_facet = df.SubsetIterator(facet_func, id)
    object_adjacent_cells = []
    for f in itr_facet:
        object_adjacent_cells.append(f.entities(D)[0])
    return object_adjacent_cells
def boring(mesh_2d, inner_size):
    '''
    A mesh_2d is assumed to be a cube [-inner_size, inner_size]^2. The
    curve is mostly a collection of boundary edges.
    '''
    facet_f = df.MeshFunction('size_t', mesh_2d, 1, 0)
    mesh_2d.init(2, 1)

    # Mesh for the curve is tricky as we need to find the line in the faces
    def union(domains, A=inner_size, tol=1E-10):
        def body(domains):
            if isinstance(domains, str):
                if domains:
                    return '( %s )' % domains
                else:
                    return ''
            else:
                return ' || '.join(map(body, domains))
        return df.CompiledSubDomain(body(domains), A=A, tol=tol)

    lines = {
        4: union('near(x[1], A, tol) && near(x[2], A, tol)'),
        3: union('near(x[2], -x[0], tol)'),
        2: union('near(x[2], x[1], tol)'),
        1: union(['near(x[0], -A, tol) && near(x[2], -A, tol)',
                  'near(x[1], A, tol) && near(x[0], -A, tol)',
                  'near(x[1], -A, tol) && near(x[0], -A, tol)'])
    }

    for tag, line in lines.items():
        # Get candidate facets from the cells tagged in the marking function
        facets = set(sum((cell.entities(1).tolist()
                          for cell in df.SubsetIterator(
                              mesh_2d.marking_function, tag)), []))
        for facet in facets:
            if line.inside(df.Facet(mesh_2d, facet).midpoint().array(), True):
                facet_f[int(facet)] = 1

    return facet_f
def get_basis(self, mesh, vertices):
    facet_iter = df.SubsetIterator(self.boundaries, self.id)
    basis = np.empty((self.num_facets * self.g_dim, self.g_dim))
    for i, facet in enumerate(facet_iter):
        fs = df.Facet(mesh, facet.index())
        # First basis vector: normalized edge between two facet vertices
        basis[i * self.g_dim, :] = (vertices[i * self.g_dim, :] -
                                    vertices[i * self.g_dim + 1, :])
        basis[i * self.g_dim, :] /= np.linalg.norm(basis[i * self.g_dim, :])
        # Last basis vector: the flipped facet normal
        basis[self.g_dim * (i + 1) - 1, :] = -1 * \
            np.array([fs.normal()[j] for j in range(self.g_dim)])
        if self.g_dim == 3:
            # Middle vector completes the frame in 3D
            basis[i * self.g_dim + 1, :] = np.cross(
                basis[self.g_dim * (i + 1) - 1, :],
                basis[i * self.g_dim, :])
    return basis
def closest_entity(x, subdomains, label=None):
    '''
    Return the entity with the smallest distance to x out of the entities
    marked by label in subdomains. The distance is measured between x and
    the entity midpoint, so it is only approximate.
    '''
    x = df.Point(*x)
    # Grab all tags if none are given
    if label is None:
        label = set(subdomains.array())
    label = as_tuple(label)

    sub_iter = itertools.chain(*[df.SubsetIterator(subdomains, l)
                                 for l in label])
    pairs = (((x - e.midpoint()).norm(), e.index()) for e in sub_iter)
    dist, index = min(pairs, key=lambda p: p[0])
    print('Found y, |x-y|=', dist)

    return df.MeshEntity(subdomains.mesh(), subdomains.dim(), index)
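# A minimal usage sketch of closest_entity(); as_tuple is a helper not
# shown here, so a trivial stand-in is provided.
as_tuple = lambda l: tuple(l) if hasattr(l, '__iter__') else (l, )

def _demo_closest_entity():
    mesh = df.UnitSquareMesh(8, 8)
    cell_f = df.MeshFunction('size_t', mesh, 2, 1)
    # The cell whose midpoint is nearest to the query point
    return closest_entity([0.21, 0.77], cell_f, label=1)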
def dolfin_to_carpfile(mesh, basename, markers=None,
                       vert_fields=None, cell_fields=None):
    """
    NOT DEBUGGED:
    Write carp mesh and fields to file from dolfin data

    mesh : dolfin.Mesh
        The dolfin mesh which should be written to file
    basename : str
        Basename of file which all data will be written to
    markers : dict (optional)
        A dict of name to markers of facet boundaries contained in the mesh
    vert_fields : dict (optional)
        A dict between named vertex field data and dolfin Functions
    cell_fields : dict (optional)
        A dict between named cell field data and dolfin Functions
    """
    import dolfin as d
    import numpy as np

    boundary = d.CompiledSubDomain("on_boundary")

    d.warning("This function is not tested...")

    boundary_facets = d.FacetFunction("size_t", mesh, 0)
    boundary.mark(boundary_facets, 1)

    num_boundary_facets = np.sum(boundary_facets.array() == 1)

    with open(basename + ".pts", "w") as f:
        f.write("{0}\n".format(mesh.num_vertices()))
        [f.write("{0:.10f} {1:.10f} {2:.10f}\n".format(*coord))
         for coord in mesh.coordinates()]

    with open(basename + ".elem", "w") as f:
        f.write("{0}\n".format(mesh.num_cells()))
        [f.write("Tt {0} {1} {2} {3} 0\n".format(*cell))
         for cell in mesh.cells()]

    with open(basename + ".surf", "w") as f:
        f.write("{0}\n".format(num_boundary_facets))
        [f.write("Tr {0} {1} {2}\n".format(*facet.entities(0)))
         for facet in d.SubsetIterator(boundary_facets, 1)]

    # If generating mapping between vertices and boundaries
    if markers:
        # Get the facet markers
        facet_markers = mesh.domains().facet_domains(mesh)

        # Iterate over markers
        for boundary_name, marker in markers.items():
            vertices = set()
            for face in d.SubsetIterator(facet_markers, marker):
                vertices.update(face.entities(0))

            with open(basename + "_" + boundary_name + ".vtx", "w") as f:
                f.write("{0:d}\nintra \n".format(int(len(vertices))))
                [f.write("{0}\n".format(vert)) for vert in vertices]

        # Apex node...
        #with open(basename+"_apex.vtx", "w") as f:
        #    f.write("{0:d}\nintra \n".format(1))
        #    f.write("{0:d}\n".format((apex_point.array()==1).nonzero()[0][0]))

    # If outputting vertex fields
    if vert_fields:
        # Get dof mapping
        dofs_to_vert, vectordofs_to_vert, vectordofs_to_subvert = \
            dofs_to_verts(mesh)

        # Iterate over the passed fields
        for field_name, func in vert_fields.items():
            values = func.vector().array()

            # If scalar field
            # NOTE: 'dofs' is undefined here; untested code, as warned above
            if mesh.num_vertices() == len(dofs):
                reordered_values = values[dofs_to_vert]

                # Write the field to file
                with open(basename + "_" + field_name + ".dat", "w") as f:
                    [f.write("{0:.10f}\n".format(value))
                     for value in reordered_values]

            # If vector field
            elif mesh.num_vertices() == 3 * len(dofs):
                raise NotImplementedError
            else:
                raise ValueError("Field and mesh do not match: " + field_name)
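# A minimal usage sketch; the basename is arbitrary and, as the function
# itself warns, the output is untested.
def _demo_carpfile():
    import dolfin as d
    mesh = d.UnitCubeMesh(4, 4, 4)
    dolfin_to_carpfile(mesh, 'carp_out')  # writes carp_out.pts/.elem/.surf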
def __init__(self, marking_function, markers):
    base_mesh = marking_function.mesh()

    assert base_mesh.topology().dim() >= marking_function.dim()
    # Work in serial only (much like SubMesh)
    assert df.MPI.size(base_mesh.mpi_comm()) == 1

    gdim = base_mesh.geometry().dim()
    tdim = marking_function.dim()
    assert tdim > 0, 'No Embedded mesh from vertices'

    if isinstance(markers, int):
        markers = [markers]
    assert markers, markers

    # We reuse a lot of SubMesh capabilities if marking by cell_f
    if base_mesh.topology().dim() == marking_function.dim():
        # SubMesh works only with one marker so we conform
        color_array = marking_function.array()
        color_cells = dict(
            (m, np.where(color_array == m)[0]) for m in markers)

        # So everybody is marked as 1
        one_cell_f = df.MeshFunction('size_t', base_mesh, tdim, 0)
        for cells in color_cells.values():
            one_cell_f.array()[cells] = 1

        # The EmbeddedMesh now steals a lot from SubMesh
        submesh = df.SubMesh(base_mesh, one_cell_f, 1)
        df.Mesh.__init__(self, submesh)

        # The entity mapping attribute
        mesh_key = marking_function.mesh().id()
        self.parent_entity_map = {
            mesh_key: {
                0: submesh.data().array('parent_vertex_indices', 0),
                tdim: submesh.data().array('parent_cell_indices', tdim)
            }
        }
        # Finally it remains to preserve the markers
        f = df.MeshFunction('size_t', self, tdim, 0)
        if len(markers) > 1:
            # We turn the old cells to sets for faster lookup
            color_cells = {k: set(v) for k, v in color_cells.items()}
            # And then use the new -> old mapping to color
            for new_cell, old_cell in enumerate(
                    self.parent_entity_map[mesh_key][tdim]):
                for color, cells in color_cells.items():
                    if old_cell in cells:
                        f[new_cell] = color
                        break
        else:
            f.set_all(markers[0])

        self.marking_function = f
        # https://stackoverflow.com/questions/2491819/how-to-return-a-value-from-init-in-python
        return None

    # Otherwise the mesh needs to be built from scratch
    base_mesh.init(tdim, 0)
    # Collect unique vertices based on their new-mesh indexing; the cells
    # of the embedded mesh are defined in terms of their embedded numbering
    new_vertices, new_cells = [], []
    # NOTE: new_vertices is actually a new -> old vertex map

    # Map from cells of embedded mesh to tdim entities of base mesh, and
    cell_map = []
    cell_colors = defaultdict(list)  # Preserve the markers

    new_cell_index, new_vertex_index = 0, 0
    for marker in markers:
        for entity in df.SubsetIterator(marking_function, marker):
            vs = entity.entities(0)
            cell = []
            # Vertex lookup
            for v in vs:
                try:
                    local = new_vertices.index(v)
                except ValueError:
                    local = new_vertex_index
                    new_vertices.append(v)
                    new_vertex_index += 1
                # Cell, one by one in terms of vertices
                cell.append(local)
            # The cell
            new_cells.append(cell)
            # Into map
            cell_map.append(entity.index())
            # Colors
            cell_colors[marker].append(new_cell_index)

            new_cell_index += 1

    # With acquired data build the mesh
    df.Mesh.__init__(self)
    editor = df.MeshEditor()

    if df.__version__ == '2017.2.0':
        cell_type = {1: 'interval', 2: 'triangle', 3: 'tetrahedron'}[tdim]
        editor.open(self, cell_type, tdim, gdim)
    else:
        editor.open(self, tdim, gdim)

    editor.init_vertices(len(new_vertices))
    editor.init_cells(len(new_cells))

    vertex_coordinates = base_mesh.coordinates()[new_vertices]

    for vi, x in enumerate(vertex_coordinates):
        editor.add_vertex(vi, x)
    for ci, c in enumerate(new_cells):
        editor.add_cell(ci, *c)

    editor.close()

    # The entity mapping attribute
    mesh_key = marking_function.mesh().id()
    self.parent_entity_map = {mesh_key: {0: new_vertices, tdim: cell_map}}

    # Finally the inherited marking function
    f = df.MeshFunction('size_t', self, tdim, 0)
    f_ = f.array()
    if len(markers) > 1:
        for marker, cells in cell_colors.items():
            f_[cells] = marker
    else:
        f.set_all(markers[0])

    self.marking_function = f
# Check creation
mesh = df.UnitCubeMesh(10, 10, 10)
f = df.MeshFunction('size_t', mesh, mesh.topology().dim() - 1, 0)
chi = df.CompiledSubDomain('near(x[i], 0.5)', i=0)
for i in range(3):
    chi.i = i
    chi.mark(f, i + 1)

mesh = EmbeddedMesh(f, [1, 2, 3])

volume = lambda c: df.Cell(mesh, c.index()).volume()
assert df.near(sum(volume(c)
                   for c in df.SubsetIterator(mesh.marking_function, 1)),
               1, 1E-10)
assert df.near(sum(volume(c)
                   for c in df.SubsetIterator(mesh.marking_function, 2)),
               1, 1E-10)
assert df.near(sum(volume(c)
                   for c in df.SubsetIterator(mesh.marking_function, 3)),
               1, 1E-10)

# Check normal computation
mesh = df.UnitCubeMesh(10, 10, 10)
bmesh = df.BoundaryMesh(mesh, 'exterior')

n = OuterNormal(bmesh, [0.5, 0.5, 0.5])

for cell in df.cells(bmesh):
    # The normal at the cell midpoint should point away from the center
    x = cell.midpoint().array()
    assert np.dot(n(x), x - np.array([0.5, 0.5, 0.5])) > 0
def __init__(self, marking_function, markers):
    if not isinstance(markers, (list, tuple)):
        markers = [markers]

    # Convenience option: markers may also be given as subdomains
    # (SubDomain instances, strings, or lambdas), not only as numbers
    is_number = lambda m: isinstance(m, int)

    new_markers = []
    # Build a new int list, marking the facet function along the way
    if not all(map(is_number, markers)):
        numbers = list(filter(is_number, markers))
        next_int_marker = max(numbers) if numbers else 0
        for marker in markers:
            if is_number(marker):
                new_markers.append(marker)
            else:
                next_int_marker += 1
                # A SubDomain
                try:
                    marker.mark(marking_function, next_int_marker)
                except AttributeError:
                    # A string
                    try:
                        df.CompiledSubDomain(marker).mark(
                            marking_function, next_int_marker)
                    # A lambda
                    except TypeError:
                        df.CompiledSubDomain(*marker()).mark(
                            marking_function, next_int_marker)
                new_markers.append(next_int_marker)
        markers = new_markers

    base_mesh = marking_function.mesh()

    assert base_mesh.topology().dim() >= marking_function.dim()
    # Work in serial only (much like SubMesh)
    assert df.MPI.size(base_mesh.mpi_comm()) == 1

    gdim = base_mesh.geometry().dim()
    tdim = marking_function.dim()
    assert tdim > 0, 'No Embedded mesh from vertices'

    assert markers, markers

    # We reuse a lot of SubMesh capabilities if marking by cell_f
    if base_mesh.topology().dim() == marking_function.dim():
        # SubMesh works only with one marker so we conform
        color_array = marking_function.array()
        color_cells = dict(
            (m, np.where(color_array == m)[0]) for m in markers)

        # So everybody is marked as 1
        one_cell_f = df.MeshFunction('size_t', base_mesh, tdim, 0)
        for cells in color_cells.values():
            one_cell_f.array()[cells] = 1

        # The EmbeddedMesh now steals a lot from SubMesh
        submesh = df.SubMesh(base_mesh, one_cell_f, 1)
        df.Mesh.__init__(self, submesh)

        # The entity mapping attribute;
        # NOTE: At this point there is no reason to use a dict as
        # a lookup table
        mesh_key = marking_function.mesh().id()
        mapping_0 = submesh.data().array('parent_vertex_indices', 0)
        mapping_tdim = submesh.data().array('parent_cell_indices', tdim)
        self.parent_entity_map = {
            mesh_key: {
                0: dict(enumerate(mapping_0)),
                tdim: dict(enumerate(mapping_tdim))
            }
        }
        # Finally it remains to preserve the markers
        f = df.MeshFunction('size_t', self, tdim, 0)
        f_values = f.array()
        if len(markers) > 1:
            old2new = dict(zip(mapping_tdim, range(len(mapping_tdim))))
            for color, old_cells in color_cells.items():
                new_cells = np.array([old2new[o] for o in old_cells],
                                     dtype='uintp')
                f_values[new_cells] = color
        else:
            f.set_all(markers[0])

        self.marking_function = f
        # Declare which tagged cells are found
        self.tagged_cells = set(markers)
        # https://stackoverflow.com/questions/2491819/how-to-return-a-value-from-init-in-python
        return None

    # Otherwise the mesh needs to be built from scratch
    base_mesh.init(tdim, 0)
    # Collect unique vertices based on their new-mesh indexing; the cells
    # of the embedded mesh are defined in terms of their embedded numbering
    new_vertices, new_cells = [], []
    # NOTE: new_vertices is actually a new -> old vertex map

    # Map from cells of embedded mesh to tdim entities of base mesh, and
    cell_map = []
    cell_colors = defaultdict(list)  # Preserve the markers

    new_cell_index, new_vertex_index = 0, 0
    for marker in markers:
        for entity in df.SubsetIterator(marking_function, marker):
            vs = entity.entities(0)
            cell = []
            # Vertex lookup
            for v in vs:
                try:
                    local = new_vertices.index(v)
                except ValueError:
                    local = new_vertex_index
                    new_vertices.append(v)
                    new_vertex_index += 1
                # Cell, one by one in terms of vertices
                cell.append(local)
            # The cell
            new_cells.append(cell)
            # Into map
            cell_map.append(entity.index())
            # Colors
            cell_colors[marker].append(new_cell_index)

            new_cell_index += 1

    vertex_coordinates = base_mesh.coordinates()[new_vertices]
    new_cells = np.array(new_cells, dtype='uintp')

    # With acquired data build the mesh
    df.Mesh.__init__(self)
    # Fill
    make_mesh(coordinates=vertex_coordinates, cells=new_cells,
              tdim=tdim, gdim=gdim, mesh=self)

    # The entity mapping attribute
    mesh_key = marking_function.mesh().id()
    self.parent_entity_map = {
        mesh_key: {
            0: dict(enumerate(new_vertices)),
            tdim: dict(enumerate(cell_map))
        }
    }
    # Finally the inherited marking function
    f = df.MeshFunction('size_t', self, tdim, 0)
    f_ = f.array()
    if len(markers) > 1:
        for marker, cells in cell_colors.items():
            f_[cells] = marker
    else:
        f.set_all(markers[0])

    self.marking_function = f

    # Declare which tagged cells are found
    self.tagged_cells = set(markers)
def mark(self, cell_meshfun, value):
    cell_array = cell_meshfun.array()
    self.ensure_initialised(1, 3)  # edge -> cell connectivity
    for edge in dolfin.SubsetIterator(self.boundary_edgefun,
                                      self.boundary_value):
        cell_array[edge.entities(3)] = value
def build_embedding_map(emesh, mesh, esubdomains=None, tags=None, tol=1E-14):
    '''
    Operating under the assumption that emesh consists of entities of
    mesh, we find here a map from emesh vertices and cells to mesh
    vertices and entities.
    '''
    df.info('\tEmbedding map')
    e_timer = df.Timer('emap')

    assert emesh.topology().dim() < mesh.topology().dim()
    edim = emesh.topology().dim()

    # We have the right subdomains, i.e. a cell function
    assert esubdomains is None or esubdomains.dim() == edim

    # Let's make the inputs consistent
    if esubdomains is None:
        assert tags is None
        esubdomains = df.MeshFunction('size_t', emesh, edim, 0)

    # Meaning all the emesh cells and vertices need to be found
    all_check = tags is None
    # All the cells
    if all_check:
        tags = set((0, ))

    # We might be lucky and this is a boundary mesh -> extract
    if hasattr(emesh, 'entity_map'):
        # One must be careful here: that emesh was constructed from mesh
        # is not guaranteed and has to be flagged by the user (parent_id)
        if hasattr(emesh, 'parent_id') and emesh.parent_id == mesh.id():
            entity_map = {
                0: dict(enumerate(emesh.entity_map(0).array())),
                edim: dict(enumerate(emesh.entity_map(edim).array()))
            }
            df.info('\tDone (Embedding map by extracting) %g' %
                    e_timer.stop())
            return entity_map

    # Otherwise we work hard. Localization will require
    tree = mesh.bounding_box_tree()  # to find candidate cells.
    # We zoom in on the unique entity by
    mesh.init(edim)  # the set among which emesh cells will be searched,
    mesh.init(mesh.topology().dim(), edim)  # via cell connectivity,
    mesh.init(edim, 0)  # and coordinate comparison
    c2v = mesh.topology()(mesh.topology().dim(), 0)
    c2e = mesh.topology()(mesh.topology().dim(), edim)
    e2v = mesh.topology()(edim, 0)

    tagged_cells = chain(*[df.SubsetIterator(esubdomains, tag)
                           for tag in tags])

    mesh_x = mesh.coordinates()
    emesh_x = emesh.coordinates()
    # Get some idea of mesh size to make relative comparison of coords
    scale = max(emesh_x.max(axis=0) - emesh_x.min(axis=0))

    # Also build the map for vertices
    entity_map = {0: dict(), edim: dict()}
    vertex_map = entity_map[0]

    cells_with_vertex = dict()
    for cell in tagged_cells:
        the_entity = set()
        for vertex in cell.entities(0):
            if vertex not in vertex_map:
                vertex_x = emesh_x[vertex]
                mcells = tree.compute_entity_collisions(df.Point(*vertex_x))
                # What is the id of vertex in the mesh
                mcell_vertices = c2v(mcells[0])
                the_vertex = min(
                    mcell_vertices,
                    key=lambda v: np.linalg.norm(vertex_x - mesh_x[v]))
                error = np.linalg.norm(vertex_x - mesh_x[the_vertex]) / scale
                assert error < tol, 'Found a hanging node %16f' % error

                vertex_map[vertex] = the_vertex
                cells_with_vertex[vertex] = mcells
            else:
                the_vertex = vertex_map[vertex]
                mcells = cells_with_vertex[vertex]

            # For each cell I want to get its entities which contain the
            # vertex. We are after such a (UNIQUE) entity which would be
            # in each such set built for a vertex
            vertex_set = {entity
                          for mcell in mcells for entity in c2e(mcell)
                          if the_vertex in e2v(entity)}

            if not the_entity:
                the_entity.update(vertex_set)
            else:
                the_entity.intersection_update(vertex_set)
        assert len(the_entity) == 1
        # Insert
        entity_map[edim][cell.index()] = the_entity.pop()

    if all_check:
        # All and continuous
        assert len(entity_map[0]) == emesh.num_vertices()
        assert len(entity_map[edim]) == emesh.num_cells()
        # Continuity
        assert is_1_sequence(entity_map[0])
        assert is_1_sequence(entity_map[edim])

    df.info('\tDone (Embedding map) %g' % e_timer.stop())

    return entity_map
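# A minimal usage sketch of build_embedding_map(), assuming dolfin as df,
# numpy as np, and itertools.chain in scope: map the boundary mesh of a
# cube back to the vertices and facets of the parent mesh. Passing explicit
# tags skips the is_1_sequence check (another helper not shown here).
def _demo_embedding_map():
    mesh = df.UnitCubeMesh(4, 4, 4)
    bmesh = df.BoundaryMesh(mesh, 'exterior')
    esub = df.MeshFunction('size_t', bmesh, bmesh.topology().dim(), 0)
    mapping = build_embedding_map(bmesh, mesh, esub, tags=(0, ))
    # mapping[0]: bmesh vertex -> mesh vertex,
    # mapping[2]: bmesh cell -> mesh facet
    return mapping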
def extractFeNiCsBiVFacet(ugrid, geometry="BiV"):
    tol = 1e-2

    #ugrid = vtk_py.readUGrid(meshfilename)

    # Extract surface
    geom = vtk.vtkGeometryFilter()
    if vtk.vtkVersion().GetVTKMajorVersion() < 6:
        geom.SetInput(ugrid)
    else:
        geom.SetInputData(ugrid)
    geom.Update()
    surf = geom.GetOutput()

    bc_pts_locator = []
    bc_pts = []
    bc_pts_range = []
    bc_pts_map = []

    # Extract surface normals
    normal = vtk.vtkPolyDataNormals()
    if vtk.vtkVersion().GetVTKMajorVersion() < 6:
        normal.SetInput(surf)
    else:
        normal.SetInputData(surf)
    normal.ComputeCellNormalsOn()
    normal.Update()
    surf_w_norm = normal.GetOutput()

    #vtk_py.writePData(normal.GetOutput(), "normal.vtk")

    zmax = surf_w_norm.GetBounds()[5]

    surf_w_norm.BuildLinks()
    idlist = vtk.vtkIdList()
    basecellidlist = vtk.vtkIdTypeArray()
    basesurf = vtk.vtkPolyData()
    for p in range(0, surf_w_norm.GetNumberOfCells()):
        zvec = surf_w_norm.GetCellData().GetNormals().GetTuple3(p)[2]

        surf_w_norm.GetCellPoints(p, idlist)
        zpos = surf_w_norm.GetPoints().GetPoint(idlist.GetId(0))[2]

        if ((abs(zvec - 1.0) < tol or abs(zvec + 1.0) < tol)
                and (abs(zmax - zpos) < tol)):
            surf_w_norm.DeleteCell(p)
            basecellidlist.InsertNextValue(p)

    basesurf = vtk_py.extractCellFromPData(basecellidlist, surf)
    baseptlocator = vtk.vtkPointLocator()
    baseptlocator.SetDataSet(basesurf)
    baseptlocator.BuildLocator()

    #######################################################################

    surf_w_norm.RemoveDeletedCells()

    cleanpdata = vtk.vtkCleanPolyData()
    if vtk.vtkVersion().GetVTKMajorVersion() < 6:
        cleanpdata.SetInput(surf_w_norm)
    else:
        cleanpdata.SetInputData(surf_w_norm)
    cleanpdata.Update()

    connfilter = vtk.vtkPolyDataConnectivityFilter()
    if vtk.vtkVersion().GetVTKMajorVersion() < 6:
        connfilter.SetInput(cleanpdata.GetOutput())
    else:
        connfilter.SetInputData(cleanpdata.GetOutput())
    connfilter.Update()

    print("Total_num_points =", cleanpdata.GetOutput().GetNumberOfPoints())
    tpt = 0

    if geometry == "BiV":
        nsurf = 3
    else:
        nsurf = 2

    for p in range(0, nsurf):
        pts = vtk.vtkPolyData()

        connfilter.SetExtractionModeToSpecifiedRegions()
        [connfilter.DeleteSpecifiedRegion(k) for k in range(0, nsurf)]
        connfilter.AddSpecifiedRegion(p)
        connfilter.ScalarConnectivityOff()
        connfilter.FullScalarConnectivityOff()
        connfilter.Update()

        cleanpdata2 = vtk.vtkCleanPolyData()
        if vtk.vtkVersion().GetVTKMajorVersion() < 6:
            cleanpdata2.SetInput(connfilter.GetOutput())
        else:
            cleanpdata2.SetInputData(connfilter.GetOutput())
        cleanpdata2.Update()

        pts.DeepCopy(cleanpdata2.GetOutput())

        tpt = tpt + cleanpdata2.GetOutput().GetNumberOfPoints()

        ptlocator = vtk.vtkPointLocator()
        ptlocator.SetDataSet(pts)
        ptlocator.BuildLocator()

        bc_pts_locator.append(ptlocator)
        bc_pts.append(pts)
        bc_pts_range.append([
            abs(pts.GetBounds()[k + 1] - pts.GetBounds()[k])
            for k in range(0, 6, 2)
        ])

    #vtk_py.writePData(connfilter.GetOutput(), "/home/likchuan/Research/fenicsheartmesh/ellipsoidal/Geometry/test.vtk")

    print("Total_num_points =", tpt)

    Epiid = np.argmax(np.array([max(pts) for pts in bc_pts_range]))
    maxzrank = np.array([pts[2] for pts in bc_pts_range]).argsort()

    if geometry == "BiV":
        LVid = maxzrank[1]
        RVid = 3 - (LVid + Epiid)
        bc_pts_map = [4, 4, 4, 4]
        bc_pts_map[Epiid] = 1
        bc_pts_map[LVid] = 2
        bc_pts_map[RVid] = 3
        baseid = 3
    else:
        LVid = maxzrank[0]
        bc_pts_map = [4, 4, 4]
        bc_pts_map[Epiid] = 1
        bc_pts_map[LVid] = 2
        baseid = 2

    bc_pts_locator.append(baseptlocator)
    bc_pts.append(basesurf)

    dolfin_mesh = vtk_py.convertUGridToXMLMesh(ugrid)

    dolfin_facets = dolfin.FacetFunction('size_t', dolfin_mesh)
    dolfin_facets.set_all(0)

    for facet in dolfin.SubsetIterator(dolfin_facets, 0):
        for locator in range(0, nsurf + 1):
            cnt = 0
            for p in range(0, 3):
                v0 = dolfin.Vertex(dolfin_mesh, facet.entities(0)[p]).x(0)
                v1 = dolfin.Vertex(dolfin_mesh, facet.entities(0)[p]).x(1)
                v2 = dolfin.Vertex(dolfin_mesh, facet.entities(0)[p]).x(2)
                ptid = bc_pts_locator[locator].FindClosestPoint(v0, v1, v2)
                x0 = bc_pts[locator].GetPoints().GetPoint(ptid)
                dist = vtk.vtkMath.Distance2BetweenPoints([v0, v1, v2], x0)
                if dist < 1e-5:
                    cnt = cnt + 1
            if cnt == 3:
                dolfin_facets[facet] = bc_pts_map[locator]

    dolfin_edges = dolfin.EdgeFunction('size_t', dolfin_mesh)
    dolfin_edges.set_all(0)

    epilocator = Epiid
    lvendolocator = LVid

    for edge in dolfin.SubsetIterator(dolfin_edges, 0):
        cnt_epi = 0
        cnt_lvendo = 0
        for p in range(0, 2):
            v0 = dolfin.Vertex(dolfin_mesh, edge.entities(0)[p]).x(0)
            v1 = dolfin.Vertex(dolfin_mesh, edge.entities(0)[p]).x(1)
            v2 = dolfin.Vertex(dolfin_mesh, edge.entities(0)[p]).x(2)

            epiptid = bc_pts_locator[epilocator].FindClosestPoint(v0, v1, v2)
            epix0 = bc_pts[epilocator].GetPoints().GetPoint(epiptid)
            epidist = vtk.vtkMath.Distance2BetweenPoints([v0, v1, v2], epix0)

            topptid = bc_pts_locator[baseid].FindClosestPoint(v0, v1, v2)
            topx0 = bc_pts[baseid].GetPoints().GetPoint(topptid)
            topdist = vtk.vtkMath.Distance2BetweenPoints([v0, v1, v2], topx0)

            lvendoptid = bc_pts_locator[lvendolocator].FindClosestPoint(
                v0, v1, v2)
            lvendox0 = bc_pts[lvendolocator].GetPoints().GetPoint(lvendoptid)
            lvendodist = vtk.vtkMath.Distance2BetweenPoints([v0, v1, v2],
                                                            lvendox0)

            if topdist < 1e-5 and epidist < 1e-5:
                cnt_epi = cnt_epi + 1

            if topdist < 1e-5 and lvendodist < 1e-5:
                cnt_lvendo = cnt_lvendo + 1

        if cnt_epi == 2:
            dolfin_edges[edge] = 1

        if cnt_lvendo == 2:
            dolfin_edges[edge] = 2

    return dolfin_mesh, dolfin_facets, dolfin_edges
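# A minimal usage sketch following the commented-out readUGrid hint in the
# function body; the input filename is hypothetical.
def _demo_extract_biv():
    ugrid = vtk_py.readUGrid('biv_mesh.vtk')
    return extractFeNiCsBiVFacet(ugrid, geometry="BiV")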
def mortar_meshes(subdomains, markers, ifacet_iter=None, strict=True, tol=1E-14):
    '''
    Let subdomains be a cell function. We assume that domains (cells)
    marked with the given markers are adjacent and that an interface can
    be defined between these domains which is a continuous curve. Then for
    each domain we create a (sub)mesh and a single interface mesh which
    holds a connectivity map of its cells to facets of the submeshes. The
    submeshes are returned as a list. The connectivity map is of the form
    submesh.id -> facets. The marking function f of the EmbeddedMesh that
    is the interface is colored such that f[color] is the interface of
    meshes (submeshes[m] for m in color_map[color]).
    '''
    assert len(markers) > 1
    # Need a cell function
    mesh = subdomains.mesh()
    tdim = mesh.topology().dim()
    assert subdomains.dim() == tdim

    markers = list(markers)

    # For each facet we want to know which 2 cells share it
    tagged_iface = defaultdict(dict)

    if ifacet_iter is None:
        mesh.init(tdim - 1)
        ifacet_iter = df.facets(mesh)

    mesh.init(tdim - 1, tdim)
    for facet in ifacet_iter:
        cells = list(map(int, facet.entities(tdim)))

        if len(cells) > 1:
            c0, c1 = cells
            tag0, tag1 = subdomains[c0], subdomains[c1]
            if tag0 != tag1 and tag0 in markers and tag1 in markers:
                # A key of sorted tags
                if tag0 < tag1:
                    key = (tag0, tag1)
                    # The cells connected to facet, ordered to match the tags
                    value = (c0, c1)
                else:
                    key = (tag1, tag0)
                    value = (c1, c0)
                # A facet-to-2-cells map for the facets of the tagged pair
                tagged_iface[key][facet.index()] = value

    # order -> tagged keys
    color_to_tag_map = list(tagged_iface.keys())
    # Set to a color which won't be encountered
    interface = df.MeshFunction('size_t', mesh, tdim - 1,
                                len(color_to_tag_map))
    values = interface.array()

    # Mark facets corresponding to a tagged pair by a color
    for color, tags in enumerate(color_to_tag_map):
        values[list(tagged_iface[tags].keys())] = color

    # Finally create an interface mesh for all the colors
    interface_mesh = EmbeddedMesh(interface, range(len(color_to_tag_map)))

    # Try to recognize meshes which violate the assumptions by counting
    assert not strict or is_continuous(interface_mesh)

    # And a subdomain mesh for each marker
    subdomain_meshes = {tag: EmbeddedMesh(subdomains, tag) for tag in markers}

    # Alloc the entity maps for the embedded mesh
    interface_map = {
        subdomain_meshes[tag].id(): [None] * interface_mesh.num_cells()
        for tag in markers
    }

    # The maps are filled by the following idea. Using the marking function
    # of the interface mesh one can get cells of that color and, using the
    # entity map for the (original) mesh, map the cells to mesh facets. A
    # color also corresponds to a pair of tags which identifies the two
    # meshes which share the facet - the facet is connected to 2 cells, one
    # for each mesh. The final step is then to map submesh cells to mesh
    # cells.

    # local submesh <- global of parent mesh
    sub_mesh_map = lambda tag: dict(
        (mesh_c, submesh_c) for submesh_c, mesh_c in enumerate(
            subdomain_meshes[tag].parent_entity_map[mesh.id()][tdim]))

    # The cell-cell connectivity of each submesh
    c2c = {tag: sub_mesh_map(tag) for tag in markers}
    # A connectivity of interface mesh cells to facets of the global mesh
    c2f = interface_mesh.parent_entity_map[mesh.id()][tdim - 1]

    for color, tags in enumerate(color_to_tag_map):
        # Precompute for the 2 tags
        submeshes = [subdomain_meshes[tag] for tag in tags]

        for cell in df.SubsetIterator(interface_mesh.marking_function, color):
            cell_index = cell.index()
            # The corresponding global cell facet
            facet = c2f[cell_index]
            # The two cells in global mesh numbering
            global_cells = tagged_iface[tags][facet]
            # Let's find the facet in the submesh
            for tag, gc, submesh in zip(tags, global_cells, submeshes):
                # The map uses the local cell
                local_cell = c2c[tag][gc]
                mesh_id = submesh.id()

                found = False
                for submesh_facet in df.facets(df.Cell(submesh, local_cell)):
                    found = df.near(
                        cell.midpoint().distance(submesh_facet.midpoint()),
                        0, tol)
                    if found:
                        interface_map[mesh_id][cell_index] = \
                            submesh_facet.index()
                        break

    # Collapse to list; I want list indexing
    subdomain_meshes = np.array([subdomain_meshes[m] for m in markers])
    color_map = [[markers.index(t) for t in tags]
                 for tags in color_to_tag_map]

    # Parent in the sense that the colored piece of interface
    # could have been created from mesh
    interface_mesh.parent_entity_map.update(
        dict((k, {tdim - 1: v}) for k, v in interface_map.items()))

    return subdomain_meshes, interface_mesh, color_map
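# A minimal usage sketch of mortar_meshes(), assuming dolfin as df and the
# list-based EmbeddedMesh variant above; strict=False sidesteps the
# is_continuous helper, which is not shown here.
def _demo_mortar():
    mesh = df.UnitSquareMesh(8, 8)
    subdomains = df.MeshFunction('size_t', mesh, 2, 1)
    df.CompiledSubDomain('x[0] > 0.5 - DOLFIN_EPS').mark(subdomains, 2)

    meshes, interface, cmap = mortar_meshes(subdomains, (1, 2), strict=False)
    # The interface between the two halves is the line x[0] = 0.5
    assert df.near(sum(c.volume() for c in df.cells(interface)), 1, 1E-10)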