Example 1
def vtk_ug_to_dolfin_mesh(ug):
    """
    Create a DOLFIN Mesh from a vtkUnstructuredGrid object
    """
    if not isinstance(ug, vtk.vtkUnstructuredGrid):
        raise TypeError("Expected a 'vtkUnstructuredGrid'")

    # Get mesh data
    num_cells = int(ug.GetNumberOfCells())
    num_vertices = int(ug.GetNumberOfPoints())

    # Get topological and geometrical dimensions
    cell = ug.GetCell(0)
    gdim = int(cell.GetCellDimension())
    cell_type = cell.GetCellType()
    if cell_type not in [vtk.VTK_TETRA, vtk.VTK_TRIANGLE]:
        raise TypeError("DOLFIN only support meshes of triangles " + \
                        "and tetrahedrons.")

    tdim = 3 if cell_type == vtk.VTK_TETRA else 2

    # Create empty DOLFIN Mesh
    mesh = Mesh()
    editor = MeshEditor()
    editor.open(mesh, tdim, gdim)
    editor.init_cells(num_cells)
    editor.init_vertices(num_vertices)
    editor.close()

    # Assign the cell and vertex information directly from the vtk data
    cells_array = array_handler.vtk2array(ug.GetCells().GetData())

    # Get the assumed fixed size of indices and create an index array
    cell_size = cell.GetPointIds().GetNumberOfIds()
    cellinds = np.arange(len(cells_array))

    # Every (cell_size + 1)-th data point is a cell-size entry and needs to
    # be deleted from the index array
    ind_delete = slice(0, len(cells_array), cell_size + 1)

    # Check that the removed values are all equal to the expected cell size
    if not np.all(cells_array[ind_delete] == cell_size):
        raise ValueError("Expected all cells to be of the same size")

    cellinds = np.delete(cellinds, ind_delete)

    # Get the cell array from the mesh, make it writeable (it is read-only
    # by default) and update the values
    mesh_cells = mesh.cells()
    mesh_cells.flags.writeable = True
    mesh_cells[:] = np.reshape(cells_array[cellinds],
                               (num_cells, cell_size))

    # Set coordinates from vtk data
    vertex_array = array_handler.vtk2array(ug.GetPoints().GetData())
    if vertex_array.shape[1] != gdim:
        vertex_array = vertex_array[:, :gdim]
    mesh.coordinates()[:] = vertex_array
    return mesh
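A minimal usage sketch for the converter above, assuming the grid is read from a hypothetical "mesh.vtu" file and that array_handler is a module providing vtk2array (for example tvtk.array_handler), which the snippet itself does not define:

import vtk

reader = vtk.vtkXMLUnstructuredGridReader()
reader.SetFileName("mesh.vtu")  # hypothetical input file
reader.Update()

dolfin_mesh = vtk_ug_to_dolfin_mesh(reader.GetOutput())
print(dolfin_mesh.num_vertices(), dolfin_mesh.num_cells())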
Example 2
def convert(ifilename, handler):
    """ Convert from Abaqus.

    The Abaqus format first defines a node block, then there should be a number
    of elements containing these nodes.
    """

    # Dictionary of nodes (maps node id to coordinates)
    nodes = {}

    # Dictionary of elements (maps cell id to list of cell nodes)
    elems = {}

    # Lists of nodes for given name (key)
    node_sets = {}

    # Lists of cells for given name (key)
    cell_sets = {}

    # Lists of surfaces for given name (key) in the format:
    # {'SS1': [set(['SS1_S1', 'S1']), set(['SS1_S4', 'S4'])]},
    # where SS1 is the name of the surface, SS1_S1 is the name of the
    # cell list whose first face is to be selected, ...
    surface_sets = {}

    # Open Abaqus file
    file = open(ifilename, 'r')
    csv_file = csv.reader(file, delimiter=',', skipinitialspace=True)

    node_set_name = None
    generate = None

    # Set initial state
    state = State.Init

    # Read data from input file
    for l in csv_file:

        # Sanity check
        if (len(l) == 0): print("Ooops, zero length.")

        if l[0].startswith('**'):  # Pass over comments
            continue
        elif l[0].startswith('*'):  # Have a keyword
            state = State.Unknown

            if l[0].lower() == "*heading":
                state = State.ReadHeading

            elif l[0].lower() == "*part":
                part_name = _read_part_name(l)

            elif l[0].lower() == "*end part":
                state = State.Invalid

            elif l[0].lower() == "*node":
                node_set_name = _create_node_list_entry(node_sets, l)
                state = State.ReadNodes

            elif l[0].lower() == "*element":
                cell_type, cell_set_name = _read_element_keywords(cell_sets, l)
                state = State.ReadCells

            elif l[0].lower() == "*nset":
                node_set_name, generate = _read_nset_keywords(node_sets, l)
                state = State.ReadNodeSet

            elif l[0].lower() == "*elset":
                cell_set_name, generate = _read_elset_keywords(cell_sets, l)
                if generate:
                    print("WARNING: generation of *elsets not tested.")
                state = State.ReadCellSet

            elif l[0].lower() == "*surface":
                surface_set_name, generate = _read_surface_keywords(
                    surface_sets, l)
                state = State.ReadSurfaceSet

            else:
                print("WARNING: unrecognised Abaqus input keyword:", l[0])
                state = State.Unknown

        else:

            if state == State.ReadHeading:
                model_name = _read_heading(l)

            elif state == State.ReadNodes:
                node_id = int(l[0]) - 1
                coords = [float(c) for c in l[1:]]
                nodes[node_id] = coords
                if node_set_name is not None:
                    node_sets[node_set_name].add(node_id)

            elif state == State.ReadCells:
                cell_id = int(l[0]) - 1
                cell_connectivity = [int(v) - 1 for v in l[1:]]
                elems[cell_id] = cell_connectivity
                if cell_set_name is not None:
                    cell_sets[cell_set_name].add(cell_id)

            elif state == State.ReadNodeSet:

                try:
                    if generate:
                        n0, n1, increment = l
                        node_range = list(
                            range(int(n0) - 1,
                                  int(n1) - 1, int(increment)))
                        node_range.append(int(n1) - 1)
                        node_sets[node_set_name].update(node_range)
                    else:
                        # Strip empty term at end of list, if present
                        if l[-1] == '': l.pop(-1)
                        node_range = [int(n) - 1 for n in l]
                        node_sets[node_set_name].update(node_range)
                except ValueError:
                    print("WARNING: Non-integer node sets not yet supported.")

            elif state == State.ReadCellSet:
                try:
                    if generate:
                        n0, n1, increment = l
                        cell_range = list(
                            range(int(n0) - 1,
                                  int(n1) - 1, int(increment)))
                        cell_range.append(int(n1) - 1)
                        cell_sets[cell_set_name].update(cell_range)
                    else:
                        # Strip empty term at end of list, if present
                        if l[-1] == '': l.pop(-1)
                        cell_range = [int(n) - 1 for n in l]
                        cell_sets[cell_set_name].update(cell_range)
                except ValueError:
                    print(
                        "WARNING: Non-integer element sets not yet supported.")

            elif state == State.ReadSurfaceSet:
                # Strip empty term at end of list, if present
                if l[-1] == '': l.pop(-1)
                surface_sets[surface_set_name].update([tuple(l)])

            elif state == State.Invalid:  # part
                raise Exception("Inavlid Abaqus parser state..")

    # Close CSV object
    file.close()
    del csv_file

    # Write data to XML file
    # Note that vertices/cells must be consecutively numbered, which
    # isn't necessarily the case in Abaqus. Therefore we enumerate and
    # translate original IDs to sequence indexes if gaps are present.

    # FIXME
    handler.set_mesh_type("tetrahedron", 3)

    process_facets = len(surface_sets) > 0
    if process_facets:
        try:
            from dolfin import MeshEditor, Mesh
        except ImportError:
            _error(
                "DOLFIN must be installed to handle Abaqus boundary regions")

        mesh = Mesh()
        mesh_editor = MeshEditor()
        mesh_editor.open(mesh, 3, 3)

    node_ids_order = {}
    # Check for gaps in vertex numbering
    node_ids = list(iterkeys(nodes))
    if len(node_ids) > 0:
        vertex_gap = (min(node_ids) != 0 or max(node_ids) != len(node_ids) - 1)
        for x, y in enumerate(node_ids):
            node_ids_order[y] = x  # Maps Abaqus IDs to Dolfin IDs
    else:
        vertex_gap = True

    # Check for gaps in cell numbering
    elemids = list(iterkeys(elems))
    if len(elemids) > 0:
        cell_gap = (min(elemids) != 0 or max(elemids) != len(elemids) - 1)
    else:
        cell_gap = True

    # Write vertices to XML file
    handler.start_vertices(len(nodes))
    if process_facets:
        mesh_editor.init_vertices_global(len(nodes), len(nodes))

    if not vertex_gap:

        for v_id, v_coords in list(iteritems(nodes)):
            handler.add_vertex(v_id, v_coords)
            if process_facets:
                mesh_editor.add_vertex(v_id, np.array(v_coords,
                                                      dtype=np.float_))

    else:

        for idx, (v_id, v_coords) in enumerate(iteritems(nodes)):
            handler.add_vertex(idx, v_coords)
            if process_facets:
                mesh_editor.add_vertex(idx, np.array(v_coords,
                                                     dtype=np.float_))

    handler.end_vertices()

    # Write cells to XML file
    handler.start_cells(len(elems))
    if process_facets:
        mesh_editor.init_cells_global(len(elems), len(elems))

    if not vertex_gap and not cell_gap:

        for c_index, c_data in list(iteritems(elems)):
            for v_id in c_data:
                if not (0 <= v_id < len(nodes)):
                    handler.error(
                        "Element %s references non-existent node %s" %
                        (c_index, v_id))
            handler.add_cell(c_index, c_data)

            if process_facets:
                c_data_tmp = np.array(c_data)
                c_data_tmp.sort()
                mesh_editor.add_cell(c_index,
                                     np.array(c_data_tmp, dtype=np.uintp))

    elif not vertex_gap and cell_gap:

        for idx, (c_index, c_data) in enumerate(iteritems(elems)):
            for v_id in c_data:
                if not (0 <= v_id < len(nodes)):
                    handler.error(
                        "Element %s references non-existent node %s" %
                        (c_index, v_id))
            handler.add_cell(idx, c_data)

            if process_facets:
                c_data_tmp = np.array(c_data)
                c_data_tmp.sort()
                mesh_editor.add_cell(idx, np.array(c_data_tmp, dtype=np.uintp))

    else:

        for idx, (c_id, c_data) in enumerate(iteritems(elems)):
            c_nodes = []
            for v_id in c_data:
                try:
                    c_nodes.append(node_ids_order[v_id])
                except KeyError:
                    handler.error(
                        "Element %s references non-existent node %s" %
                        (c_id, v_id))
            handler.add_cell(idx, c_nodes)

            if process_facets:
                c_nodes.sort()
                mesh_editor.add_cell(idx, np.array(c_nodes, dtype=np.uintp))

    handler.end_cells()

    # Write MeshValueCollections to XML file
    handler.start_domains()

    # Build an Abaqus node ID -> (DOLFIN cell ID, local entity) map
    # (the map is not unique, but that is irrelevant here)
    if len(node_sets) > 0:
        node_cell_map = {}
        for c_dolfin_index, (c_index, c_data) in enumerate(iteritems(elems)):
            c_data_tmp = np.array(c_data)
            c_data_tmp.sort()
            for local_entity, n_index in enumerate(c_data_tmp):
                node_cell_map[n_index] = (c_dolfin_index, local_entity)

    # Write vertex/node sets
    dim = 0
    for value, (name, node_set) in enumerate(iteritems(node_sets)):
        handler.start_mesh_value_collection(name, dim, len(node_set), "uint")

        for node in node_set:
            try:
                cell, local_entity = node_cell_map[node]
                handler.add_entity_mesh_value_collection(
                    dim, cell, value, local_entity=local_entity)
            except KeyError:
                print("Warning: Boundary references non-existent node %s" %
                      node)
        handler.end_mesh_value_collection()

    # Write cell/element sets
    dim = 3
    for name, s in list(iteritems(cell_sets)):
        handler.start_mesh_value_collection(name, dim, len(s), "uint")
        for cell in s:
            handler.add_entity_mesh_value_collection(dim, cell, 0)
        handler.end_mesh_value_collection()

    # Write surface sets
    if process_facets:
        dim = 2
        nodes_facet_map = _nodes_facet_map(mesh)

        data = [int(0)] * mesh.num_facets()
        S1 = [0, 1, 2]
        S2 = [0, 3, 1]
        S3 = [1, 3, 2]
        S4 = [2, 3, 0]
        node_selector = {
            'S1': S1,
            'S2': S2,
            'S3': S3,
            'S4': S4,
        }

        for index, (name, s) in enumerate(iteritems(surface_sets)):
            cell_face_list = []
            for cell_set_name, face_index in s:
                cell_face_list += [(cell, face_index)
                                   for cell in cell_sets[cell_set_name]]

            for cell, face in cell_face_list:
                cell_nodes = elems[cell]
                # Extract the face nodes
                face_nodes = [cell_nodes[i] for i in node_selector[face]]
                dolfin_face_nodes = [node_ids_order[n] for n in face_nodes]
                dolfin_face_nodes.sort()
                # Convert the face_nodes to dolfin IDs
                face_id = nodes_facet_map[tuple(dolfin_face_nodes)]
                data[face_id] = index + 1

        # Create and initialise the mesh function
        handler.start_meshfunction("facet_region", dim, mesh.num_facets())
        for index, physical_region in enumerate(data):
            handler.add_entity_meshfunction(index, physical_region)
        handler.end_meshfunction()

    handler.end_domains()
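The converter above relies on a State namespace that is not shown in the snippet. A minimal sketch of what it could look like, using only the state names that appear in the code (the definition in the original project may differ):

class State(object):
    Init = 0
    Unknown = 1
    Invalid = 2
    ReadHeading = 3
    ReadNodes = 4
    ReadCells = 5
    ReadNodeSet = 6
    ReadCellSet = 7
    ReadSurfaceSet = 8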
Example 3
def write_fenics_file(dim, ofilename):
    ofile  = File(ofilename + '.xml')
    mesh = Mesh()
    editor = MeshEditor()
    editor.open(mesh, dim, dim)
    editor.init_vertices(nodes.shape[1])
    editor.init_cells(len(cell_map))    
    for i in range(nodes.shape[1]):
        if dim == 2:
            editor.add_vertex(i, nodes[0, i], nodes[1, i])
        else:
            editor.add_vertex(i, nodes[0, i], nodes[1, i], nodes[2, i])
            
    for i in range(1, len(cell_map)+1):
        if dim == 2:
            editor.add_cell(i-1, cell_map[i][0]-1, cell_map[i][1]-1, cell_map[i][2]-1)
        else:
            editor.add_cell(i-1, cell_map[i][0]-1, cell_map[i][1]-1, cell_map[i][2]-1, cell_map[i][3]-1)
    
    mesh.order()
    mvc = mesh.domains().markers(dim-1)
    for zone, cells in boundary_cells.iteritems():
        for cell, nds in cells.iteritems():
            dolfin_cell = Cell(mesh, cell-1)
            nodes_of_cell = dolfin_cell.entities(0)
            #print cell
            #print nodes_of_cell
            nodes_of_face = nds - 1
            #print nodes_of_face
            for jj, ff in enumerate(facets(dolfin_cell)):
                facet_nodes = ff.entities(0)
                #print facet_nodes
                if all(map(lambda x: x in nodes_of_face, facet_nodes)):
                    local_index = jj
                    break
            mvc.set_value(cell-1, local_index, zone)
        
    ofile << mesh        
    from dolfin import plot
    plot(mesh, interactive=True)
    print 'Finished writing FEniCS mesh\n'
Example 4
def Triangle(left, bottom):
    """ Triangular mesh with just one triangular cell. """
    mesh = Mesh()
    editor = MeshEditor()
    editor.open(mesh, 2, 2)
    editor.init_vertices(3)
    editor.init_cells(1)
    editor.add_vertex(0, 0, 0)
    editor.add_vertex(1, bottom, 0)
    editor.add_vertex(2, 0, left)
    editor.add_cell(0, 0, 1, 2)
    editor.close()
    return mesh
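A quick usage sketch (values are illustrative): the call below builds a single right triangle with legs of length 2.0 along x and 1.0 along y, assuming the DOLFIN imports used by the function are in scope.

mesh = Triangle(left=1.0, bottom=2.0)
print(mesh.num_vertices())  # 3
print(mesh.num_cells())     # 1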
Example 5
 def get(self):
     """Build cells based on triangulated regular polygon."""
     sides, h, x, y = self.pad(self.values)
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 3, 3)  # dimension
     editor.init_vertices(sides + 2)
     editor.init_cells(sides)
     editor.add_vertex(0, x, y, h)
     for i in range(1, sides + 1):
         editor.add_vertex(i,
                           cos(2 * pi * i / sides),
                           sin(2 * pi * i / sides),
                           0)
     editor.add_vertex(sides + 1, 0, 0, 0)
     for i in range(sides - 1):
         editor.add_cell(i, 0, i + 1, i + 2, sides + 1)
     editor.add_cell(sides - 1, 0, sides, 1, sides + 1)
     editor.close()
     return mesh
Example 6
def fit2d(x0,
          y0,
          points,
          cells,
          eps,
          degree=1,
          verbose=False,
          solver='spsolve'):
    # Convert points, cells to dolfin mesh
    editor = MeshEditor()
    mesh = Mesh()
    # topological and geometrical dimension 2
    editor.open(mesh, 'triangle', 2, 2, 1)
    editor.init_vertices(len(points))
    editor.init_cells(len(cells))
    for k, point in enumerate(points):
        editor.add_vertex(k, point[:2])
    for k, cell in enumerate(cells.astype(numpy.uintp)):
        editor.add_cell(k, cell)
    editor.close()

    # Eps = numpy.array([[eps, eps], [eps, eps]])
    # Eps = numpy.array([[eps, 0], [0, eps]])
    Eps = numpy.array([[2 * eps, eps], [eps, 2 * eps]])
    # Eps = numpy.array([[1.0, 1.0], [1.0, 1.0]])

    return fit(x0,
               y0,
               mesh,
               Eps,
               degree=degree,
               verbose=verbose,
               solver=solver)
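A usage sketch for fit2d with two triangles covering the unit square; x0, y0 and the fit() helper it forwards to are assumed to be provided by the surrounding module, so the call itself is left commented out:

import numpy

points = numpy.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
cells = numpy.array([[0, 1, 2], [0, 2, 3]])
# u = fit2d(x0, y0, points, cells, eps=1.0e-2)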
Example 7
 def get(self):
     """Two cells."""
     a, b = self.pad(self.values)
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 2, 2)
     editor.init_vertices(4)
     editor.init_cells(2)
     editor.add_vertex(0, 0, 0)
     editor.add_vertex(1, 1, 0)
     editor.add_vertex(2, a, b)
     editor.add_vertex(3, a, -b)
     editor.add_cell(0, 0, 1, 2)
     editor.add_cell(1, 0, 1, 3)
     editor.close()
     return mesh
Example 8
    def get(self):
        """Build mesh from the tree of triangles created by reflections."""
        # get n
        n, k, a, b = self.pad(self.values)
        # adjust defaults based on n
        self.full[2:4] = self.shape[n]
        # get all parameters
        n, k, a, b = self.pad(self.values)
        if n == -1:
            self.vertices = self.bothvertices[k]
        else:
            tree = self.trees[(n, k)]
            self.triangles = []
            # angles are 2pi/a, 2pi/b
            side = np.sin(2 * pi / b) / np.sin(pi - 2 * pi / a - 2 * pi / b)
            self.vertices = [
                np.array([0.0, 0.0]), np.array([1.0, 0.0]), np.array(
                    [side * np.cos(2 * pi / a), side * np.sin(2 * pi / a)])]
            self.generate(tree, [0, 1, 2])

        mesh = Mesh()
        editor = MeshEditor()
        editor.open(mesh, 2, 2)
        editor.init_vertices(len(self.vertices))
        editor.init_cells(len(self.triangles))
        for i, v in enumerate(self.vertices):
            editor.add_vertex(i, *v)
        for i, t in enumerate(self.triangles):
            editor.add_cell(i, *t)
        editor.close()
        return mesh
Example 9
 def get(self):
     """Build cells based on triangulated regular polygon."""
     sides, h, x, y = self.pad(self.values)
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 3, 3)  # dimension
     editor.init_vertices(sides + 2)
     editor.init_cells(sides)
     editor.add_vertex(0, x, y, h)
     for i in range(1, sides + 1):
         editor.add_vertex(i, cos(2 * pi * i / sides),
                           sin(2 * pi * i / sides), 0)
     editor.add_vertex(sides + 1, 0, 0, 0)
     for i in range(sides - 1):
         editor.add_cell(i, 0, i + 1, i + 2, sides + 1)
     editor.add_cell(sides - 1, 0, sides, 1, sides + 1)
     editor.close()
     return mesh
Example 10
def reduced_mesh(mesh):
    '''
    Represent each branch as a single segment/cell in a new mesh.
    The returned mesh carries a map from new-mesh to old-mesh vertices.
    '''
    terminals, _ = find_branches(mesh)

    # Unique nodes; this list maps rmesh vertex index -> parent mesh vertex index
    nodes = list(map(int, set(sum(terminals, ()))))
    # Let's make reverse for purpose of creating the mesh
    old2new = {old: new for new, old in enumerate(nodes)}

    # Their coordinates
    x = mesh.coordinates()[nodes]

    rmesh = Mesh()
    editor = MeshEditor()

    editor.open(rmesh, 1, mesh.geometry().dim())
    editor.init_vertices(len(x))
    editor.init_cells(len(terminals))

    # Add vertices
    for vi, v in enumerate(x):
        editor.add_vertex(vi, v)

    # Add cells
    for ci, c in enumerate(terminals):
        editor.add_cell(ci, *map(lambda v: old2new[v], c))

    editor.close()

    # How to do this with MeshData
    rmesh.parent_vertex_indices = nodes

    return rmesh
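A usage sketch, assuming find_branches is defined alongside reduced_mesh (it is not shown here) and mesh is a DOLFIN mesh of a branching 1D network:

rmesh = reduced_mesh(mesh)
# vertex i of rmesh corresponds to vertex rmesh.parent_vertex_indices[i] of mesh
parents = rmesh.parent_vertex_indices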
Example 11
 def get(self):
     """One cell."""
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 3, 3)
     editor.init_vertices(4)
     editor.init_cells(1)
     editor.add_vertex(0, 0, 0, 0)
     editor.add_vertex(1, 1, 1, 0)
     editor.add_vertex(2, 0, 1, 1)
     editor.add_vertex(3, 1, 0, 1)
     editor.add_cell(0, 0, 1, 2, 3)
     editor.close()
     return mesh
Example 12
 def get(self):
     """Eight cells."""
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 3, 3)
     editor.init_vertices(7)
     editor.init_cells(8)
     editor.add_vertex(0, 1, 0, 0)
     editor.add_vertex(1, 0, 1, 0)
     editor.add_vertex(2, 0, 0, 1)
     editor.add_vertex(3, -1, 0, 0)
     editor.add_vertex(4, 0, -1, 0)
     editor.add_vertex(5, 0, 0, -1)
     editor.add_vertex(6, 0, 0, 0)
     editor.add_cell(0, 6, 0, 1, 2)
     editor.add_cell(1, 6, 0, 1, 5)
     editor.add_cell(2, 6, 0, 4, 2)
     editor.add_cell(3, 6, 0, 4, 5)
     editor.add_cell(4, 6, 3, 1, 2)
     editor.add_cell(5, 6, 3, 1, 5)
     editor.add_cell(6, 6, 3, 4, 2)
     editor.add_cell(7, 6, 3, 4, 5)
     editor.close()
     return mesh
Example 13
 def get(self):
     """One triangle per side in smaller, two triangles in larger."""
     sides, R = self.pad(self.values)
     mesh = Mesh()
     large = [
         np.array((cos(2 * pi * i / sides), sin(2 * pi * i / sides)))
         for i in range(1, sides + 1)
     ]
     small = np.array([v * R for v in large])
     # centers of edges in large polygon
     center = np.array([(v + w) / 2
                        for v, w in zip(large, large[1:] + [large[0]])])
     large = np.array(large)
     editor = MeshEditor()
     editor.open(mesh, 2, 2)
     editor.init_vertices(3 * sides)
     editor.init_cells(3 * sides)
     for i in range(sides):
         editor.add_vertex(3 * i, *large[i])
         editor.add_vertex(3 * i + 1, *small[i])
         editor.add_vertex(3 * i + 2, *center[i])
     for i, j in zip(range(sides), range(1, sides) + [0]):
         editor.add_cell(3 * i, 3 * i, 3 * i + 1, 3 * i + 2)
         editor.add_cell(3 * i + 1, 3 * i + 1, 3 * i + 2, 3 * j + 1)
         editor.add_cell(3 * i + 2, 3 * i + 2, 3 * j + 1, 3 * j)
     editor.close()
     return mesh
Example 14
    def get(self):
        """Build mesh from the tree of triangles created by reflections."""
        # get n
        n, k, a, b = self.pad(self.values)
        # adjust defaults based on n
        self.full[2:4] = self.shape[n]
        # get all parameters
        n, k, a, b = self.pad(self.values)
        if n == -1:
            self.vertices = self.bothvertices[k]
        else:
            tree = self.trees[(n, k)]
            self.triangles = []
            # angles are 2pi/a, 2pi/b
            side = np.sin(2 * pi / b) / np.sin(pi - 2 * pi / a - 2 * pi / b)
            self.vertices = [
                np.array([0.0, 0.0]),
                np.array([1.0, 0.0]),
                np.array(
                    [side * np.cos(2 * pi / a), side * np.sin(2 * pi / a)])
            ]
            self.generate(tree, [0, 1, 2])

        mesh = Mesh()
        editor = MeshEditor()
        editor.open(mesh, 2, 2)
        editor.init_vertices(len(self.vertices))
        editor.init_cells(len(self.triangles))
        for i, v in enumerate(self.vertices):
            editor.add_vertex(i, *v)
        for i, t in enumerate(self.triangles):
            editor.add_cell(i, *t)
        editor.close()
        return mesh
Example 15
 def get(self):
     """Build vertices from polar coordinates."""
     angle, dist = self.values
     if len(angle) < 3:
         angle = np.array(range(int(angle[0]))) * 360.0 / angle[0]
     while len(dist) < len(angle):
         dist = dist * 2
     dist = np.array(dist)
     sides = len(angle)
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 2, 2)
     editor.init_vertices(sides + 1)
     editor.init_cells(sides)
     editor.add_vertex(0, 0, 0)
     for i in range(1, sides + 1):
         editor.add_vertex(i, dist[i - 1] * cos(angle[i - 1] / 180.0 * pi),
                           dist[i - 1] * sin(angle[i - 1] / 180.0 * pi))
     for i in range(sides - 1):
         editor.add_cell(i, 0, i + 1, i + 2)
     editor.add_cell(sides - 1, 0, sides, 1)
     editor.close()
     return mesh
Example 16
def gmsh2xml(ifilename, handler):
    """Convert between .gmsh v2.0 format (http://www.geuz.org/gmsh/) and .xml,
    parser implemented as a state machine:

        0 = read 'MeshFormat'
        1 = read  mesh format data
        2 = read 'EndMeshFormat'
        3 = read 'Nodes'
        4 = read  number of vertices
        5 = read  vertices
        6 = read 'EndNodes'
        7 = read 'Elements'
        8 = read  number of cells
        9 = read  cells
        10 = done

    Afterwards, extract physical region numbers if they are defined in
    the mesh file as a mesh function.

    """

    print "Converting from Gmsh format (.msh, .gmsh) to DOLFIN XML format"

    # The dimension of the gmsh element types supported here as well as the dolfin cell types for each dimension
    gmsh_dim = {15: 0, 1: 1, 2: 2, 4: 3}
    cell_type_for_dim = {1: "interval", 2: "triangle", 3: "tetrahedron" }
    # the gmsh element types supported for conversion
    supported_gmsh_element_types = [1, 2, 4, 15]

    # Open files
    ifile = open(ifilename, "r")

    # Scan file for cell type
    cell_type = None
    highest_dim = 0
    line = ifile.readline()
    while line:

        # Remove newline
        if line[-1] == "\n":
            line = line[:-1]

        # Read dimension
        if line.find("$Elements") == 0:

            line = ifile.readline()
            num_elements = int(line)
            if num_elements == 0:
                _error("No elements found in gmsh file.")
            line = ifile.readline()

            # Now iterate through elements to find largest dimension.  Gmsh
            # format might include elements of lower dimensions in the element list.
            # We also need to count number of elements of correct dimensions.
            # Also determine which vertices are not used.
            dim_count = {0: 0, 1: 0, 2: 0, 3: 0}
            vertices_used_for_dim = {0: [], 1: [], 2: [], 3: []}
            # Array used to store gmsh tags for 1D (type 1/line), 2D (type 2/triangular) elements and 3D (type 4/tet) elements
            tags_for_dim = {0: [], 1: [], 2: [], 3: []}

            while line.find("$EndElements") == -1:
                element = line.split()
                elem_type = int(element[1])
                num_tags = int(element[2])
                if elem_type in supported_gmsh_element_types:
                    dim = gmsh_dim[elem_type]
                    if highest_dim < dim:
                        highest_dim = dim
                    node_num_list = [int(node) for node in element[3 + num_tags:]]
                    vertices_used_for_dim[dim].extend(node_num_list)
                    if num_tags > 0:
                        tags_for_dim[dim].append(tuple(int(tag) for tag in element[3:3+num_tags]))
                    dim_count[dim] += 1
                else:
                    #TODO: output a warning here. "gmsh element type %d not supported" % elem_type
                    pass
                line = ifile.readline()
        else:
            # Read next line
            line = ifile.readline()

    # Check that we got the cell type and set num_cells_counted
    if highest_dim == 0:
        _error("Unable to find cells of supported type.")

    num_cells_counted = dim_count[highest_dim]
    vertex_set = set(vertices_used_for_dim[highest_dim])
    vertices_used_for_dim[highest_dim] = None

    vertex_dict = {}
    for n,v in enumerate(vertex_set):
        vertex_dict[v] = n

    # Step to beginning of file
    ifile.seek(0)

    # Set mesh type
    handler.set_mesh_type(cell_type_for_dim[highest_dim], highest_dim)

    # Initialise node list (gmsh does not export all vertexes in order)
    nodelist = {}

    # Current state
    state = 0

    # Write data
    num_vertices_read = 0
    num_cells_read = 0

    # Only import the dolfin objects if facet markings exist
    process_facets = False
    if len(tags_for_dim[highest_dim-1]) > 0:
        # first construct the mesh
        try:
            from dolfin import MeshEditor, Mesh
        except ImportError:
            _error("DOLFIN must be installed to handle Gmsh boundary regions")
        mesh = Mesh()
        mesh_editor = MeshEditor ()
        mesh_editor.open( mesh, highest_dim, highest_dim )
        process_facets = True
    else:
        # TODO: Output a warning or an error here
        mesh = None

    while state != 10:

        # Read next line
        line = ifile.readline()
        if not line: break

        # Skip comments
        if line[0] == '#':
            continue

        # Remove newline
        if line[-1] == "\n":
            line = line[:-1]

        if state == 0:
            if line == "$MeshFormat":
                state = 1
        elif state == 1:
            (version, file_type, data_size) = line.split()
            state = 2
        elif state == 2:
            if line == "$EndMeshFormat":
                state = 3
        elif state == 3:
            if line == "$Nodes":
                state = 4
        elif state == 4:
            num_vertices = len(vertex_dict)
            handler.start_vertices(num_vertices)
            if process_facets:
                mesh_editor.init_vertices ( num_vertices )
            state = 5
        elif state == 5:
            (node_no, x, y, z) = line.split()
            node_no = int(node_no)
            x,y,z = [float(xx) for xx in (x,y,z)]
            if node_no in vertex_dict:
                node_no = vertex_dict[node_no]
            else:
                continue
            nodelist[int(node_no)] = num_vertices_read
            handler.add_vertex(num_vertices_read, [x, y, z])
            if process_facets:
                if highest_dim == 1:
                    coords = numpy.array([x])
                elif highest_dim == 2:
                    coords = numpy.array([x, y])
                elif highest_dim == 3:
                    coords = numpy.array([x, y, z])
                mesh_editor.add_vertex(num_vertices_read, coords)

            num_vertices_read +=1

            if num_vertices == num_vertices_read:
                handler.end_vertices()
                state = 6
        elif state == 6:
            if line == "$EndNodes":
                state = 7
        elif state == 7:
            if line == "$Elements":
                state = 8
        elif state == 8:
            handler.start_cells(num_cells_counted)
            if process_facets:
                mesh_editor.init_cells( num_cells_counted )

            state = 9
        elif state == 9:
            element = line.split()
            elem_type = int(element[1])
            num_tags  = int(element[2])
            if elem_type in supported_gmsh_element_types:
                dim = gmsh_dim[elem_type]
            else:
                dim = 0
            if dim == highest_dim:
                node_num_list = [vertex_dict[int(node)] for node in element[3 + num_tags:]]
                for node in node_num_list:
                    if not node in nodelist:
                        _error("Vertex %d of %s %d not previously defined." %
                              (node, cell_type_for_dim[dim], num_cells_read))
                cell_nodes = [nodelist[n] for n in node_num_list]
                handler.add_cell(num_cells_read, cell_nodes)

                if process_facets:
                    cell_nodes = numpy.array([nodelist[n] for n in node_num_list], dtype=numpy.uintp)
                    mesh_editor.add_cell(num_cells_read, cell_nodes)

                num_cells_read +=1

            if num_cells_counted == num_cells_read:
                handler.end_cells()
                if process_facets:
                    mesh_editor.close()
                state = 10
        elif state == 10:
            break

    # Write mesh function based on the Physical Regions defined by
    # gmsh, but only if they are not all zero. All zero physical
    # regions indicate that no physical regions were defined.
    if highest_dim not in [1,2,3]:
        _error("Gmsh tags not supported for dimension %i. Probably a bug" % dim)

    tags = tags_for_dim[highest_dim]
    physical_regions = tuple(tag[0] for tag in tags)
    if not all(tag == 0 for tag in physical_regions):
        handler.start_meshfunction("physical_region", dim, num_cells_counted)
        for i, physical_region in enumerate(physical_regions):
            handler.add_entity_meshfunction(i, physical_region)
        handler.end_meshfunction()

    # Now process the facet markers
    tags = tags_for_dim[highest_dim-1]
    if (len(tags) > 0) and (mesh is not None):
        physical_regions = tuple(tag[0] for tag in tags)
        if not all(tag == 0 for tag in physical_regions):
            mesh.init(highest_dim-1,0)

            # Get the facet-node connectivity information (reshape as a row of node indices per facet)
            if highest_dim==1:
              # for 1d meshes the mesh topology returns the vertex to vertex map, which isn't what we want
              # as facets are vertices
              facets_as_nodes = numpy.array([[i] for i in range(mesh.num_facets())])
            else:
              facets_as_nodes = mesh.topology()(highest_dim-1,0)().reshape ( mesh.num_facets(), highest_dim )

            # Build the reverse map
            nodes_as_facets = {}
            for facet in range(mesh.num_facets()):
              nodes_as_facets[tuple(facets_as_nodes[facet,:])] = facet

            data = [int(0*k) for k in range(mesh.num_facets()) ]
            for i, physical_region in enumerate(physical_regions):
                nodes = [n-1 for n in vertices_used_for_dim[highest_dim-1][highest_dim*i:(highest_dim*i+highest_dim)]]
                nodes.sort()

                if physical_region != 0:
                    try:
                        index = nodes_as_facets[tuple(nodes)]
                        data[index] = physical_region
                    except IndexError:
                        raise Exception ( "The facet (%d) was not found to mark: %s" % (i, nodes) )

            # Create and initialise the mesh function
            handler.start_meshfunction("facet_region", highest_dim-1, mesh.num_facets() )
            for index, physical_region in enumerate ( data ):
                handler.add_entity_meshfunction(index, physical_region)
            handler.end_meshfunction()

    # Check that we got all data
    if state == 10:
        print "Conversion done"
    else:
       _error("Missing data, unable to convert \n\ Did you use version 2.0 of the gmsh file format?")

    # Close files
    ifile.close()
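For orientation, a minimal Gmsh 2.0 ASCII file that the state machine above accepts could look as follows (values are illustrative; each element line reads "id type num-tags tags... nodes...", here a single triangle with physical tag 99 and geometrical tag 1):

$MeshFormat
2.0 0 8
$EndMeshFormat
$Nodes
3
1 0.0 0.0 0.0
2 1.0 0.0 0.0
3 0.0 1.0 0.0
$EndNodes
$Elements
1
1 2 2 99 1 1 2 3
$EndElements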
Example 17
 def get(self):
     """Part of the regular polygon construction."""
     sides, angle = self.pad(self.values)
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 2, 2)  # dimension
     editor.init_vertices(angle + 2)
     editor.init_cells(angle)
     editor.add_vertex(0, 0, 0)
     for i in range(0, angle + 1):
         editor.add_vertex(i + 1,
                           cos(2 * pi * i / sides),
                           sin(2 * pi * i / sides))
     editor.add_cell(0, 2, 1, 0)
     for i in range(1, angle):
         editor.add_cell(i, 0, i + 1, i + 2)
     editor.close()
     return mesh
Example 18
 def get(self):
     """Single cell."""
     a, b, c, d, e = self.pad(self.values)
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 3, 3)  # dimension
     editor.init_vertices(4)
     editor.init_cells(1)
     editor.add_vertex(0, 0, 0, 0)
     editor.add_vertex(1, 1, 0, 0)
     editor.add_vertex(2, a, b, 0)
     editor.add_vertex(3, c, d, e)
     editor.add_cell(0, 0, 1, 2, 3)
     editor.close()
     return mesh
Example 19
 def get(self):
     """Just one cell."""
     topx, topy = self.pad(self.values)
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 2, 2)
     editor.init_vertices(3)
     editor.init_cells(1)
     editor.add_vertex(0, 0, 0)
     editor.add_vertex(1, 1, 0)
     editor.add_vertex(2, topx, topy)
     editor.add_cell(0, 0, 1, 2)
     editor.close()
     return mesh
Example 20
 def get(self):
     """One triangle per side with common vertex at (0,0)."""
     sides = self.values[0]
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 2, 2)
     editor.init_vertices(sides + 1)
     editor.init_cells(sides)
     editor.add_vertex(0, 0, 0)
     for i in range(1, sides + 1):
         editor.add_vertex(i,
                           cos(2 * pi * i / sides),
                           sin(2 * pi * i / sides))
     for i in range(sides - 1):
         editor.add_cell(i, 0, i + 1, i + 2)
     editor.add_cell(sides - 1, 0, sides, 1)
     editor.close()
     return mesh
Example 21
def get_original_mesh(t_mesh_path, mesh, par):
    def transform(coordinates, xi, rho):
        S = np.exp(coordinates[:, 0] +
                   coordinates[:, 1] * rho / np.sqrt(1 - rho**2))
        v = 50 * coordinates[:, 1] * xi / np.sqrt(1 - rho**2)
        new_coords = np.column_stack((S, v))
        return new_coords

    if os.path.exists(t_mesh_path):
        t_mesh = Mesh()
        with XDMFFile(t_mesh_path) as f:
            f.read(t_mesh)
        return t_mesh

    # Construct the mesh under transformed variables
    new_coords = transform(mesh.coordinates(), xi=par.xi, rho=par.rho)

    t_mesh = Mesh()
    editor = MeshEditor()
    editor.open(t_mesh, 'triangle', 2, 2)
    editor.init_vertices(len(new_coords))
    editor.init_cells(len(mesh.cells()))

    for i, vertex in enumerate(new_coords):
        editor.add_vertex(i, vertex)
    for i, cell in enumerate(mesh.cells()):
        editor.add_cell(i, cell)
    editor.close()

    with XDMFFile(t_mesh_path) as f:
        f.write(t_mesh)
    return t_mesh
Example 22
 def get(self):
     """Two cells."""
     a, b = self.pad(self.values)
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 2, 2)
     editor.init_vertices(4)
     editor.init_cells(2)
     editor.add_vertex(0, 0, 0)
     editor.add_vertex(1, 1, 0)
     editor.add_vertex(2, a, b)
     editor.add_vertex(3, a, -b)
     editor.add_cell(0, 0, 1, 2)
     editor.add_cell(1, 0, 1, 3)
     editor.close()
     return mesh
Example 23
def barycentric_refine(mesh):

   from dolfin import Mesh, MeshEditor
   
   # the extension to 3d is straightforward but not yet implemented
   assert mesh.topology().dim() == 2

   # barycentric refinement
   v = mesh.coordinates()
   t = mesh.cells()

   b = (v[t[:,0],:]+v[t[:,1],:]+v[t[:,2],:])/3
   
   mesh = Mesh()
   editor = MeshEditor()
   editor.open(mesh, 2, 2);

   # add vertices to mesh
   nv0 = len(v)
   nv1 = nv0 + len(t)
   editor.init_vertices(nv1)
   for i, vi in enumerate(v):
     editor.add_vertex(i, vi[0], vi[1])
   for i, vi in enumerate(b):
     editor.add_vertex(i + nv0, vi[0], vi[1])

   # add cells to the mesh
   nt1 = 3*len(t)
   editor.init_cells(nt1)
   for i, ti in enumerate(t):
     editor.add_cell(i*3+0, ti[0], ti[1], nv0 + i)
     editor.add_cell(i*3+1, ti[1], ti[2], nv0 + i)
     editor.add_cell(i*3+2, ti[2], ti[0], nv0 + i)

   # done: create and return mesh object
   editor.close()
   return mesh
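A short usage sketch (UnitSquareMesh is a stand-in input; any 2D DOLFIN mesh works). Each barycentric refinement step adds one vertex per cell and triples the number of cells:

from dolfin import UnitSquareMesh

mesh = UnitSquareMesh(4, 4)
refined = barycentric_refine(mesh)
assert refined.num_cells() == 3 * mesh.num_cells()
assert refined.num_vertices() == mesh.num_vertices() + mesh.num_cells()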
Example 24
 def get(self):
     """One cell."""
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 3, 3)
     editor.init_vertices(4)
     editor.init_cells(1)
     editor.add_vertex(0, 0, 0, 0)
     editor.add_vertex(1, 1, 1, 0)
     editor.add_vertex(2, 0, 1, 1)
     editor.add_vertex(3, 1, 0, 1)
     editor.add_cell(0, 0, 1, 2, 3)
     editor.close()
     return mesh
Example 25
    def __init__(self, centers, J, n):
        self.centers = centers
        self.J = J

        self.target = 0.002
        self.J /= self.target

        # dir_path = os.path.dirname(os.path.realpath(__file__))
        # with open(os.path.join(dir_path, '../colorio/data/gamut_triangulation.yaml')) as f:
        #     data = yaml.safe_load(f)

        # self.points = numpy.column_stack([
        #     data['points'], numpy.zeros(len(data['points']))
        #     ])
        # self.cells = numpy.array(data['cells'])

        # self.points, self.cells = colorio.xy_gamut_mesh(0.15)

        self.points, self.cells = meshzoo.triangle(n,
                                                   corners=numpy.array(
                                                       [[0.0, 0.0], [1.0, 0.0],
                                                        [0.0, 1.0]]))

        # https://bitbucket.org/fenics-project/dolfin/issues/845/initialize-mesh-from-vertices
        editor = MeshEditor()
        mesh = Mesh()
        editor.open(mesh, "triangle", 2, 2)
        editor.init_vertices(self.points.shape[0])
        editor.init_cells(self.cells.shape[0])
        for k, point in enumerate(self.points):
            editor.add_vertex(k, point)
        for k, cell in enumerate(self.cells):
            editor.add_cell(k, cell)
        editor.close()

        self.V = FunctionSpace(mesh, "CG", 1)
        self.Vgrad = VectorFunctionSpace(mesh, "DG", 0)

        # self.ux0 = Function(self.V)
        # self.uy0 = Function(self.V)

        # 0 starting guess
        # ax = numpy.zeros(self.V.dim())
        # ay = numpy.zeros(self.V.dim())

        # Use F(x, y) = (x, y) as starting guess
        self.ux0 = project(Expression("x[0]", degree=1), self.V)
        self.uy0 = project(Expression("x[1]", degree=1), self.V)
        ax = self.ux0.vector().get_local()
        ay = self.uy0.vector().get_local()
        # Note that alpha doesn't contain the values in the order that one might expect,
        # see
        # <https://www.allanswered.com/post/awevg/projectexpressionx0-v-vector-get_local-not-in-order/>.
        self.alpha = numpy.concatenate([ax, ay])

        self.num_f_eval = 0

        # Build L as scipy.csr_matrix
        u = TrialFunction(self.V)
        v = TestFunction(self.V)
        L = assemble(dot(grad(u), grad(v)) * dx)
        Lmat = as_backend_type(L).mat()
        indptr, indices, data = Lmat.getValuesCSR()

        size = Lmat.getSize()
        self.L = sparse.csr_matrix((data, indices, indptr), shape=size)
        self.LT = self.L.getH()

        self.dx, self.dy = build_grad_matrices(self.V, centers)
        self.dxT = self.dx.getH()
        self.dyT = self.dy.getH()
        return
Example 26
 def __setstate__(self, d):
     """pickling restore"""
     # mesh
     verts = d['coordinates']
     elems = d['cells']
     dim = verts.shape[1]
     mesh = Mesh()
     ME = MeshEditor()
     ME.open(mesh, dim, dim)
     ME.init_vertices(verts.shape[0])
     ME.init_cells(elems.shape[0])
     for i, v in enumerate(verts):
         ME.add_vertex(i, v[0], v[1])
     for i, c in enumerate(elems):
         ME.add_cell(i, c[0], c[1], c[2])
     ME.close()
     # function space
     if d['num_subspaces'] > 1:
         V = VectorFunctionSpace(mesh, d['family'], d['degree'])
     else:
         V = FunctionSpace(mesh, d['family'], d['degree'])
     # vector
     v = Function(V)
     v.vector()[:] = d['array']
     self._fefunc = v
Example 27
def main():
    def round_trip_connect(start, end):
      result = []
      for i in range(start, end):
        result.append((i, i+1))
      result.append((end, start))
      return result

    corners, mesh1, mesh2 = generate_meshes(2, 1, 0.3)
    points = get_vertices(corners, mesh1, mesh2)
    print "points", np.array(points)

    info = triangle.MeshInfo()
    info.set_points(points)
    info.set_facets(round_trip_connect(0, len(corners)-1))

    mesh = triangle.build(info, allow_volume_steiner=False, allow_boundary_steiner=False, min_angle=60)

    if False:
        print "vertices:"
        for i, p in enumerate(mesh.points):
            print i, p
        print "point numbers in triangles:"
        for i, t in enumerate(mesh.elements):
            print i, t

    finemesh = Mesh()
    ME = MeshEditor()
    ME.open(finemesh,2,2)
    ME.init_vertices(len(mesh.points))
    ME.init_cells(len(mesh.elements))
    for i,v in enumerate(mesh.points):
        ME.add_vertex(i,v[0],v[1])
    for i,c in enumerate(mesh.elements):
        ME.add_cell(i,c[0],c[1],c[2])
    ME.close()

    triangle.write_gnuplot_mesh("triangles.dat", mesh)

    plot(mesh1)
    plot(mesh2)
    plot(finemesh)
    interactive()
Example 28
def create_submesh(mesh, markers, marker):
    "This function allows for a SubMesh-equivalent to be created in parallel"
    # Build mesh
    submesh = Mesh()
    mesh_editor = MeshEditor()
    mesh_editor.open(submesh,
                     mesh.ufl_cell().cellname(),
                     mesh.ufl_cell().topological_dimension(),
                     mesh.ufl_cell().geometric_dimension())

    # Return empty mesh if no matching markers
    if MPI.sum(mpi_comm_world(), int(marker in markers.array())) == 0:
        cbc_warning(
            "Unable to find matching markers in meshfunction. Submesh is empty."
        )
        mesh_editor.close()
        return submesh

    base_cell_indices = np.where(markers.array() == marker)[0]
    base_cells = mesh.cells()[base_cell_indices]
    base_vertex_indices = np.unique(base_cells.flatten())

    base_global_vertex_indices = sorted(
        [mesh.topology().global_indices(0)[vi] for vi in base_vertex_indices])

    gi = mesh.topology().global_indices(0)
    shared_local_indices = set(base_vertex_indices).intersection(
        set(mesh.topology().shared_entities(0).keys()))
    shared_global_indices = [gi[vi] for vi in shared_local_indices]

    unshared_global_indices = list(
        set(base_global_vertex_indices) - set(shared_global_indices))
    unshared_vertices_dist = distribution(len(unshared_global_indices))

    # Number unshared vertices on separate process
    idx = sum(unshared_vertices_dist[:MPI.rank(mpi_comm_world())])
    base_to_sub_global_indices = {}
    for gi in unshared_global_indices:
        base_to_sub_global_indices[gi] = idx
        idx += 1

    # Gather all shared process on process 0 and assign global index
    all_shared_global_indices = gather(shared_global_indices,
                                       on_process=0,
                                       flatten=True)
    all_shared_global_indices = np.unique(all_shared_global_indices)

    shared_base_to_sub_global_indices = {}
    idx = int(
        MPI.max(mpi_comm_world(),
                float(max(base_to_sub_global_indices.values() + [-1e16]))) + 1)
    if MPI.rank(mpi_comm_world()) == 0:
        for gi in all_shared_global_indices:
            shared_base_to_sub_global_indices[int(gi)] = idx
            idx += 1

    # Broadcast global numbering of all shared vertices
    shared_base_to_sub_global_indices = dict(
        zip(broadcast(shared_base_to_sub_global_indices.keys(), 0),
            broadcast(shared_base_to_sub_global_indices.values(), 0)))

    # Join shared and unshared numbering in one dict
    base_to_sub_global_indices = dict(
        base_to_sub_global_indices.items() +
        shared_base_to_sub_global_indices.items())

    # Create mapping of local indices
    base_to_sub_local_indices = dict(
        zip(base_vertex_indices, range(len(base_vertex_indices))))

    # Define sub-cells
    sub_cells = [None] * len(base_cells)
    for i, c in enumerate(base_cells):
        sub_cells[i] = [base_to_sub_local_indices[j] for j in c]

    # Store vertices as sub_vertices[local_index] = (global_index, coordinates)
    sub_vertices = {}
    for base_local, sub_local in base_to_sub_local_indices.items():
        sub_vertices[sub_local] = (base_to_sub_global_indices[
            mesh.topology().global_indices(0)[base_local]],
                                   mesh.coordinates()[base_local])

    ## Done with base mesh

    # Distribute meshdata on (if any) empty processes
    sub_cells, sub_vertices = distribute_meshdata(sub_cells, sub_vertices)
    global_cell_distribution = distribution(len(sub_cells))
    #global_vertex_distribution = distribution(len(sub_vertices))

    global_num_cells = MPI.sum(mpi_comm_world(), len(sub_cells))
    global_num_vertices = sum(unshared_vertices_dist) + MPI.sum(
        mpi_comm_world(), len(all_shared_global_indices))

    mesh_editor.init_vertices(len(sub_vertices))
    #mesh_editor.init_cells(len(sub_cells))
    mesh_editor.init_cells_global(len(sub_cells), global_num_cells)
    global_index_start = sum(
        global_cell_distribution[:MPI.rank(mesh.mpi_comm())])

    for index, cell in enumerate(sub_cells):
        if LooseVersion(dolfin_version()) >= LooseVersion("1.6.0"):
            mesh_editor.add_cell(index, *cell)
        else:
            mesh_editor.add_cell(int(index), global_index_start + index,
                                 np.array(cell, dtype=np.uintp))

    for local_index, (global_index, coordinates) in sub_vertices.items():
        #print coordinates
        mesh_editor.add_vertex_global(int(local_index), int(global_index),
                                      coordinates)

    mesh_editor.close()

    submesh.topology().init(0, len(sub_vertices), global_num_vertices)
    submesh.topology().init(mesh.ufl_cell().topological_dimension(),
                            len(sub_cells), global_num_cells)

    # FIXME: Set up shared entities
    # What damage does this do?
    submesh.topology().shared_entities(0)[0] = []
    # The code below sets up shared vertices, but lacks shared facets.
    # It is considered incomplete, and therefore commented out
    '''
    #submesh.topology().shared_entities(0)[0] = []
    from dolfin import compile_extension_module
    cpp_code = """
    void set_shared_entities(Mesh& mesh, std::size_t idx, const Array<std::size_t>& other_processes)
    {
        std::set<unsigned int> set_other_processes;
        for (std::size_t i=0; i<other_processes.size(); i++)
        {
            set_other_processes.insert(other_processes[i]);
            //std::cout << idx << " --> " << other_processes[i] << std::endl;
        }
        //std::cout << idx << " --> " << set_other_processes[0] << std::endl;
        mesh.topology().shared_entities(0)[idx] = set_other_processes;
    }
    """

    set_shared_entities = compile_extension_module(cpp_code).set_shared_entities
    base_se = mesh.topology().shared_entities(0)
    se = submesh.topology().shared_entities(0)

    for li in shared_local_indices:
        arr = np.array(base_se[li], dtype=np.uintp)
        sub_li = base_to_sub_local_indices[li]
        set_shared_entities(submesh, base_to_sub_local_indices[li], arr)
    '''
    return submesh
Example 29
def create_submesh(mesh, markers):
    mpi_comm = mesh.mpi_comm()
    if not has_pybind11():
        mpi_comm = mpi_comm.tompi4py()
    assert isinstance(markers, MeshFunctionBool)
    assert markers.dim() == mesh.topology().dim()
    marker_id = True
    
    # == 1. Extract marked cells == #
    # Dolfin does not support a distributed mesh that is empty on some processes.
    # cbcpost gets around this by moving a single cell from a non-empty processor to an empty one.
    # Note, however, that this cannot work if the number of marked cells is less than the number of processors.
    # To cover this case, we enable at least one cell (arbitrarily) on each processor.
    # We find this solution acceptable for our purposes, despite the increase of the reduced mesh size,
    # since we are never actually interested in solving a PDE on the reduced mesh, but only in
    # assembling tensors on it and extracting their values at some locations.
    backup_first_marker_id = None
    if marker_id not in markers.array():
        backup_first_marker_id = markers.array()[0]
        markers.array()[0] = marker_id
    assert marker_id in markers.array()
    
    # == 2. Create submesh == #
    submesh = Mesh(mesh.mpi_comm())
    mesh_editor = MeshEditor()
    mesh_editor.open(submesh,
                     mesh.ufl_cell().cellname(),
                     mesh.ufl_cell().topological_dimension(),
                     mesh.ufl_cell().geometric_dimension())
    # Extract cells from mesh with specified marker_id
    mesh_cell_indices = where(markers.array() == marker_id)[0]
    mesh_cells = mesh.cells()[mesh_cell_indices]
    mesh_global_cell_indices = sorted([mesh.topology().global_indices(mesh.topology().dim())[cell_index] for cell_index in mesh_cell_indices])
    # Get vertices of extracted cells
    mesh_vertex_indices = unique(mesh_cells.flatten())
    mesh_global_vertex_indices = sorted([mesh.topology().global_indices(0)[vertex_index] for vertex_index in mesh_vertex_indices])
    # Number vertices in a way which is independent from the number of processors. To do so ...
    # ... first of all collect all vertices from all processors
    allgathered_mesh_global_vertex_indices__non_empty_processors = list()
    allgathered_mesh_global_vertex_indices__empty_processors = list()
    for r in range(mpi_comm.size):
        backup_first_marker_id_r = mpi_comm.bcast(backup_first_marker_id, root=r)
        if backup_first_marker_id_r is None:
            allgathered_mesh_global_vertex_indices__non_empty_processors.extend(mpi_comm.bcast(mesh_global_vertex_indices, root=r))
        else:
            allgathered_mesh_global_vertex_indices__empty_processors.extend(mpi_comm.bcast(mesh_global_vertex_indices, root=r))
    allgathered_mesh_global_vertex_indices__non_empty_processors = sorted(unique(allgathered_mesh_global_vertex_indices__non_empty_processors))
    allgathered_mesh_global_vertex_indices__empty_processors = sorted(unique(allgathered_mesh_global_vertex_indices__empty_processors))
    # ... then create a dict that will contain the map from mesh global vertex index to submesh global vertex index.
    # ... Here make sure to number the "real" vertices first (those coming from non-empty processors), since the other ones
    # ... are just a side effect of the current partitioning!
    allgathered_mesh_to_submesh_vertex_global_indices = dict()
    _submesh_vertex_global_index = 0
    for mesh_vertex_global_index in allgathered_mesh_global_vertex_indices__non_empty_processors:
        assert mesh_vertex_global_index not in allgathered_mesh_to_submesh_vertex_global_indices
        allgathered_mesh_to_submesh_vertex_global_indices[mesh_vertex_global_index] = _submesh_vertex_global_index
        _submesh_vertex_global_index += 1
    for mesh_vertex_global_index in allgathered_mesh_global_vertex_indices__empty_processors:
        if mesh_vertex_global_index not in allgathered_mesh_to_submesh_vertex_global_indices:
            allgathered_mesh_to_submesh_vertex_global_indices[mesh_vertex_global_index] = _submesh_vertex_global_index
            _submesh_vertex_global_index += 1
    # Number cells in a way that is independent of the number of processors. To do so ...
    # ... first of all collect all cells from all processors
    allgathered_mesh_global_cell_indices__non_empty_processors = list()
    allgathered_mesh_global_cell_indices__empty_processors = list()
    for r in range(mpi_comm.size):
        backup_first_marker_id_r = mpi_comm.bcast(backup_first_marker_id, root=r)
        if backup_first_marker_id_r is None:
            allgathered_mesh_global_cell_indices__non_empty_processors.extend(mpi_comm.bcast(mesh_global_cell_indices, root=r))
        else:
            allgathered_mesh_global_cell_indices__empty_processors.extend(mpi_comm.bcast(mesh_global_cell_indices, root=r))
    allgathered_mesh_global_cell_indices__non_empty_processors = sorted(unique(allgathered_mesh_global_cell_indices__non_empty_processors))
    allgathered_mesh_global_cell_indices__empty_processors = sorted(unique(allgathered_mesh_global_cell_indices__empty_processors))
    # ... then create a dict that will contain the map from mesh global cell index to submesh global cell index.
    # ... Here make sure to number the "real" cells first (those coming from non-empty processors), since the other ones
    # ... are just a side effect of the current partitioning!
    allgathered_mesh_to_submesh_cell_global_indices = dict()
    _submesh_cell_global_index = 0
    for mesh_cell_global_index in allgathered_mesh_global_cell_indices__non_empty_processors:
        assert mesh_cell_global_index not in allgathered_mesh_to_submesh_cell_global_indices
        allgathered_mesh_to_submesh_cell_global_indices[mesh_cell_global_index] = _submesh_cell_global_index
        _submesh_cell_global_index += 1
    for mesh_cell_global_index in allgathered_mesh_global_cell_indices__empty_processors:
        assert mesh_cell_global_index not in allgathered_mesh_to_submesh_cell_global_indices
        allgathered_mesh_to_submesh_cell_global_indices[mesh_cell_global_index] = _submesh_cell_global_index
        _submesh_cell_global_index += 1
    # Also create a mapping from mesh local vertex index to submesh local vertex index.
    mesh_to_submesh_vertex_local_indices = dict(zip(mesh_vertex_indices, list(range(len(mesh_vertex_indices)))))
    # Also create a mapping from mesh local cell index to submesh local cell index.
    mesh_to_submesh_cell_local_indices = dict(zip(mesh_cell_indices, list(range(len(mesh_cell_indices)))))
    # Now, define submesh cells
    submesh_cells = list()
    for i, c in enumerate(mesh_cells):
        submesh_cells.append([mesh_to_submesh_vertex_local_indices[j] for j in c])
    # Store vertices as submesh_vertices[local_index] = (global_index, coordinates)
    submesh_vertices = dict()
    for mesh_vertex_local_index, submesh_vertex_local_index in mesh_to_submesh_vertex_local_indices.items():
        submesh_vertices[submesh_vertex_local_index] = (
            allgathered_mesh_to_submesh_vertex_global_indices[mesh.topology().global_indices(0)[mesh_vertex_local_index]],
            mesh.coordinates()[mesh_vertex_local_index]
        )
    # Collect the global number of vertices and cells
    global_num_cells = mpi_comm.allreduce(len(submesh_cells), op=SUM)
    global_num_vertices = len(allgathered_mesh_to_submesh_vertex_global_indices)
    # Fill in mesh_editor
    mesh_editor.init_vertices_global(len(submesh_vertices), global_num_vertices)
    mesh_editor.init_cells_global(len(submesh_cells), global_num_cells)
    for local_index, cell_vertices in enumerate(submesh_cells):
        if has_pybind11():
            mesh_editor.add_cell(local_index, cell_vertices)
        else:
            mesh_editor.add_cell(local_index, *cell_vertices)
    for local_index, (global_index, coordinates) in submesh_vertices.items():
        mesh_editor.add_vertex_global(local_index, global_index, coordinates)
    mesh_editor.close()
    # Initialize topology
    submesh.topology().init(0, len(submesh_vertices), global_num_vertices)
    submesh.topology().init(mesh.ufl_cell().topological_dimension(), len(submesh_cells), global_num_cells)
    # Correct the global index of cells
    for local_index in range(len(submesh_cells)):
        submesh.topology().set_global_index(
            submesh.topology().dim(),
            local_index,
            allgathered_mesh_to_submesh_cell_global_indices[mesh_global_cell_indices[local_index]]
        )
    
    # == 3. Store (local) mesh to/from submesh map for cells, facets and vertices == #
    # Cells
    submesh.mesh_to_submesh_cell_local_indices = mesh_to_submesh_cell_local_indices
    submesh.submesh_to_mesh_cell_local_indices = mesh_cell_indices
    # Vertices
    submesh.mesh_to_submesh_vertex_local_indices = mesh_to_submesh_vertex_local_indices
    submesh.submesh_to_mesh_vertex_local_indices = mesh_vertex_indices
    # Facets
    mesh_vertices_to_mesh_facets = dict()
    mesh_facets_to_mesh_vertices = dict()
    for mesh_cell_index in mesh_cell_indices:
        mesh_cell = Cell(mesh, mesh_cell_index)
        for mesh_facet in facets(mesh_cell):
            mesh_facet_vertices = list()
            for mesh_facet_vertex in vertices(mesh_facet):
                mesh_facet_vertices.append(mesh_facet_vertex.index())
            mesh_facet_vertices = tuple(sorted(mesh_facet_vertices))
            if mesh_facet_vertices in mesh_vertices_to_mesh_facets:
                assert mesh_vertices_to_mesh_facets[mesh_facet_vertices] == mesh_facet.index()
            else:
                mesh_vertices_to_mesh_facets[mesh_facet_vertices] = mesh_facet.index()
            if mesh_facet.index() in mesh_facets_to_mesh_vertices:
                assert mesh_facets_to_mesh_vertices[mesh_facet.index()] == mesh_facet_vertices
            else:
                mesh_facets_to_mesh_vertices[mesh_facet.index()] = mesh_facet_vertices
    submesh_vertices_to_submesh_facets = dict()
    submesh_facets_to_submesh_vertices = dict()
    for submesh_facet in facets(submesh):
        submesh_facet_vertices = list()
        for submesh_facet_vertex in vertices(submesh_facet):
            submesh_facet_vertices.append(submesh_facet_vertex.index())
        submesh_facet_vertices = tuple(sorted(submesh_facet_vertices))
        assert submesh_facet_vertices not in submesh_vertices_to_submesh_facets
        submesh_vertices_to_submesh_facets[submesh_facet_vertices] = submesh_facet.index()
        assert submesh_facet.index() not in submesh_facets_to_submesh_vertices
        submesh_facets_to_submesh_vertices[submesh_facet.index()] = submesh_facet_vertices
    mesh_to_submesh_facets_local_indices = dict()
    for (mesh_facet_index, mesh_vertices) in mesh_facets_to_mesh_vertices.items():
        submesh_vertices = tuple(sorted([submesh.mesh_to_submesh_vertex_local_indices[mesh_vertex] for mesh_vertex in mesh_vertices]))
        submesh_facet_index = submesh_vertices_to_submesh_facets[submesh_vertices]
        mesh_to_submesh_facets_local_indices[mesh_facet_index] = submesh_facet_index
    submesh_to_mesh_facets_local_indices = dict()
    for (submesh_facet_index, submesh_vertices) in submesh_facets_to_submesh_vertices.items():
        mesh_vertices = tuple(sorted([submesh.submesh_to_mesh_vertex_local_indices[submesh_vertex] for submesh_vertex in submesh_vertices]))
        mesh_facet_index = mesh_vertices_to_mesh_facets[mesh_vertices]
        submesh_to_mesh_facets_local_indices[submesh_facet_index] = mesh_facet_index
    submesh.mesh_to_submesh_facet_local_indices = mesh_to_submesh_facets_local_indices
    submesh.submesh_to_mesh_facet_local_indices = list()
    assert min(submesh_to_mesh_facets_local_indices.keys()) == 0
    assert max(submesh_to_mesh_facets_local_indices.keys()) == len(submesh_to_mesh_facets_local_indices.keys()) - 1
    for submesh_facet_index in range(len(submesh_to_mesh_facets_local_indices)):
        submesh.submesh_to_mesh_facet_local_indices.append(submesh_to_mesh_facets_local_indices[submesh_facet_index])
    # == 3bis. Prepare (temporary) global indices of facets == #
    # Wrapper to DistributedMeshTools::number_entities
    if has_pybind11():
        cpp_code = """
            #include <pybind11/pybind11.h>
            #include <dolfin/mesh/DistributedMeshTools.h>
            #include <dolfin/mesh/Mesh.h>
            
            void initialize_global_indices(std::shared_ptr<dolfin::Mesh> mesh, std::size_t dim)
            {
                dolfin::DistributedMeshTools::number_entities(*mesh, dim);
            }
            
            PYBIND11_MODULE(SIGNATURE, m)
            {
                m.def("initialize_global_indices", &initialize_global_indices);
            }
        """
        initialize_global_indices = compile_cpp_code(cpp_code).initialize_global_indices
    else:
        cpp_code = """
            void initialize_global_indices(Mesh & mesh, std::size_t dim)
            {
                DistributedMeshTools::number_entities(mesh, dim);
            }
        """
        initialize_global_indices = compile_extension_module(cpp_code, additional_system_headers=["dolfin/mesh/DistributedMeshTools.h"]).initialize_global_indices
    initialize_global_indices(mesh, mesh.topology().dim() - 1)
    # Prepare global indices of facets
    mesh_facets_local_to_global_indices = dict()
    for mesh_cell_index in mesh_cell_indices:
        mesh_cell = Cell(mesh, mesh_cell_index)
        for mesh_facet in facets(mesh_cell):
            mesh_facets_local_to_global_indices[mesh_facet.index()] = mesh_facet.global_index()
    mesh_facets_global_indices_in_submesh = list()
    for mesh_facet_local_index in mesh_to_submesh_facets_local_indices.keys():
        mesh_facets_global_indices_in_submesh.append(mesh_facets_local_to_global_indices[mesh_facet_local_index])
    allgathered__mesh_facets_global_indices_in_submesh = list()
    for r in range(mpi_comm.size):
        allgathered__mesh_facets_global_indices_in_submesh.extend(mpi_comm.bcast(mesh_facets_global_indices_in_submesh, root=r))
    allgathered__mesh_facets_global_indices_in_submesh = sorted(set(allgathered__mesh_facets_global_indices_in_submesh))
    mesh_to_submesh_facets_global_indices = dict()
    for (submesh_facet_global_index, mesh_facet_global_index) in enumerate(allgathered__mesh_facets_global_indices_in_submesh):
        mesh_to_submesh_facets_global_indices[mesh_facet_global_index] = submesh_facet_global_index
    submesh_facets_local_to_global_indices = dict()
    for (submesh_facet_local_index, mesh_facet_local_index) in submesh_to_mesh_facets_local_indices.items():
        submesh_facets_local_to_global_indices[submesh_facet_local_index] = mesh_to_submesh_facets_global_indices[mesh_facets_local_to_global_indices[mesh_facet_local_index]]
    
    # == 4. Assign shared vertices == #
    shared_entities_dimensions = {
        "vertex": 0,
        "facet": submesh.topology().dim() - 1,
        "cell": submesh.topology().dim()
    }
    shared_entities_class = {
        "vertex": Vertex,
        "facet": Facet,
        "cell": Cell
    }
    shared_entities_iterator = {
        "vertex": vertices,
        "facet": facets,
        "cell": cells
    }
    shared_entities_submesh_global_index_getter = {
        "vertex": lambda entity: entity.global_index(),
        "facet": lambda entity: submesh_facets_local_to_global_indices[entity.index()],
        "cell": lambda entity: entity.global_index()
    }
    for entity_type in ["vertex", "facet", "cell"]: # do not use .keys() because the order is important
        dim = shared_entities_dimensions[entity_type]
        class_ = shared_entities_class[entity_type]
        iterator = shared_entities_iterator[entity_type]
        submesh_global_index_getter = shared_entities_submesh_global_index_getter[entity_type]
        # Get shared entities from mesh. A subset of these will end up being shared entities also in the submesh
        # (thanks to the fact that we do not redistribute cells from one processor to another)
        if mpi_comm.size > 1: # some entities may not be initialized in serial, since they are not needed
            assert mesh.topology().have_shared_entities(dim), "Mesh shared entities have not been initialized for dimension " + str(dim)
        if mesh.topology().have_shared_entities(dim): # always true in parallel (when really needed)
            # However, an entity which has been selected may no longer be shared, because only one of
            # the sharing processes has it in the submesh. For instance, consider the case of two cells
            # across the interface (located on a facet f) between two processors: if only one of the two
            # cells is selected, the facet f and its vertices are no longer shared!
            # For this reason, we create a new dict from global entity index to processors sharing them. Thus ...
            # ... first of all get global indices corresponding to local entities
            if entity_type in ["vertex", "cell"]:
                assert submesh.topology().have_global_indices(dim), "Submesh global indices have not been initialized for dimension " + str(dim)
            submesh_local_entities_global_index = list()
            submesh_local_entities_global_to_local_index = dict()
            for entity in iterator(submesh):
                local_entity_index = entity.index()
                global_entity_index = submesh_global_index_getter(entity)
                submesh_local_entities_global_index.append(global_entity_index)
                submesh_local_entities_global_to_local_index[global_entity_index] = local_entity_index
            # ... then gather all global indices from all processors
            gathered__submesh_local_entities_global_index = list() # over processor id
            for r in range(mpi_comm.size):
                gathered__submesh_local_entities_global_index.append(mpi_comm.bcast(submesh_local_entities_global_index, root=r))
            # ... then create dict from global index to processors sharing it
            submesh_shared_entities__global = dict()
            for r in range(mpi_comm.size):
                for global_entity_index in gathered__submesh_local_entities_global_index[r]:
                    if global_entity_index not in submesh_shared_entities__global:
                        submesh_shared_entities__global[global_entity_index] = list()
                    submesh_shared_entities__global[global_entity_index].append(r)
            # ... and finally populate the shared entities dict, which is the same as the dict above except that
            # the current processor rank is removed and local indexing is used
            submesh_shared_entities = dict() # from local index to list of integers
            for (global_entity_index, processors) in submesh_shared_entities__global.items():
                if (
                    mpi_comm.rank in processors  # only local entities
                        and
                    len(processors) > 1 # it was still shared after submesh extraction
                ):
                    other_processors_list = list(processors)
                    other_processors_list.remove(mpi_comm.rank)
                    other_processors = array(other_processors_list, dtype=uintp)
                    submesh_shared_entities[submesh_local_entities_global_to_local_index[global_entity_index]] = other_processors

            # Need an extension module to populate shared_entities because in python each call to shared_entities
            # returns a temporary.
            if has_pybind11():
                cpp_code = """
                    #include <Eigen/Core>
                    #include <pybind11/pybind11.h>
                    #include <pybind11/eigen.h>
                    #include <dolfin/mesh/Mesh.h>
                    
                    using OtherProcesses = Eigen::Ref<const Eigen::Matrix<std::size_t, Eigen::Dynamic, 1>>;
                    
                    void set_shared_entities(std::shared_ptr<dolfin::Mesh> submesh, std::size_t idx, const OtherProcesses other_processes, std::size_t dim)
                    {
                        std::set<unsigned int> set_other_processes;
                        for (std::size_t i(0); i < other_processes.size(); i++)
                            set_other_processes.insert(other_processes[i]);
                        submesh->topology().shared_entities(dim)[idx] = set_other_processes;
                    }
                    
                    PYBIND11_MODULE(SIGNATURE, m)
                    {
                        m.def("set_shared_entities", &set_shared_entities);
                    }
                """
                set_shared_entities = compile_cpp_code(cpp_code).set_shared_entities
            else:
                cpp_code = """
                    void set_shared_entities(Mesh & submesh, std::size_t idx, const Array<std::size_t>& other_processes, std::size_t dim)
                    {
                        std::set<unsigned int> set_other_processes;
                        for (std::size_t i(0); i < other_processes.size(); i++)
                            set_other_processes.insert(other_processes[i]);
                        submesh.topology().shared_entities(dim)[idx] = set_other_processes;
                    }
                """
                set_shared_entities = compile_extension_module(cpp_code).set_shared_entities
            for (submesh_entity_local_index, other_processors) in submesh_shared_entities.items():
                set_shared_entities(submesh, submesh_entity_local_index, other_processors, dim)
                
            log(DEBUG, "Local indices of shared entities for dimension " + str(dim) + ": " + str(list(submesh.topology().shared_entities(0).keys())))
            log(DEBUG, "Global indices of shared entities for dimension " + str(dim) + ": " + str([class_(submesh, local_index).global_index() for local_index in submesh.topology().shared_entities(dim).keys()]))
    
    # == 5. Also initialize submesh facets global indices, now that shared facets have been computed == #
    initialize_global_indices(submesh, submesh.topology().dim() - 1) # note that DOLFIN might change the numbering when compared to the one at 3bis
    
    # == 6. Restore backup_first_marker_id and return == #
    if backup_first_marker_id is not None:
        markers.array()[0] = backup_first_marker_id
    return submesh
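
A minimal driver for the function above might look as follows. This is only a sketch, assuming
a legacy DOLFIN build (2017.x/2018.x) in which MeshFunction("bool", ...) produces the
MeshFunctionBool type asserted above, and that create_submesh and its helpers are importable:

from dolfin import UnitSquareMesh, MeshFunction, cells

mesh = UnitSquareMesh(8, 8)
# Mark the cells of the left half of the unit square
markers = MeshFunction("bool", mesh, mesh.topology().dim(), False)
for cell in cells(mesh):
    markers[cell] = cell.midpoint().x() < 0.5

submesh = create_submesh(mesh, markers)
# The maps stored in step 3 relate submesh and parent mesh entities, e.g.
# submesh.submesh_to_mesh_cell_local_indices[0] is the parent index of submesh cell 0.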
Esempio n. 31
0
def gmsh2xml(ifilename, handler):
    """Convert between .gmsh v2.0 format (http://www.geuz.org/gmsh/) and .xml,
    parser implemented as a state machine:

        0 = read 'MeshFormat'
        1 = read  mesh format data
        2 = read 'EndMeshFormat'
        3 = read 'Nodes'
        4 = read  number of vertices
        5 = read  vertices
        6 = read 'EndNodes'
        7 = read 'Elements'
        8 = read  number of cells
        9 = read  cells
        10 = done

    Afterwards, extract physical region numbers if they are defined in
    the mesh file as a mesh function.

    """

    print("Converting from Gmsh format (.msh, .gmsh) to DOLFIN XML format")

    # The dimension of the gmsh element types supported here as well as the dolfin cell types for each dimension
    gmsh_dim = {15: 0, 1: 1, 2: 2, 4: 3}
    cell_type_for_dim = {1: "interval", 2: "triangle", 3: "tetrahedron" }
    # the gmsh element types supported for conversion
    supported_gmsh_element_types = [1, 2, 4, 15]

    # Open files
    ifile = open(ifilename, "r")

    # Scan file for cell type
    cell_type = None
    highest_dim = 0
    line = ifile.readline()
    while line:

        # Remove newline
        line = line.rstrip("\n\r")

        # Read dimension
        if line.find("$Elements") == 0:

            line = ifile.readline()
            num_elements = int(line)
            if num_elements == 0:
                _error("No elements found in gmsh file.")
            line = ifile.readline()

            # Now iterate through elements to find largest dimension.  Gmsh
            # format might include elements of lower dimensions in the element list.
            # We also need to count number of elements of correct dimensions.
            # Also determine which vertices are not used.
            dim_count = {0: 0, 1: 0, 2: 0, 3: 0}
            vertices_used_for_dim = {0: [], 1: [], 2: [], 3: []}
            # Array used to store gmsh tags for 1D (type 1/line), 2D (type 2/triangular) elements and 3D (type 4/tet) elements
            tags_for_dim = {0: [], 1: [], 2: [], 3: []}

            while line.find("$EndElements") == -1:
                element = line.split()
                elem_type = int(element[1])
                num_tags = int(element[2])
                if elem_type in supported_gmsh_element_types:
                    dim = gmsh_dim[elem_type]
                    if highest_dim < dim:
                        highest_dim = dim
                    node_num_list = [int(node) for node in element[3 + num_tags:]]
                    vertices_used_for_dim[dim].extend(node_num_list)
                    if num_tags > 0:
                        tags_for_dim[dim].append(tuple(int(tag) for tag in element[3:3+num_tags]))
                    dim_count[dim] += 1
                else:
                    #TODO: output a warning here. "gmsh element type %d not supported" % elem_type
                    pass
                line = ifile.readline()
        else:
            # Read next line
            line = ifile.readline()

    # Check that we got the cell type and set num_cells_counted
    if highest_dim == 0:
        _error("Unable to find cells of supported type.")

    num_cells_counted = dim_count[highest_dim]
    vertex_set = set(vertices_used_for_dim[highest_dim])
    vertices_used_for_dim[highest_dim] = None

    vertex_dict = {}
    for n,v in enumerate(vertex_set):
        vertex_dict[v] = n

    # Step to beginning of file
    ifile.seek(0)

    # Set mesh type
    handler.set_mesh_type(cell_type_for_dim[highest_dim], highest_dim)

    # Initialise node list (gmsh does not export all vertexes in order)
    nodelist = {}

    # Current state
    state = 0

    # Write data
    num_vertices_read = 0
    num_cells_read = 0

    # Only import the dolfin objects if facet markings exist
    process_facets = False
    if len(tags_for_dim[highest_dim-1]) > 0:
        # first construct the mesh
        try:
            from dolfin import MeshEditor, Mesh
        except ImportError:
            _error("DOLFIN must be installed to handle Gmsh boundary regions")
        mesh = Mesh()
        mesh_editor = MeshEditor()
        mesh_editor.open(mesh, highest_dim, highest_dim)
        process_facets = True
    else:
        # TODO: Output a warning or an error here
        mesh = None

    while state != 10:

        # Read next line
        line = ifile.readline()
        if not line: break

        # Skip comments
        if line[0] == '#':
            continue

        # Remove newline
        line = line.rstrip("\n\r")

        if state == 0:
            if line == "$MeshFormat":
                state = 1
        elif state == 1:
            (version, file_type, data_size) = line.split()
            state = 2
        elif state == 2:
            if line == "$EndMeshFormat":
                state = 3
        elif state == 3:
            if line == "$Nodes":
                state = 4
        elif state == 4:
            num_vertices = len(vertex_dict)
            handler.start_vertices(num_vertices)
            if process_facets:
                mesh_editor.init_vertices_global(num_vertices, num_vertices)
            state = 5
        elif state == 5:
            (node_no, x, y, z) = line.split()
            node_no = int(node_no)
            x,y,z = [float(xx) for xx in (x,y,z)]
            if node_no in vertex_dict:
                node_no = vertex_dict[node_no]
            else:
                continue
            nodelist[int(node_no)] = num_vertices_read
            handler.add_vertex(num_vertices_read, [x, y, z])
            if process_facets:
                if highest_dim == 1:
                    coords = numpy.array([x])
                elif highest_dim == 2:
                    coords = numpy.array([x, y])
                elif highest_dim == 3:
                    coords = numpy.array([x, y, z])
                mesh_editor.add_vertex(num_vertices_read, coords)

            num_vertices_read +=1

            if num_vertices == num_vertices_read:
                handler.end_vertices()
                state = 6
        elif state == 6:
            if line == "$EndNodes":
                state = 7
        elif state == 7:
            if line == "$Elements":
                state = 8
        elif state == 8:
            handler.start_cells(num_cells_counted)
            if process_facets:
                mesh_editor.init_cells_global(num_cells_counted, num_cells_counted)

            state = 9
        elif state == 9:
            element = line.split()
            elem_type = int(element[1])
            num_tags  = int(element[2])
            if elem_type in supported_gmsh_element_types:
                dim = gmsh_dim[elem_type]
            else:
                dim = 0
            if dim == highest_dim:
                node_num_list = [vertex_dict[int(node)] for node in element[3 + num_tags:]]
                for node in node_num_list:
                    if not node in nodelist:
                        _error("Vertex %d of %s %d not previously defined." %
                              (node, cell_type_for_dim[dim], num_cells_read))
                cell_nodes = [nodelist[n] for n in node_num_list]
                handler.add_cell(num_cells_read, cell_nodes)

                if process_facets:
                    cell_nodes = numpy.array([nodelist[n] for n in node_num_list], dtype=numpy.uintp)
                    mesh_editor.add_cell(num_cells_read, cell_nodes)

                num_cells_read +=1

            if num_cells_counted == num_cells_read:
                handler.end_cells()
                if process_facets:
                    mesh_editor.close()
                state = 10
        elif state == 10:
            break

    # Write mesh function based on the Physical Regions defined by
    # gmsh, but only if they are not all zero. All zero physical
    # regions indicate that no physical regions were defined.
    if highest_dim not in [1,2,3]:
        _error("Gmsh tags not supported for dimension %i. Probably a bug" % dim)

    tags = tags_for_dim[highest_dim]
    physical_regions = tuple(tag[0] for tag in tags)
    if not all(tag == 0 for tag in physical_regions):
        handler.start_meshfunction("physical_region", dim, num_cells_counted)
        for i, physical_region in enumerate(physical_regions):
            handler.add_entity_meshfunction(i, physical_region)
        handler.end_meshfunction()

    # Now process the facet markers
    tags = tags_for_dim[highest_dim-1]
    if (len(tags) > 0) and (mesh is not None):
        physical_regions = tuple(tag[0] for tag in tags)
        if not all(tag == 0 for tag in physical_regions):
            mesh.init(highest_dim-1,0)

            # Get the facet-node connectivity information (reshape as a row of node indices per facet)
            if highest_dim==1:
              # for 1d meshes the mesh topology returns the vertex to vertex map, which isn't what we want
              # as facets are vertices
              facets_as_nodes = numpy.array([[i] for i in range(mesh.num_facets())])
            else:
              facets_as_nodes = mesh.topology()(highest_dim-1,0)().reshape ( mesh.num_facets(), highest_dim )

            # Build the reverse map
            nodes_as_facets = {}
            for facet in range(mesh.num_facets()):
              nodes_as_facets[tuple(facets_as_nodes[facet,:])] = facet

            data = [int(0*k) for k in range(mesh.num_facets()) ]
            for i, physical_region in enumerate(physical_regions):
                nodes = [n-1 for n in vertices_used_for_dim[highest_dim-1][highest_dim*i:(highest_dim*i+highest_dim)]]
                nodes.sort()

                if physical_region != 0:
                    try:
                        index = nodes_as_facets[tuple(nodes)]
                        data[index] = physical_region
                    except KeyError:
                        raise Exception ( "The facet (%d) was not found to mark: %s" % (i, nodes) )

            # Create and initialise the mesh function
            handler.start_meshfunction("facet_region", highest_dim-1, mesh.num_facets() )
            for index, physical_region in enumerate ( data ):
                handler.add_entity_meshfunction(index, physical_region)
            handler.end_meshfunction()

    # Check that we got all data
    if state == 10:
        print("Conversion done")
    else:
       _error("Missing data, unable to convert \n\ Did you use version 2.0 of the gmsh file format?")

    # Close files
    ifile.close()
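
The handler argument is only exercised through the calls visible above (set_mesh_type,
start_vertices/add_vertex/end_vertices, start_cells/add_cell/end_cells and the meshfunction
hooks). As a hypothetical illustration of that interface, a stub handler could be sketched as
follows; the original converter presumably passes an XML-writing handler instead:

class DummyGmshHandler(object):
    """Hypothetical handler stub showing the interface gmsh2xml relies on."""
    def set_mesh_type(self, cell_type, dim):
        print("cell type: %s, dimension: %d" % (cell_type, dim))
    def start_vertices(self, num_vertices):
        print("reading %d vertices" % num_vertices)
    def add_vertex(self, index, coords):
        pass
    def end_vertices(self):
        pass
    def start_cells(self, num_cells):
        print("reading %d cells" % num_cells)
    def add_cell(self, index, nodes):
        pass
    def end_cells(self):
        pass
    def start_meshfunction(self, name, dim, size):
        print("mesh function '%s' of dimension %d with %d entries" % (name, dim, size))
    def add_entity_meshfunction(self, index, value):
        pass
    def end_meshfunction(self):
        pass

# gmsh2xml("mesh.msh", DummyGmshHandler())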
Esempio n. 32
0
    def iterkeys(self):  # real signature unknown; restored from __doc__
        """ D.iterkeys() -> an iterator over the keys of D """
        for k in self._entity_values.iterkeys():
            # @todo - how do we get the entity back rather than our handle???
            yield MeshEntity(self._mesh, *k)

    def itervalues(self):  # real signature unknown; restored from __doc__
        """ D.itervalues() -> an iterator over the values of D """
        for v in self._entity_values.itervalues():
            # @todo - how do we get the entity back rather than our handle???
            yield v


mesh = Mesh()
editor = MeshEditor()
editor.open(mesh, 2, 2)  # topo_dim = 2, geom dim = 2

editor.init_vertices(6)
editor.init_cells(2)

vertex_0 = Vertex(mesh, 0)
vertex_1 = Vertex(mesh, 1)
vertex_2 = Vertex(mesh, 2)
vertex_3 = Vertex(mesh, 3)

vertex_4 = Vertex(mesh, 4)
vertex_5 = Vertex(mesh, 5)

editor.add_cell(0, 1, 2, 3)
editor.add_cell(1, 0, 2, 3)
Esempio n. 33
0
def fit2d(x0, y0, points, cells, lmbda, degree=1, solver="sparse"):
    # Convert points, cells to dolfin mesh
    editor = MeshEditor()
    mesh = Mesh()
    # topological and geometrical dimension 2
    editor.open(mesh, "triangle", 2, 2, 1)
    editor.init_vertices(len(points))
    editor.init_cells(len(cells))
    for k, point in enumerate(points):
        editor.add_vertex(k, point[:2])
    for k, cell in enumerate(cells.astype(numpy.uintp)):
        editor.add_cell(k, cell)
    editor.close()

    V = FunctionSpace(mesh, "CG", degree)
    return fit(x0, y0, V, lmbda, solver=solver)
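
A usage sketch with hypothetical data (it assumes DOLFIN plus the fit helper imported at the
top of the containing module): fit2d can be driven with a hand-made two-triangle mesh and a
few scattered samples.

import numpy

# Two triangles covering the unit square
points = numpy.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
cells = numpy.array([[0, 1, 2], [0, 2, 3]])

# Scattered sample locations and (noisy) values to be smoothed onto the mesh
x0 = numpy.array([[0.25, 0.25], [0.75, 0.25], [0.5, 0.75]])
y0 = numpy.array([0.1, 0.2, 0.15])

u = fit2d(x0, y0, points, cells, lmbda=1.0e-2, degree=1)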
Esempio n. 34
0
def gmsh_to_dolfin_mesh(ifilename, handler):
    """Convert between .gmsh v2.0 format (http://www.geuz.org/gmsh/) and .xml,
    parser implemented as a state machine:

        0 = read 'MeshFormat'
        1 = read  mesh format data
        2 = read 'EndMeshFormat'
        3 = read 'Nodes'
        4 = read  number of vertices
        5 = read  vertices
        6 = read 'EndNodes'
        7 = read 'Elements'
        8 = read  number of cells
        9 = read  cells
        10 = done

    Afterwards, extract physical region numbers if they are defined in
    the mesh file as a mesh function.

    """

    print "Converting from Gmsh format (.msh, .gmsh) to DOLFIN XML format"

    # The dimension of the gmsh element types supported here as well as the dolfin cell types for each dimension
    gmsh_dim = {1: 1, 2: 2, 4: 3}
    gmsh_cell_type = {1: "interval", 2: "triangle", 3: "tetrahedron"}
    # the gmsh element types supported for conversion
    supported_gmsh_element_types = [1, 2, 4]

    # Open files
    ifile = open(ifilename, "r")

    # Scan file for cell type
    cell_type = None
    highest_dim = 0
    line = ifile.readline()
    while line:

        # Remove newline
        if line[-1] == "\n":
            line = line[:-1]

        # Read dimension
        if line.find("$Elements") == 0:

            line = ifile.readline()
            num_elements = int(line)
            num_cells_counted = 0
            if num_elements == 0:
                _error("No elements found in gmsh file.")
            line = ifile.readline()

            # Now iterate through elements to find largest dimension.  Gmsh
            # format might include elements of lower dimensions in the element list.
            # We also need to count number of elements of correct dimensions.
            # Also determine which vertices are not used.
            dim_count = {1: 0, 2: 0, 3: 0}
            vertices_used = {1: [], 2: [], 3: []}
            # Array used to store gmsh tags for 1D (type 1/line), 2D (type 2/triangular) elements and 3D (type 4/tet) elements
            tags_for_dim = {1: [], 2: [], 3: []}

            while line.find("$EndElements") == -1:
                element = line.split()
                elem_type = int(element[1])
                num_tags = int(element[2])

                if elem_type in supported_gmsh_element_types:
                    dim = gmsh_dim[elem_type]
                    if highest_dim < dim:
                        highest_dim = dim

                    node_num_list = [
                        int(node) for node in element[3 + num_tags:]
                    ]
                    vertices_used[dim].extend(node_num_list)
                    if num_tags > 0:
                        tags_for_dim[dim].append(
                            tuple(int(tag) for tag in element[3:3 + num_tags]))
                    dim_count[dim] += 1
                else:
                    #TODO: output a warning here. "gmsh element type %d not supported" % elem_type
                    pass

                line = ifile.readline()
        else:
            # Read next line
            line = ifile.readline()

    # Check that we got the cell type and set num_cells_counted
    if highest_dim == 0:
        _error("Unable to find cells of supported type.")

    num_cells_counted = dim_count[highest_dim]
    vertex_set = set(vertices_used[highest_dim])
    vertices_used[highest_dim] = None

    vertex_dict = {}
    for n, v in enumerate(vertex_set):
        vertex_dict[v] = n

    # Step to beginning of file
    ifile.seek(0)

    # Set mesh type
    handler.set_mesh_type(gmsh_cell_type[highest_dim], highest_dim)

    # Initialise node list (gmsh does not export all vertexes in order)
    nodelist = {}

    # Current state
    state = 0

    # Write data
    num_vertices_read = 0
    num_cells_read = 0

    # Now handle the facet markings
    if len(tags_for_dim[highest_dim - 1]) > 0:
        # first construct the mesh
        from dolfin import MeshEditor, Mesh
        mesh = Mesh()
        me = MeshEditor()
        me.open(mesh, highest_dim, highest_dim)
    else:
        me = None

    while state != 10:

        # Read next line
        line = ifile.readline()
        if not line: break

        # Skip comments
        if line[0] == '#':
            continue

        # Remove newline
        if line[-1] == "\n":
            line = line[:-1]

        if state == 0:
            if line == "$MeshFormat":
                state = 1
        elif state == 1:
            (version, file_type, data_size) = line.split()
            state = 2
        elif state == 2:
            if line == "$EndMeshFormat":
                state = 3
        elif state == 3:
            if line == "$Nodes":
                state = 4
        elif state == 4:
            num_vertices = len(vertex_dict)
            handler.start_vertices(num_vertices)
            if me is not None:
                me.init_vertices(num_vertices)
            state = 5
        elif state == 5:
            (node_no, x, y, z) = line.split()
            node_no = int(node_no)
            x, y, z = [float(xx) for xx in (x, y, z)]
            if vertex_dict.has_key(node_no):
                node_no = vertex_dict[node_no]
            else:
                continue
            nodelist[int(node_no)] = num_vertices_read
            handler.add_vertex(num_vertices_read, [x, y, z])
            if me is not None:
                if highest_dim == 1:
                    me.add_vertex(num_vertices_read, x)
                elif highest_dim == 2:
                    me.add_vertex(num_vertices_read, x, y)
                elif highest_dim == 3:
                    me.add_vertex(num_vertices_read, x, y, z)

            num_vertices_read += 1

            if num_vertices == num_vertices_read:
                handler.end_vertices()
                state = 6
        elif state == 6:
            if line == "$EndNodes":
                state = 7
        elif state == 7:
            if line == "$Elements":
                state = 8
        elif state == 8:
            handler.start_cells(num_cells_counted)
            if me is not None:
                me.init_cells(num_cells_counted)

            state = 9
        elif state == 9:
            element = line.split()
            elem_type = int(element[1])
            num_tags = int(element[2])
            if elem_type in supported_gmsh_element_types:
                dim = gmsh_dim[elem_type]
            else:
                dim = 0
            if dim == highest_dim:
                node_num_list = [
                    vertex_dict[int(node)] for node in element[3 + num_tags:]
                ]
                for node in node_num_list:
                    if not node in nodelist:
                        _error("Vertex %d of %s %d not previously defined." %
                               (node, gmsh_cell_type[dim], num_cells_read))
                cell_nodes = [nodelist[n] for n in node_num_list]
                handler.add_cell(num_cells_read, cell_nodes)

                if me is not None:
                    me.add_cell(num_cells_read, *cell_nodes)

                num_cells_read += 1

            if num_cells_counted == num_cells_read:
                handler.end_cells()
                if me is not None:
                    me.close()
                state = 10
        elif state == 10:
            break

    # Write mesh function based on the Physical Regions defined by
    # gmsh, but only if they are not all zero. All zero physical
    # regions indicate that no physical regions were defined.
    if highest_dim not in [1, 2, 3]:
        _error("Gmsh tags not supported for dimension %i. Probably a bug" %
               dim)

    tags = tags_for_dim[highest_dim]
    physical_regions = tuple(tag[0] for tag in tags)
    if not all(tag == 0 for tag in physical_regions):
        handler.start_meshfunction("physical_region", dim, num_cells_counted)
        for i, physical_region in enumerate(physical_regions):
            handler.add_entity_meshfunction(i, physical_region)
        handler.end_meshfunction()

    # Now process the facet markers
    tags = tags_for_dim[highest_dim - 1]
    if len(tags) > 0:

        print tags
        print vertices_used[highest_dim - 1]

        physical_regions = tuple(tag[0] for tag in tags)
        if not all(tag == 0 for tag in physical_regions):
            mesh.init(highest_dim - 1, 0)

            # Get the facet-node connectivity information (reshape as a row of node indices per facet)
            facets_as_nodes = mesh.topology()(highest_dim - 1, 0)().reshape(
                mesh.num_facets(), highest_dim)

            #            from dolfin import MeshFunction
            #            # Create and initialise the mesh function
            #            facet_mark_function = MeshFunction ( 'uint', mesh, highest_dim-1 )
            #            facet_mark_function.set_all( 0 )
            handler.start_meshfunction("facet_region", highest_dim - 1,
                                       mesh.num_facets())

            facets_to_check = range(mesh.num_facets())

            data = [int(0 * k) for k in range(len(facets_to_check))]

            for i, physical_region in enumerate(physical_regions):
                nodes = [
                    n - 1
                    for n in vertices_used[highest_dim -
                                           1][2 * i:(2 * i + highest_dim)]
                ]
                nodes.sort()

                if physical_region != 0:
                    found = False
                    for j in range(len(facets_to_check)):
                        index = facets_to_check[j]
                        if all(facets_as_nodes[index, k] == nodes[k]
                               for k in range(len(nodes))):
                            found = True
                            facets_to_check.pop(j)
                            # set the value of the mesh function
                            #                            facet_mark_function[index] = physical_region
                            data[index] = physical_region
                            break

                    if not found:
                        raise Exception(
                            "The facet (%d) was not found to mark: %s" %
                            (i, nodes))


#            fname = os.path.splitext('tmp.xml')[0]
#            mesh_function_file = File("%s_%s.xml" % (fname, "facet_region"))
#            mesh_function_file << facet_mark_function

            for index, physical_region in enumerate(data):
                handler.add_entity_meshfunction(index, physical_region)
            handler.end_meshfunction()

            mf = MeshFunction('uint', mesh, 'tmp_facet_region.xml')
            plot(mf, interactive=True)

    # Check that we got all data
    if state == 10:
        print "Conversion done"
    else:
        _error(
            "Missing data, unable to convert \n\ Did you use version 2.0 of the gmsh file format?"
        )

    # Close files
    ifile.close()
Esempio n. 35
0
def _fit_dolfin(x0,
                y0,
                points,
                cells,
                lmbda: float,
                degree: int = 1,
                solver: str = "lsqr"):
    from dolfin import (
        BoundingBoxTree,
        Cell,
        EigenMatrix,
        FacetNormal,
        Function,
        FunctionSpace,
        Mesh,
        MeshEditor,
        Point,
        TestFunction,
        TrialFunction,
        assemble,
        dot,
        ds,
        dx,
        grad,
    )

    def _assemble_eigen(form):
        L = EigenMatrix()
        assemble(form, tensor=L)
        return L

    def _build_eval_matrix(V, points):
        """Build the sparse m-by-n matrix that maps a coefficient set for a function in
        V to the values of that function at m given points."""
        # See <https://www.allanswered.com/post/lkbkm/#zxqgk>
        mesh = V.mesh()

        bbt = BoundingBoxTree()
        bbt.build(mesh)
        dofmap = V.dofmap()
        el = V.element()
        sdim = el.space_dimension()

        rows = []
        cols = []
        data = []
        for i, x in enumerate(points):
            cell_id = bbt.compute_first_entity_collision(Point(*x))
            cell = Cell(mesh, cell_id)
            coordinate_dofs = cell.get_vertex_coordinates()

            rows.append(np.full(sdim, i))
            cols.append(dofmap.cell_dofs(cell_id))

            v = el.evaluate_basis_all(x, coordinate_dofs, cell_id)
            data.append(v)

        rows = np.concatenate(rows)
        cols = np.concatenate(cols)
        data = np.concatenate(data)

        m = len(points)
        n = V.dim()
        matrix = sparse.csr_matrix((data, (rows, cols)), shape=(m, n))
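        # In other words, matrix[i, j] = phi_j(x_i) for evaluation point x_i and basis
        # function phi_j of V, so multiplying the matrix by a coefficient vector evaluates
        # the corresponding function at all the given points.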
        return matrix

    editor = MeshEditor()
    mesh = Mesh()

    # Convert points, cells to dolfin mesh
    if cells.shape[1] == 2:
        editor.open(mesh, "interval", 1, 1, 1)
    else:
        # can only handle triangles for now
        assert cells.shape[1] == 3
        # topological and geometrical dimension 2
        editor.open(mesh, "triangle", 2, 2, 1)

    editor.init_vertices(len(points))
    editor.init_cells(len(cells))
    for k, point in enumerate(points):
        editor.add_vertex(k, point)
    for k, cell in enumerate(cells.astype(np.uintp)):
        editor.add_cell(k, cell)
    editor.close()

    V = FunctionSpace(mesh, "CG", degree)

    u = TrialFunction(V)
    v = TestFunction(V)

    mesh = V.mesh()
    n = FacetNormal(mesh)

    # omega = assemble(1 * dx(mesh))

    A = _assemble_eigen(dot(grad(u), grad(v)) * dx -
                        dot(n, grad(u)) * v * ds).sparray()
    A *= lmbda

    E = _build_eval_matrix(V, x0)

    # mass matrix
    M = _assemble_eigen(u * v * dx).sparray()

    x = _solve(A, M, E, y0, solver)
    u = Function(V)
    u.vector().set_local(x)
    return u
Esempio n. 36
0
def get_reference_element(dim=2):
    nodes, cell = get_reference_coordinates(dim)
    cells = np.atleast_2d(np.arange(
        dim + 1, dtype=np.uintp))  # connection of nodes (defines element)

    # this piece of code creates a mesh containing one element only
    mesh = Mesh()
    editor = MeshEditor()
    editor.open(mesh, cell, dim, dim)
    editor.init_vertices(dim + 1)
    editor.init_cells(1)
    for i, n in enumerate(nodes):
        p = Point(n)
        editor.add_vertex(i, p)
    for i, n in enumerate(cells):
        editor.add_cell(i, n)
    editor.close()
    return mesh
Esempio n. 37
0
    def setUp(self, *args, **kwargs):
        self.mesh = Mesh()
        editor = MeshEditor()
        editor.open(self.mesh, 2, 2) # topo_dim = 2, geom dim = 2

        editor.init_vertices(6)
        editor.init_cells(2)

        vertex_0 = Vertex(self.mesh, 0)
        vertex_1 = Vertex(self.mesh, 1)
        vertex_2 = Vertex(self.mesh, 2)
        vertex_3 = Vertex(self.mesh, 3)

        vertex_4 = Vertex(self.mesh, 4)
        vertex_5 = Vertex(self.mesh, 5)

        editor.add_cell(0,1,2,3)
        editor.add_cell(1,0,2,3)

        editor.close()
Esempio n. 38
0
def IcosahedralSphereMesh(level, layers):
 
  from dolfin import Mesh, MeshEditor, File

  # generate vertices and cells
  (vertices, cells) = generate_icoshedral_sphere_mesh( level, layers)
  
  # init mesh and mesh editor helper
  mesh = Mesh()
  editor = MeshEditor()
  editor.open(mesh, 3, 3);
  
  # add vertices to mesh
  nVert = len(vertices)
  editor.init_vertices(nVert)

  verts = vertices.items()
  #verts = sorted(verts, key=lambda key: key[1])

  for v in verts:
    editor.add_vertex(v[1], v[0][0], v[0][1], v[0][2])

  ncells = len(cells)
  editor.init_cells(ncells)

  id = 0
  for c in cells:
    editor.add_cell(id, c[0], c[1], c[2], c[3])
    id += 1

  # done: create and return mesh object
  editor.close()
  return mesh
Esempio n. 39
0
def create_dolfin_mesh(points, cells):
    # https://bitbucket.org/fenics-project/dolfin/issues/845/initialize-mesh-from-vertices
    editor = MeshEditor()
    mesh = Mesh()
    editor.open(mesh, "triangle", 2, 2)
    editor.init_vertices(points.shape[0])
    editor.init_cells(cells.shape[0])
    for k, point in enumerate(points):
        editor.add_vertex(k, point)
    for k, cell in enumerate(cells):
        editor.add_cell(k, cell)
    editor.close()
    return mesh
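
A minimal usage sketch (assuming the legacy dolfin imports used by the surrounding module).
Note that, depending on the DOLFIN version, the cell connectivity may need the numpy.uintp
cast used in the two fitting examples above:

import numpy

# A single reference triangle is enough to exercise the helper
points = numpy.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])
cells = numpy.array([[0, 1, 2]], dtype=numpy.uintp)

mesh = create_dolfin_mesh(points, cells)
print(mesh.num_vertices(), mesh.num_cells())  # 3 1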
Esempio n. 40
0
def fluent2xml(ifilename, ofilename):
    """Converting from ANSYS Fluent format (.msh) to FEniCS xml format
    The fluent mesh (the .msh file) is basically stored as a list of vertices, and then a 
    list of faces for each zone of the mesh, the interior and the boundaries."""

    # Use regular expressions to identify sections and tokens found in a fluent file
    re_dimline = re.compile(r"\(2\s(\d)\)")
    re_comment = re.compile(r"\(0\s.*")
    re_zone_init = re.compile(r"\(10\s\(0\s(\w+)\s(\w+)\s(\d+)\s(\d+)\)\)")
    re_zone = re.compile(r"\(10\s\((\w+)\s(\w+)\s(\w+)\s(\d+)\s(\d)\)(\(|)")
    re_face_init = re.compile(r"\(13(\s*)\(0\s+(\w+)\s+(\w+)\s+(0|0 0)\)\)")
    re_face = re.compile(
        r"\(13(\s*)\((\w+)\s+(\w+)\s+(\w+)\s+(\w+)\s+(\w+)\)(\s*)(\(|)")
    re_periodic = re.compile(r"\(18.*\((\w+)\s+(\w+)\s+(\w+)\s+(\w+)\).*\(")
    re_pfaces = re.compile(r"((^\s)|)(\w+)(\s*)(\w+)")
    re_cells_init = re.compile(
        r"\(12(\s*)\(0(\s+)(\w+)(\s+)(\w+)(\s+)(0|0 0)\)\)")
    re_cells = re.compile(r"\(12.*\((\w+)\s+(\w+)\s+(\w+)\s+(\d+)\s+(\d+)\)\)")
    re_cells2 = re.compile(
        r"\(12(\s*)\((\w+)\s+(\w+)\s+(\w+)\s+(\w+)\s+(\w+)\)(\s*)(\(|)")
    re_zones = re.compile(
        r"\((45|39)\s+\((\d+)\s+(\S+)\s+(\S+).*\)\((.*|[0-9]+[\.]*[0-9]*)\)\)")
    re_parthesis = re.compile(r"(^\s*\)(\s*)|^\s*\)\)(\s*)|^\s*\(\s*)")

    # Declare some maps that will be built while reading the lists of vertices and faces:
    cell_map = {}  # Maps cell id with vertices
    boundary_cells = {}  # List of cells attached to a boundary facet. Key is zone id
    zones = {}  # zone information (not really used yet)

    def read_periodic(ifile, periodic_dx):
        """Scan past periodic section. Periodicity is computed by FEniCS."""
        while 1:
            line = ifile.readline()
            a = re.search(re_pfaces, line)
            if a:
                continue
            break

    def read_zone_vertices(dim, Nmin, Nmax, ifile, editor):
        """Scan ifile for vertices and add to mesh_editor."""
        # First line could be either just "(" or a regular vertex.
        # Check for initial parenthesis. If there is a parenthesis then read a new line, else reset
        pos = ifile.tell()
        line = ifile.readline()
        if not re.search(re_parthesis, line):
            ifile.seek(pos)  # reset
        # read Nmax-Nmin vertices
        for i in range(Nmin, Nmax + 1):
            line = ifile.readline()
            vertex = [eval(x) for x in line.split()]
            if dim == 2:
                editor.add_vertex(i - Nmin, vertex[0], vertex[1])
            else:
                editor.add_vertex(i - Nmin, vertex[0], vertex[1], vertex[2])

    def read_faces(zone_id, Nmin, Nmax, bc_type, face, ifile):
        """Read all faces and create cell_map + boundary maps."""
        pos = ifile.tell()  # current position
        line = ifile.readline()
        if not re.search(
                re_parthesis, line
        ):  # check for initial parenthesis. If there is a parenthesis then read a new line, else reset
            ifile.seek(pos)

        # read Nmax-Nmin faces
        for i in range(Nmin, Nmax + 1):
            line = ifile.readline()
            ln = line.split()
            if face == 0:
                nd = int(ln[0], 16)  # Number of vertices
                nds = [int(x, 16) for x in ln[1:(nd + 1)]]
                cells = [int(x, 16) for x in ln[(nd + 1):]]
            else:
                nd = face
                nds = [int(x, 16) for x in ln[:nd]]
                cells = [int(x, 16) for x in ln[nd:]]

            if min(cells) == 0:  # A boundary zone
                if zone_id in boundary_cells:
                    boundary_cells[zone_id][max(cells)] = array(nds)
                else:
                    boundary_cells[zone_id] = {max(cells): array(nds)}

            for c in cells:
                if c > 0:
                    if not c in cell_map:
                        cell_map[c] = copy(nds)
                    else:
                        cell_map[c] = list(Set(cell_map[c] + nds))

    def scan_fluent_mesh(ifile, mesh, editor):
        """Scan fluent mesh and generate maps."""
        dim = 0
        one = 0
        while 1:
            line = ifile.readline()
            if len(line) == 0:
                print 'Finished reading file\n'
                break

            if dim == 0:  # Dimension usually comes first
                a = re.search(re_dimline, line)
                if a:
                    print 'Reading dimensions\n'
                    dim = int(a.group(1))
                    editor.open(mesh, dim, dim)
                    continue

            if one == 0:  # The total number of vertices
                a = re.search(re_zone_init, line)
                if a:
                    print 'Reading zone info\n'
                    one, num_vertices, dummy1, dummy2 = int(a.group(1)), \
                        int(a.group(2), 16), int(a.group(3), 16), int(a.group(4))
                    editor.init_vertices(num_vertices)
                    continue

            a = re.search(re_zone, line)  # Vertices
            if a:
                zone_id, first_id, last_id = int(a.group(1), 16), \
                    int(a.group(2), 16), int(a.group(3), 16)
                print 'Reading ', last_id - first_id + 1, ' vertices in zone ', zone_id + 1, '\n'
                read_zone_vertices(dim, first_id, last_id, ifile, editor)
                continue

            a = re.search(re_zones, line)  # Zone info
            if a:
                print 'Reading zone info ', line
                dummy, zone_id, zone_type, zone_name, radius =  \
                        int(a.group(1)), int(a.group(2)),  a.group(3), \
                        a.group(4), a.group(5)
                zones[zone_id] = [zone_type, zone_name, radius]
                continue

            a = re.search(re_cells_init,
                          line)  # Get total number of cells/elements
            if a:
                print 'Reading cell info ', line
                first_id, tot_num_cells = int(a.group(3),
                                              16), int(a.group(5), 16)
                editor.init_cells(tot_num_cells)
                continue

            a = re.search(re_cells, line)  # Get the cell info.
            if a:
                zone_id, first_id, last_id, bc_type, element_type = \
                    int(a.group(1)), int(a.group(2), 16), int(a.group(3), 16), \
                    int(a.group(4), 16), int(a.group(5), 16)
                print 'Found ', last_id - first_id + 1, ' cells in zone ', zone_id, '\n'
                if last_id == 0:
                    raise TypeError("Zero elements!")
                continue

            a = re.search(re_cells2, line)  # Get the cell info.
            if a:
                raise TypeError(
                    "Wrong cell type. Can only handle one single cell type")

            a = re.search(re_face_init, line)
            if a:
                print 'Reading total number of faces\n', line
                continue

            a = re.search(re_face, line)
            if a:
                print 'Reading faces ', line
                zone_id, first_id, last_id, bc_type, face_type = \
                    int(a.group(2), 16), int(a.group(3), 16), int(a.group(4), 16), \
                    int(a.group(5), 16), int(a.group(6), 16)
                read_faces(zone_id, first_id, last_id, bc_type, face_type,
                           ifile)
                continue

            a = re.search(re_periodic, line)
            if a:
                print 'Scanning past periodic connectivity\n', line
                read_periodic(ifile, periodic_dx)
                continue

            if any([re.search(st, line) for st in (re_parthesis, re_comment)]) or \
                                                                not line.strip():
                continue

            # Should not make it here
            raise IOError('Something went wrong reading fluent mesh.')

    def write_fenics_file(ofile, mesh, editor):
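        # Note (added): cell_map and boundary_cells are assumed to be dicts in an
        # enclosing scope, populated by the reading helpers above -- cell
        # connectivity keyed by 1-based Fluent cell id, and per-zone lists of
        # boundary face nodes, respectively.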

        dim = mesh.geometry().dim()
        for i in range(1, len(cell_map) + 1):
            if dim == 2:
                editor.add_cell(i - 1, cell_map[i][0] - 1, cell_map[i][1] - 1,
                                cell_map[i][2] - 1)
            else:
                editor.add_cell(i - 1, cell_map[i][0] - 1, cell_map[i][1] - 1,
                                cell_map[i][2] - 1, cell_map[i][3] - 1)

        mesh.order()
        # Set MeshValueCollections from info in boundary_cells
        #mvc = mesh.domains().markers(dim-1)
        md = mesh.domains()
        for zone, cells in boundary_cells.iteritems():
            for cell, nds in cells.iteritems():
                dolfin_cell = Cell(mesh, cell - 1)
                vertices_of_cell = dolfin_cell.entities(0)
                vertices_of_face = nds - 1
                for jj, ff in enumerate(facets(dolfin_cell)):
                    facet_vertices = ff.entities(0)
                    if all(map(lambda x: x in vertices_of_face,
                               facet_vertices)):
                        local_index = jj
                        break
                #mvc.set_value(cell-1, local_index, zone)
                md.set_marker((ff.index(), zone), dim - 1)

        ofile << mesh
        from dolfin import plot
        plot(mesh, interactive=True)
        print 'Finished writing FEniCS mesh\n'

    ifile = open(ifilename, "r")
    ofile = File(ofilename)
    mesh = Mesh()
    editor = MeshEditor()
    scan_fluent_mesh(ifile, mesh, editor)
    write_fenics_file(ofile, mesh, editor)
    ifile.close()
Esempio n. 41
0
def convert(ifilename, handler):
    """ Convert from Abaqus.

    The Abaqus format first defines a node block, then there should be a number
    of elements containing these nodes.
    """

    # Dictionary of nodes (maps node id to coordinates)
    nodes = {}

    # Dictionary of elements (maps cell id to list of cell nodes)
    elems = {}

    # Lists of nodes for given name (key)
    node_sets = {}

    # Lists of cells for given name (key)
    cell_sets = {}

    # Lists of surfaces for given name (key) in the format:
    # {'SS1': [set(['SS1_S1', 'S1']), set(['SS1_S4', 'S4'])]},
    # where SS1 is the name of the surface, SS1_S1 is the name of the
    # cell list whose first face is to be selected, ...
    surface_sets = {}

    # Open the Abaqus file
    csv_file = csv.reader(open(ifilename, 'rb'), delimiter=',', skipinitialspace=True)

    node_set_name = None
    generate = None

    # Set initial state
    state = State.Init

    # Read data from input file
    for l in csv_file:

        # Sanity check
        if len(l) == 0: print "Oops, zero length."

        if l[0].startswith('**'): # Pass over comments
            continue
        elif l[0].startswith('*'): # Have a keyword
            state = State.Unknown

            if l[0].lower() == "*heading":
                state = State.ReadHeading

            elif l[0].lower() == "*part":
                part_name = _read_part_name(l)

            elif l[0].lower() == "*end part":
                state = State.Invalid

            elif l[0].lower() == "*node":
                node_set_name = _create_node_list_entry(node_sets, l)
                state = State.ReadNodes

            elif l[0].lower() == "*element":
                cell_type, cell_set_name = _read_element_keywords(cell_sets, l)
                state = State.ReadCells

            elif l[0].lower() == "*nset":
                node_set_name, generate = _read_nset_keywords(node_sets, l)
                state = State.ReadNodeSet

            elif l[0].lower() == "*elset":
                cell_set_name, generate = _read_elset_keywords(cell_sets, l)
                if generate:
                    print "WARNING: generation of *elsets not tested."
                state = State.ReadCellSet

            elif l[0].lower() == "*surface":
                surface_set_name, generate = _read_surface_keywords(surface_sets, l)
                state = State.ReadSurfaceSet

            else:
                print "WARNING: unrecognised Abaqus input keyword:", l[0]
                state = State.Unknown

        else:

            if state == State.ReadHeading:
                model_name = _read_heading(l)

            elif state == State.ReadNodes:
                node_id = int(l[0]) - 1
                coords = [float(c) for c in l[1:]]
                nodes[node_id] = coords
                if node_set_name is not None:
                    node_sets[node_set_name].add(node_id)

            elif state == State.ReadCells:
                cell_id = int(l[0]) - 1
                cell_connectivity = [int(v) - 1 for v in l[1:]]
                elems[cell_id] = cell_connectivity
                if cell_set_name is not None:
                    cell_sets[cell_set_name].add(cell_id)

            elif state == State.ReadNodeSet:

                try:
                    if generate:
                        n0, n1, increment = l
                        node_range = range(int(n0) - 1, int(n1) - 1, int(increment))
                        node_range.append(int(n1) - 1)
                        node_sets[node_set_name].update(node_range)
                    else:
                        # Strip empty term at end of list, if present
                        if l[-1] == '': l.pop(-1)
                        node_range = [int(n) - 1 for n in l]
                        node_sets[node_set_name].update(node_range)
                except:
                    print "WARNING: Non-integer node sets not yet supported."

            elif state == State.ReadCellSet:
                try:
                    if generate:
                        n0, n1, increment = l
                        cell_range = range(int(n0) - 1, int(n1) - 1, int(increment))
                        cell_range.append(int(n1) - 1)
                        cell_sets[cell_set_name].update(cell_range)
                    else:
                        # Strip empty term at end of list, if present
                        if l[-1] == '': l.pop(-1)
                        cell_range = [int(n) - 1 for n in l]
                        cell_sets[cell_set_name].update(cell_range)
                except:
                    print "WARNING: Non-integer element sets not yet supported."

            elif state == State.ReadSurfaceSet:
                # Strip empty term at end of list, if present
                if l[-1] == '': l.pop(-1)
                surface_sets[surface_set_name].update([tuple(l)])

            elif state == State.Invalid: # part
                raise StandardError("Invalid Abaqus parser state.")


    # Close CSV object
    del csv_file

    # Write data to XML file
    # Note that vertices/cells must be consecutively numbered, which
    # isn't necessarily the case in Abaqus. Therefore we enumerate and
    # translate original IDs to sequence indexes if gaps are present.
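    # (For example, Abaqus node ids 10, 12 and 17 would be written out as
    # DOLFIN vertex indices 0, 1 and 2 via the node_ids_order map below.)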

    # FIXME
    handler.set_mesh_type("tetrahedron", 3)

    process_facets = len(surface_sets) > 0
    if process_facets:
        try:
            from dolfin import MeshEditor, Mesh
        except ImportError:
            _error("DOLFIN must be installed to handle Abaqus boundary regions")

        mesh = Mesh()
        mesh_editor = MeshEditor()
        mesh_editor.open(mesh, 3, 3)

    node_ids_order = {}
    # Check for gaps in vertex numbering
    node_ids = nodes.keys()
    if len(node_ids) > 0:
        vertex_gap = (min(node_ids) != 0 or max(node_ids) != len(node_ids) - 1)
        for x, y in enumerate(node_ids):
            node_ids_order[y] = x  # Maps Abaqus IDs to DOLFIN IDs
    else:
        vertex_gap = True

    # Check for gaps in cell numbering
    elemids = elems.keys()
    if len(elemids) > 0:
        cell_gap = (min(elemids) != 0 or max(elemids) != len(elemids) - 1)
    else:
        cell_gap = True

    # Write vertices to XML file
    handler.start_vertices(len(nodes))
    if process_facets:
        mesh_editor.init_vertices(len(nodes))

    if not vertex_gap:

        for v_id, v_coords in nodes.items():
            handler.add_vertex(v_id, v_coords)
            if process_facets:
                mesh_editor.add_vertex(v_id, np.array(v_coords, dtype=np.float_))

    else:

        for idx, (v_id, v_coords) in enumerate(nodes.items()):
            handler.add_vertex(idx, v_coords)
            if process_facets:
                mesh_editor.add_vertex(idx, np.array(v_coords, dtype=np.float_))

    handler.end_vertices()

    # Write cells to XML file
    handler.start_cells(len(elems))
    if process_facets:
        mesh_editor.init_cells(len(elems))

    if not vertex_gap and not cell_gap:

        for c_index, c_data in elems.items():
            for v_id in c_data:
                if not (0 <= v_id < len(nodes)):
                    handler.error("Element %s references non-existent node %s" % (c_index, v_id))
            handler.add_cell(c_index, c_data)

            if process_facets:
                c_data_tmp = np.array(c_data)
                c_data_tmp.sort()
                mesh_editor.add_cell(c_index, np.array(c_data_tmp, dtype=np.uintp))


    elif not vertex_gap and cell_gap:

        for idx, (c_index, c_data) in enumerate(elems.items()):
            for v_id in c_data:
                if not (0 <= v_id < len(nodes)):
                    handler.error("Element %s references non-existent node %s" % (c_index, v_id))
            handler.add_cell(idx, c_data)

            if process_facets:
                c_data_tmp = np.array(c_data)
                c_data_tmp.sort()
                mesh_editor.add_cell(idx, np.array(c_data_tmp, dtype=np.uintp))

    else:

        for idx, (c_id, c_data) in enumerate(elems.items()):
            c_nodes = []
            for v_id in c_data:
                try: c_nodes.append(node_ids_order[v_id])
                except KeyError:
                    handler.error("Element %s references non-existent node %s" % (c_id, v_id))
            handler.add_cell(idx, c_nodes)

            if process_facets:
                c_nodes.sort()
                mesh_editor.add_cell(idx, np.array(c_nodes, dtype=np.uintp))

    handler.end_cells()

    # Write MeshValueCollections to XML file
    handler.start_domains()

    # Build an Abaqus node ID -> DOLFIN cell ID map (not unique, but that is
    # irrelevant here) together with the node's local entity index.
    if len(node_sets.items()) > 0:
        node_cell_map = {}
        for c_dolfin_index, (c_index, c_data) in enumerate(elems.items()):
            c_data_tmp = np.array(c_data)
            c_data_tmp.sort()
            for local_entity, n_index in enumerate(c_data_tmp):
                node_cell_map[n_index] = (c_dolfin_index, local_entity)

    # Write vertex/node sets
    dim = 0
    for value, (name, node_set) in enumerate(node_sets.items()):
        handler.start_mesh_value_collection(name, dim, len(node_set), "uint")

        for node in node_set:
            try:
                cell, local_entity = node_cell_map[node]
                handler.add_entity_mesh_value_collection(dim, cell, value, local_entity=local_entity)
            except KeyError:
                print "Warning: Boundary references non-existent node %s" % node
        handler.end_mesh_value_collection()

    # Write cell/element sets
    dim = 3
    for name, s in cell_sets.items():
        handler.start_mesh_value_collection(name, dim, len(s), "uint")
        for cell in s:
            handler.add_entity_mesh_value_collection(dim, cell, 0)
        handler.end_mesh_value_collection()

    # Write surface sets
    if process_facets:
        dim = 2
        nodes_facet_map = _nodes_facet_map(mesh)

        data = [int(0)] * mesh.num_facets()
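        # Abaqus face labels S1-S4 of a C3D4 tetrahedron, expressed as
        # 0-based local node indices (Abaqus convention: S1 = nodes 1-2-3,
        # S2 = 1-4-2, S3 = 2-4-3, S4 = 3-4-1)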
        S1 = [0, 1, 2]
        S2 = [0, 3, 1]
        S3 = [1, 3, 2]
        S4 = [2, 3, 0]
        node_selector = {'S1': S1,
                         'S2': S2,
                         'S3': S3,
                         'S4': S4,
                         }

        for index, (name, s) in enumerate(surface_sets.items()):
            cell_face_list = []
            for cell_set_name, face_index in s:
                cell_face_list += [(cell, face_index) for cell in cell_sets[cell_set_name]]

            for cell, face in cell_face_list:
                cell_nodes = elems[cell]
                # Extract the face nodes
                face_nodes = [cell_nodes[i] for i in node_selector[face]]
                dolfin_face_nodes = [node_ids_order[n] for n in face_nodes]
                dolfin_face_nodes.sort()
                # Convert the face_nodes to dolfin IDs
                face_id = nodes_facet_map[tuple(dolfin_face_nodes)]
                data[face_id] = index + 1

        # Create and initialise the mesh function
        handler.start_meshfunction("facet_region", dim, mesh.num_facets())
        for index, physical_region in enumerate(data):
            handler.add_entity_meshfunction(index, physical_region)
        handler.end_meshfunction()


    handler.end_domains()
Esempio n. 42
0
def _main():
    args = _parse_cmd_arguments()

    content = np.load(args.infile)

    data = content.item()["data"]
    n = content.item()["n"]

    # # plot statistics
    # axes0 = problem.get_ellipse_axes(alpha0).T.flatten()
    # plt.plot(axes0, label='axes lengths before')
    # axes1 = problem.get_ellipse_axes(out.x).T.flatten()
    # plt.plot(axes1, label='axes lengths opt')
    # plt.legend()
    # plt.grid()

    # Plot unperturbed MacAdam
    # colorio.plot_luo_rigg(
    #     ellipse_scaling=1,
    colorio.save_macadam("macadam-native.png",
                         ellipse_scaling=10,
                         plot_rgb_triangle=False,
                         n=n)

    points, cells = meshzoo.triangle(corners=np.array([[0.0, 0.0, 0.0],
                                                       [1.0, 0.0, 0.0],
                                                       [0.0, 1.0, 0.0]]),
                                     n=n)

    # https://bitbucket.org/fenics-project/dolfin/issues/845/initialize-mesh-from-vertices
    editor = MeshEditor()
    mesh = Mesh()
    editor.open(mesh, "triangle", 2, 2)
    editor.init_vertices(points.shape[0])
    editor.init_cells(cells.shape[0])
    for k, point in enumerate(points):
        editor.add_vertex(k, point[:2])
    for k, cell in enumerate(cells):
        editor.add_cell(k, cell)
    editor.close()

    V = FunctionSpace(mesh, "CG", 1)

    def get_u(alpha):
        n = V.dim()
        ax = alpha[:n]
        ay = alpha[n:]

        ux = Function(V)
        ux.vector().set_local(ax)
        ux.vector().apply("")

        uy = Function(V)
        uy.vector().set_local(ay)
        uy.vector().apply("")
        return ux, uy

    # Plot perturbed MacAdam
    def transform(XY, data=data):
        is_solo = len(XY.shape) == 1
        if is_solo:
            XY = np.array([XY]).T
        # print(XY)
        ux, uy = get_u(data)
        out = np.array([[ux(x, y) for x, y in XY.T],
                        [uy(x, y) for x, y in XY.T]])
        if is_solo:
            out = out[..., 0]
        return out

    # colorio.plot_luo_rigg(
    #     ellipse_scaling=1,
    plt.figure()
    colorio.plot_macadam(
        ellipse_scaling=10,
        # xy_to_2d=problem.pade2d.eval,
        xy_to_2d=transform,
        plot_rgb_triangle=False,
        mesh_resolution=n,
    )
    # plt.xlim(-0.2, 0.9)
    # plt.ylim(+0.0, 0.7)
    plt.savefig(f"macadam-{n:03d}.png")
    return
Esempio n. 43
0
def build_mesh_old(cells, vertices):
    """Assemble a mesh object from cells and vertices."""
    mesh = Mesh()
    editor = MeshEditor()
    dim = len(vertices[0])
    if dim == 2:
        editor.open(mesh, 'triangle', 2, 2)
    else:
        editor.open(mesh, 'tetrahedron', 3, 3)
    editor.init_vertices(len(vertices))
    editor.init_cells(len(cells))
    for i, v in enumerate(vertices):
        editor.add_vertex(i, *v)
    for i, c in enumerate(cells):
        editor.add_cell(i, *c)
    editor.close()
    return mesh
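
# A minimal usage sketch (an addition, not part of the original snippet),
# assuming a legacy DOLFIN build that supports the positional MeshEditor
# calls used above: two triangles forming the unit square.
unit_square_vertices = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)]
unit_square_cells = [(0, 1, 2), (0, 2, 3)]
unit_square = build_mesh_old(unit_square_cells, unit_square_vertices)
print(unit_square.num_vertices(), unit_square.num_cells())  # expect 4 and 2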
Esempio n. 44
0
def build_interval_mesh(vertices, cells):
    '''Mesh of 1d topology in n-dims.'''
    imesh = Mesh()
    editor = MeshEditor()
    editor.open(imesh, 1, vertices.shape[1])
    editor.init_vertices(len(vertices))
    editor.init_cells(len(cells))

    # Add vertices
    for vertex_index, v in enumerate(vertices):
        editor.add_vertex(vertex_index, v)

    # Add cells
    for cell_index, (v0, v1) in enumerate(cells):
        editor.add_cell(cell_index, v0, v1)

    editor.close()

    return imesh
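
# A hedged usage sketch (an addition): a three-segment polyline embedded in
# 2D, assuming the legacy MeshEditor API with an integer topological
# dimension as used above.
import numpy as np
polyline_vertices = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [2.0, 1.0]])
polyline_cells = [(0, 1), (1, 2), (2, 3)]
polyline_mesh = build_interval_mesh(polyline_vertices, polyline_cells)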
Esempio n. 45
0
 def get(self):
     """Just one cell."""
     topx, topy = self.pad(self.values)
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 2, 2)
     editor.init_vertices(3)
     editor.init_cells(1)
     editor.add_vertex(0, 0, 0)
     editor.add_vertex(1, 1, 0)
     editor.add_vertex(2, topx, topy)
     editor.add_cell(0, 0, 1, 2)
     editor.close()
     return mesh
Esempio n. 46
0
def _create_dolfin_mesh(points, cells):
    editor = MeshEditor()
    mesh = Mesh()
    # topological and geometrical dimension 2
    editor.open(mesh, "triangle", 2, 2, 1)
    editor.init_vertices(len(points))
    editor.init_cells(len(cells))
    for k, point in enumerate(points):
        editor.add_vertex(k, point[:2])
    for k, cell in enumerate(cells.astype(numpy.uintp)):
        editor.add_cell(k, cell)
    editor.close()
    return mesh
Esempio n. 47
0
 def get(self):
     """Build vertices from polar coordinates."""
     angle, dist = self.values
     if len(angle) < 3:
         angle = np.array(range(int(angle[0]))) * 360.0 / angle[0]
     while len(dist) < len(angle):
         dist = dist * 2
     dist = np.array(dist)
     sides = len(angle)
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 2, 2)
     editor.init_vertices(sides + 1)
     editor.init_cells(sides)
     editor.add_vertex(0, 0, 0)
     for i in range(1, sides + 1):
         editor.add_vertex(i, dist[i - 1] * cos(angle[i - 1] / 180.0 * pi),
                           dist[i - 1] * sin(angle[i - 1] / 180.0 * pi))
     for i in range(sides - 1):
         editor.add_cell(i, 0, i + 1, i + 2)
     editor.add_cell(sides - 1, 0, sides, 1)
     editor.close()
     return mesh
Esempio n. 48
0
def import_from_gmsh(fname):
    "Convert from gmsh to dolfin"

    # read with meshio
    msh = meshio.read(fname)

    # create a DOLFIN mesh (assuming 2d)
    gdim, tdim = 2, 2
    mm = Mesh()
    editor = MeshEditor()
    editor.open(mm, "triangle", gdim, tdim)

    npt = msh.points.shape[0]
    nc = msh.get_cells_type("triangle").shape[0]

    editor.init_vertices_global(npt, npt)
    editor.init_cells_global(nc, nc)

    for i, p in enumerate(msh.points):
        editor.add_vertex(i, p[:2])

    for i, c in enumerate(msh.get_cells_type("triangle")):
        editor.add_cell(i, c)

    editor.close()

    # domains
    md = mm.domains()
    md.init(tdim)
    markers = {}

    if 'gmsh:physical' not in msh.cell_data_dict:
        # no markers at all
        return mm, markers

    phy = msh.cell_data_dict['gmsh:physical']
    if 'triangle' in phy:
        for eid, val in enumerate(phy['triangle']):
            md.set_marker((eid, val), 2)

    if 'line' in phy:
        mm.init(0, 1)
        p2e = mm.topology()(0, 1)

        for l, k in zip(msh.get_cells_type("line"), phy['line']):
            e = set(p2e(l[0])).intersection(p2e(l[1])).pop()
            md.set_marker((e, k), 1)

    if 'vertex' in phy:
        for eid, val in zip(msh.get_cells_type("vertex"), phy['vertex']):
            md.set_marker((eid[0], val), 0)

    # names
    markers = tuple(
        {n: v.item()
         for n, (v, d) in msh.field_data.items() if d == dim}
        for dim in range(tdim + 1))

    return mm, markers
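
# A hedged usage sketch for import_from_gmsh; "domain.msh" is a placeholder
# file name and the physical-group names depend entirely on the gmsh model:
#
#   mesh, markers = import_from_gmsh("domain.msh")
#   # markers is a tuple of dicts, one per dimension 0..tdim, mapping gmsh
#   # physical-group names to their integer tags:
#   boundary_tag = markers[1].get("inlet")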
Esempio n. 49
0
 def get(self):
     """One triangle per side in smaller, two triangles in larger."""
     sides, R = self.pad(self.values)
     mesh = Mesh()
     large = [np.array((cos(2 * pi * i / sides), sin(2 * pi * i / sides)))
              for i in range(1, sides+1)]
     small = np.array([v * R for v in large])
     # centers of edges in large polygon
     center = np.array([(v + w) / 2
                        for v, w in zip(large, large[1:] + [large[0]])])
     large = np.array(large)
     editor = MeshEditor()
     editor.open(mesh, 2, 2)
     editor.init_vertices(3 * sides)
     editor.init_cells(3 * sides)
     for i in range(sides):
         editor.add_vertex(3 * i, *large[i])
         editor.add_vertex(3 * i + 1, *small[i])
         editor.add_vertex(3 * i + 2, *center[i])
     for i, j in zip(range(sides), list(range(1, sides)) + [0]):
         editor.add_cell(3*i, 3*i, 3*i+1, 3*i+2)
         editor.add_cell(3*i+1, 3*i+1, 3*i+2, 3*j+1)
         editor.add_cell(3*i+2, 3*i+2, 3*j+1, 3*j)
     editor.close()
     return mesh
Esempio n. 50
0
def test_readme_images():
    from dolfin import (
        MeshEditor, Mesh, FunctionSpace, assemble, EigenMatrix, dot, grad, dx,
        TrialFunction, TestFunction
        )
    import meshzoo

    points, cells = meshzoo.rectangle(-1.0, 1.0, -1.0, 1.0, 20, 20)

    # Convert points, cells to dolfin mesh
    editor = MeshEditor()
    mesh = Mesh()
    # topological and geometrical dimension 2
    editor.open(mesh, 'triangle', 2, 2, 1)
    editor.init_vertices(len(points))
    editor.init_cells(len(cells))
    for k, point in enumerate(points):
        editor.add_vertex(k, point[:2])
    for k, cell in enumerate(cells.astype(numpy.uintp)):
        editor.add_cell(k, cell)
    editor.close()

    V = FunctionSpace(mesh, 'CG', 1)
    u = TrialFunction(V)
    v = TestFunction(V)
    L = EigenMatrix()
    assemble(dot(grad(u), grad(v)) * dx, tensor=L)
    A = L.sparray()

    # M = A.T.dot(A)
    M = A

    with tempfile.TemporaryDirectory() as temp_dir:
        filepath = os.path.join(temp_dir, 'test.png')
        betterspy.write_png(filepath, M, border_width=2)

    # betterspy.write_png(
    #     'ATA.png', M, border_width=2,
    #     colormap='viridis'
    #     )
    return
Esempio n. 51
0
 def get(self):
     """Eight cells."""
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 3, 3)
     editor.init_vertices(7)
     editor.init_cells(8)
     editor.add_vertex(0, 1, 0, 0)
     editor.add_vertex(1, 0, 1, 0)
     editor.add_vertex(2, 0, 0, 1)
     editor.add_vertex(3, -1, 0, 0)
     editor.add_vertex(4, 0, -1, 0)
     editor.add_vertex(5, 0, 0, -1)
     editor.add_vertex(6, 0, 0, 0)
     editor.add_cell(0, 6, 0, 1, 2)
     editor.add_cell(1, 6, 0, 1, 5)
     editor.add_cell(2, 6, 0, 4, 2)
     editor.add_cell(3, 6, 0, 4, 5)
     editor.add_cell(4, 6, 3, 1, 2)
     editor.add_cell(5, 6, 3, 1, 5)
     editor.add_cell(6, 6, 3, 4, 2)
     editor.add_cell(7, 6, 3, 4, 5)
     editor.close()
     return mesh
Esempio n. 52
0
def make_mesh(vertices, cells, cell_type):
    '''Mesh from data by MeshEditor'''
    gdim = cell_type.geometric_dimension()
    assert vertices.shape[1] == gdim

    tdim = cell_type.topological_dimension()

    mesh = Mesh()
    editor = MeshEditor()

    editor.open(mesh, str(cell_type), tdim, gdim)

    editor.init_vertices(len(vertices))
    editor.init_cells(len(cells))

    for vi, x in enumerate(vertices):
        editor.add_vertex(vi, x)

    for ci, c in enumerate(cells):
        editor.add_cell(ci, *c)

    editor.close()

    return mesh
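
# A hedged usage sketch (an addition): make_mesh only needs cell_type to
# expose geometric_dimension()/topological_dimension() and to stringify to a
# DOLFIN cell name, so a small stand-in object suffices for illustration
# (assuming a DOLFIN version that accepts the positional add_cell call above).
import numpy as np

class _TriangleCell(object):
    """Minimal stand-in for a UFL-style cell description."""
    def geometric_dimension(self): return 2
    def topological_dimension(self): return 2
    def __str__(self): return "triangle"

single_triangle = make_mesh(np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]]),
                            [(0, 1, 2)], _TriangleCell())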
Esempio n. 53
0
 def get(self):
     """Single cell."""
     a, b, c, d, e = self.pad(self.values)
     mesh = Mesh()
     editor = MeshEditor()
     editor.open(mesh, 3, 3)  # dimension
     editor.init_vertices(4)
     editor.init_cells(1)
     editor.add_vertex(0, 0, 0, 0)
     editor.add_vertex(1, 1, 0, 0)
     editor.add_vertex(2, a, b, 0)
     editor.add_vertex(3, c, d, e)
     editor.add_cell(0, 0, 1, 2, 3)
     editor.close()
     return mesh