Example #1
def import_from_gmsh(fname):
    "Convert from gmsh to dolfin"

    # read with meshio
    msh = meshio.read(fname)

    # create a DOLFIN mesh (assuming 2d)
    gdim, tdim = 2, 2
    mm = Mesh()
    editor = MeshEditor()
    editor.open(mm, "triangle", gdim, tdim)

    npt = msh.points.shape[0]
    nc = msh.get_cells_type("triangle").shape[0]

    editor.init_vertices_global(npt, npt)
    editor.init_cells_global(nc, nc)

    for i, p in enumerate(msh.points):
        editor.add_vertex(i, p[:2])

    for i, c in enumerate(msh.get_cells_type("triangle")):
        editor.add_cell(i, c)

    editor.close()

    # domains
    md = mm.domains()
    md.init(tdim)
    markers = {}

    if 'gmsh:physical' not in msh.cell_data_dict:
        # no markers at all
        return mm, markers

    phy = msh.cell_data_dict['gmsh:physical']
    if 'triangle' in phy:
        for eid, val in enumerate(phy['triangle']):
            md.set_marker((eid, val), 2)

    if 'line' in phy:
        mm.init(0, 1)
        p2e = mm.topology()(0, 1)

        for l, k in zip(msh.get_cells_type("line"), phy['line']):
            e = set(p2e(l[0])).intersection(p2e(l[1])).pop()
            md.set_marker((e, k), 1)

    if 'vertex' in phy:
        for eid, val in zip(msh.get_cells_type("vertex"), phy['vertex']):
            md.set_marker((eid[0], val), 0)

    # names
    markers = tuple(
        {n: v.item()
         for n, (v, d) in msh.field_data.items() if d == dim}
        for dim in range(tdim + 1))

    return mm, markers
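
A minimal usage sketch for the converter above, assuming a hypothetical 2D Gmsh file "domain.msh" that carries physical groups; the returned tuple holds one name-to-tag dictionary per topological dimension. The file name and the group name "inner" are assumptions.

# Hedged sketch: "domain.msh" and the physical-group name "inner" are assumptions.
import meshio
from dolfin import Mesh, MeshEditor  # used inside import_from_gmsh

mesh, markers = import_from_gmsh("domain.msh")
vertex_names, facet_names, cell_names = markers  # dicts keyed by physical-group name
print("cells:", mesh.num_cells(), "vertices:", mesh.num_vertices())
print("tag of the 'inner' subdomain:", cell_names.get("inner"))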
Example #2
def test_volume_quadrilateralR3(coordinates):

    mesh = Mesh(MPI.comm_world, CellType.Type.quadrilateral,
                numpy.array(coordinates, dtype=numpy.float64),
                numpy.array([[0, 1, 2, 3]], dtype=numpy.int32), [],
                cpp.mesh.GhostMode.none)

    mesh.init()
    cell = Cell(mesh, 0)

    assert cell.volume() == 1.0
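
The `coordinates` argument comes from a pytest fixture that is not shown here; a minimal sketch of such a fixture is a unit square embedded in R3 (e.g. in the x-y or x-z plane), so that the exact cell volume is 1.0. The concrete values below are an assumption.

import pytest

@pytest.fixture(params=[
    # unit square in the x-y plane
    [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [1.0, 1.0, 0.0]],
    # unit square in the x-z plane
    [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, 1.0], [1.0, 0.0, 1.0]],
])
def coordinates(request):
    return request.param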
Example #3
def convert_and_create_facet_mesh_function(ifilename, ofilename):
    # First convert the gmsh mesh
    meshconvert.convert2xml(ifilename, ofilename)

    # Now load the created mesh and initialise the required connectivity information
    mesh = Mesh(ofilename)
    mesh.order()

    File(ofilename) << mesh

    D = mesh.topology().dim()
    mesh.init(D - 1, 0)

    # read the data from the gmsh file once again
    dim_count, vertices_used, tags = process_gmsh_elements(ifilename, D - 1)
    # Get the facet-node connectivity information (reshape as a row of node indices per facet)
    facets_as_nodes = mesh.topology()(D - 1, 0)().reshape(mesh.num_facets(), D)

    # Create and initialise the mesh function
    facet_mark_function = MeshFunction('uint', mesh, D - 1)
    facet_mark_function.set_all(0)

    # set the relevant values of the mesh function
    facets_to_check = range(mesh.num_facets())
    for i in range(len(tags)):
        nodes = np.sort(np.array(vertices_used[2 * i:(2 * i + D)]))
        value = tags[i][0]

        if value != 0:
            found = False
            for j in range(len(facets_to_check)):
                index = facets_to_check[j]
                if np.array_equal(facets_as_nodes[index, :], nodes):
                    found = True
                    facets_to_check.pop(j)
                    # set the value of the mesh function
                    facet_mark_function[index] = value
                    break

            if not found:
                raise Exception("The facet (%d) was not found to mark: %s" %
                                (i, nodes))

    # save the mesh function to file
    fname = os.path.splitext(ofilename)[0]
    mesh_function_file = File("%s_%s.xml" % (fname, "facet_regions"))

    mesh_function_file << facet_mark_function
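
A sketch of how this converter might be driven from a script and how the resulting facet markers can be read back; the input file name "channel.msh" is an assumption, while the "_facet_regions.xml" suffix follows from the code above.

# Hypothetical driver script for the function above.
convert_and_create_facet_mesh_function("channel.msh", "channel.xml")

# Load the converted mesh and the facet markers written next to it.
mesh = Mesh("channel.xml")
facet_regions = MeshFunction("uint", mesh, "channel_facet_regions.xml")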
Example #4
def convert_and_create_facet_mesh_function ( ifilename, ofilename ):
    # First convert the gmsh mesh
    meshconvert.convert2xml ( ifilename, ofilename )
    
    # Now load the created mesh and initialise the required connectivity information
    mesh = Mesh ( ofilename )
    mesh.order()
    
    File ( ofilename ) << mesh
    
    D = mesh.topology().dim()
    mesh.init(D-1, 0)
    
    # read the data from the gmsh file once again
    dim_count, vertices_used, tags = process_gmsh_elements( ifilename, D-1 )
    # Get the facet-node connectivity information (reshape as a row of node indices per facet)
    facets_as_nodes = mesh.topology()(D-1,0)().reshape ( mesh.num_facets(), D )
    
    # Create and initialise the mesh function
    facet_mark_function = MeshFunction ( 'uint', mesh, D-1 )
    facet_mark_function.set_all( 0 )
    
    # set the relevant values of the mesh function
    facets_to_check = range( mesh.num_facets() )
    for i in range(len(tags)):
        nodes = np.sort(np.array(vertices_used[2*i:(2*i+D)]))
        value  = tags[i][0]
        
        if value != 0:
            found = False
            for j in range(len(facets_to_check)):
                index = facets_to_check[j]
                if np.array_equal(facets_as_nodes[index,:], nodes):
                    found = True
                    facets_to_check.pop(j)
                    # set the value of the mesh function
                    facet_mark_function[index] = value
                    break
                
            if not found:
                raise Exception ( "The facet (%d) was not found to mark: %s" % (i, nodes) )
        
    # save the mesh function to file
    fname = os.path.splitext(ofilename)[0]
    mesh_function_file = File("%s_%s.xml" % (fname, "facet_regions"))
    
    mesh_function_file << facet_mark_function
Example #5
def test_volume_quadrilateral_coplanarity_check_2(scaling):

    with pytest.raises(RuntimeError) as error:
        # Unit square cell scaled down by 'scaling' and the first
        # vertex is distorted so that the vertices are clearly non
        # coplanar
        mesh = Mesh(
            MPI.comm_world, CellType.Type.quadrilateral,
            numpy.array([[1.0, 0.5, 0.6], [0.0, scaling, 0.0],
                         [0.0, 0.0, scaling], [0.0, 1.0, 1.0]],
                        dtype=numpy.float64),
            numpy.array([[0, 1, 2, 3]], dtype=numpy.int32), [],
            cpp.mesh.GhostMode.none)
        mesh.init()
        cell = Cell(mesh, 0)
        cell.volume()

    assert "are not coplanar" in str(error.value)
Example #6
# Rossby radius.
LR=c/params["f"]

class InitialConditions(Expression):
    def eval(self, values, X):
        r=(X[0]**2+X[1]**2)**0.5
        if r>0.0001:
            values[0]=-0.05*c*exp((r-r0)/LR)*X[0]/r*X[1]/r
            values[1]= 0.05*c*exp((r-r0)/LR)*X[0]/r*X[0]/r
            values[2]= 0.05*exp((r-r0)/LR)*X[0]/r
        else:
            values[0]=0.
            values[1]=0.
            values[2]=0.
    def value_shape(self):
        return (3,)

try:
    mesh=Mesh("basin.xml")
except RuntimeError:
    import sys
    import os.path

    mesh=Mesh(os.path.dirname(sys.argv[0]) + os.path.sep + "basin.xml")

mesh.order()
mesh.init()
Example #7
# Rossby radius.
LR = c / params["f"]


class InitialConditions(Expression):
    def eval(self, values, X):
        r = (X[0]**2 + X[1]**2)**0.5
        if r > 0.0001:
            values[0] = -0.05 * c * exp((r - r0) / LR) * X[0] / r * X[1] / r
            values[1] = 0.05 * c * exp((r - r0) / LR) * X[0] / r * X[0] / r
            values[2] = 0.05 * exp((r - r0) / LR) * X[0] / r
        else:
            values[0] = 0.
            values[1] = 0.
            values[2] = 0.

    def value_shape(self):
        return (3, )


try:
    mesh = Mesh("basin.xml")
except RuntimeError:
    import sys
    import os.path

    mesh = Mesh(os.path.dirname(sys.argv[0]) + os.path.sep + "basin.xml")

mesh.order()
mesh.init()
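
A sketch of how `InitialConditions` would typically be attached to a function space on the basin mesh; the element family/degree and the choice of a 3-component vector space are assumptions, not taken from the original script.

from dolfin import VectorFunctionSpace, interpolate

V = VectorFunctionSpace(mesh, "CG", 1, dim=3)
# Newer legacy-DOLFIN releases require a degree (or element) argument for user Expressions.
w0 = interpolate(InitialConditions(degree=1), V)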
Example #8
def gmsh2xml(ifilename, handler):
    """Convert between .gmsh v2.0 format (http://www.geuz.org/gmsh/) and .xml,
    parser implemented as a state machine:

        0 = read 'MeshFormat'
        1 = read  mesh format data
        2 = read 'EndMeshFormat'
        3 = read 'Nodes'
        4 = read  number of vertices
        5 = read  vertices
        6 = read 'EndNodes'
        7 = read 'Elements'
        8 = read  number of cells
        9 = read  cells
        10 = done

    Afterwards, extract physical region numbers if they are defined in
    the mesh file as a mesh function.

    """

    print "Converting from Gmsh format (.msh, .gmsh) to DOLFIN XML format"

    # The dimension of the gmsh element types supported here as well as the dolfin cell types for each dimension
    gmsh_dim = {15: 0, 1: 1, 2: 2, 4: 3}
    cell_type_for_dim = {1: "interval", 2: "triangle", 3: "tetrahedron" }
    # the gmsh element types supported for conversion
    supported_gmsh_element_types = [1, 2, 4, 15]

    # Open files
    ifile = open(ifilename, "r")

    # Scan file for cell type
    cell_type = None
    highest_dim = 0
    line = ifile.readline()
    while line:

        # Remove newline
        if line[-1] == "\n":
            line = line[:-1]

        # Read dimension
        if line.find("$Elements") == 0:

            line = ifile.readline()
            num_elements = int(line)
            if num_elements == 0:
                _error("No elements found in gmsh file.")
            line = ifile.readline()

            # Now iterate through elements to find largest dimension.  Gmsh
            # format might include elements of lower dimensions in the element list.
            # We also need to count number of elements of correct dimensions.
            # Also determine which vertices are not used.
            dim_count = {0: 0, 1: 0, 2: 0, 3: 0}
            vertices_used_for_dim = {0: [], 1: [], 2: [], 3: []}
            # Array used to store gmsh tags for 1D (type 1/line), 2D (type 2/triangular) elements and 3D (type 4/tet) elements
            tags_for_dim = {0: [], 1: [], 2: [], 3: []}

            while line.find("$EndElements") == -1:
                element = line.split()
                elem_type = int(element[1])
                num_tags = int(element[2])
                if elem_type in supported_gmsh_element_types:
                    dim = gmsh_dim[elem_type]
                    if highest_dim < dim:
                        highest_dim = dim
                    node_num_list = [int(node) for node in element[3 + num_tags:]]
                    vertices_used_for_dim[dim].extend(node_num_list)
                    if num_tags > 0:
                        tags_for_dim[dim].append(tuple(int(tag) for tag in element[3:3+num_tags]))
                    dim_count[dim] += 1
                else:
                    #TODO: output a warning here. "gmsh element type %d not supported" % elem_type
                    pass
                line = ifile.readline()
        else:
            # Read next line
            line = ifile.readline()

    # Check that we got the cell type and set num_cells_counted
    if highest_dim == 0:
        _error("Unable to find cells of supported type.")

    num_cells_counted = dim_count[highest_dim]
    vertex_set = set(vertices_used_for_dim[highest_dim])
    vertices_used_for_dim[highest_dim] = None

    vertex_dict = {}
    for n,v in enumerate(vertex_set):
        vertex_dict[v] = n

    # Step to beginning of file
    ifile.seek(0)

    # Set mesh type
    handler.set_mesh_type(cell_type_for_dim[highest_dim], highest_dim)

    # Initialise node list (gmsh does not export all vertexes in order)
    nodelist = {}

    # Current state
    state = 0

    # Write data
    num_vertices_read = 0
    num_cells_read = 0

    # Only import the dolfin objects if facet markings exist
    process_facets = False
    if len(tags_for_dim[highest_dim-1]) > 0:
        # first construct the mesh
        try:
            from dolfin import MeshEditor, Mesh
        except ImportError:
            _error("DOLFIN must be installed to handle Gmsh boundary regions")
        mesh = Mesh()
        mesh_editor = MeshEditor ()
        mesh_editor.open( mesh, highest_dim, highest_dim )
        process_facets = True
    else:
        # TODO: Output a warning or an error here
        me = None

    while state != 10:

        # Read next line
        line = ifile.readline()
        if not line: break

        # Skip comments
        if line[0] == '#':
            continue

        # Remove newline
        if line[-1] == "\n":
            line = line[:-1]

        if state == 0:
            if line == "$MeshFormat":
                state = 1
        elif state == 1:
            (version, file_type, data_size) = line.split()
            state = 2
        elif state == 2:
            if line == "$EndMeshFormat":
                state = 3
        elif state == 3:
            if line == "$Nodes":
                state = 4
        elif state == 4:
            num_vertices = len(vertex_dict)
            handler.start_vertices(num_vertices)
            if process_facets:
                mesh_editor.init_vertices ( num_vertices )
            state = 5
        elif state == 5:
            (node_no, x, y, z) = line.split()
            node_no = int(node_no)
            x,y,z = [float(xx) for xx in (x,y,z)]
            if node_no in vertex_dict:
                node_no = vertex_dict[node_no]
            else:
                continue
            nodelist[int(node_no)] = num_vertices_read
            handler.add_vertex(num_vertices_read, [x, y, z])
            if process_facets:
                if highest_dim == 1:
                    coords = numpy.array([x])
                elif highest_dim == 2:
                    coords = numpy.array([x, y])
                elif highest_dim == 3:
                    coords = numpy.array([x, y, z])
                mesh_editor.add_vertex(num_vertices_read, coords)

            num_vertices_read +=1

            if num_vertices == num_vertices_read:
                handler.end_vertices()
                state = 6
        elif state == 6:
            if line == "$EndNodes":
                state = 7
        elif state == 7:
            if line == "$Elements":
                state = 8
        elif state == 8:
            handler.start_cells(num_cells_counted)
            if process_facets:
                mesh_editor.init_cells( num_cells_counted )

            state = 9
        elif state == 9:
            element = line.split()
            elem_type = int(element[1])
            num_tags  = int(element[2])
            if elem_type in supported_gmsh_element_types:
                dim = gmsh_dim[elem_type]
            else:
                dim = 0
            if dim == highest_dim:
                node_num_list = [vertex_dict[int(node)] for node in element[3 + num_tags:]]
                for node in node_num_list:
                    if not node in nodelist:
                        _error("Vertex %d of %s %d not previously defined." %
                              (node, cell_type_for_dim[dim], num_cells_read))
                cell_nodes = [nodelist[n] for n in node_num_list]
                handler.add_cell(num_cells_read, cell_nodes)

                if process_facets:
                    cell_nodes = numpy.array([nodelist[n] for n in node_num_list], dtype=numpy.uintp)
                    mesh_editor.add_cell(num_cells_read, cell_nodes)

                num_cells_read +=1

            if num_cells_counted == num_cells_read:
                handler.end_cells()
                if process_facets:
                    mesh_editor.close()
                state = 10
        elif state == 10:
            break

    # Write mesh function based on the Physical Regions defined by
    # gmsh, but only if they are not all zero. All zero physical
    # regions indicate that no physical regions were defined.
    if highest_dim not in [1,2,3]:
        _error("Gmsh tags not supported for dimension %i. Probably a bug" % dim)

    tags = tags_for_dim[highest_dim]
    physical_regions = tuple(tag[0] for tag in tags)
    if not all(tag == 0 for tag in physical_regions):
        handler.start_meshfunction("physical_region", dim, num_cells_counted)
        for i, physical_region in enumerate(physical_regions):
            handler.add_entity_meshfunction(i, physical_region)
        handler.end_meshfunction()

    # Now process the facet markers
    tags = tags_for_dim[highest_dim-1]
    if (len(tags) > 0) and (mesh is not None):
        physical_regions = tuple(tag[0] for tag in tags)
        if not all(tag == 0 for tag in physical_regions):
            mesh.init(highest_dim-1,0)

            # Get the facet-node connectivity information (reshape as a row of node indices per facet)
            if highest_dim==1:
              # for 1d meshes the mesh topology returns the vertex to vertex map, which isn't what we want
              # as facets are vertices
              facets_as_nodes = numpy.array([[i] for i in range(mesh.num_facets())])
            else:
              facets_as_nodes = mesh.topology()(highest_dim-1,0)().reshape ( mesh.num_facets(), highest_dim )

            # Build the reverse map
            nodes_as_facets = {}
            for facet in range(mesh.num_facets()):
              nodes_as_facets[tuple(facets_as_nodes[facet,:])] = facet

            data = [int(0*k) for k in range(mesh.num_facets()) ]
            for i, physical_region in enumerate(physical_regions):
                nodes = [n-1 for n in vertices_used_for_dim[highest_dim-1][highest_dim*i:(highest_dim*i+highest_dim)]]
                nodes.sort()

                if physical_region != 0:
                    try:
                        index = nodes_as_facets[tuple(nodes)]
                        data[index] = physical_region
                    except IndexError:
                        raise Exception ( "The facet (%d) was not found to mark: %s" % (i, nodes) )

            # Create and initialise the mesh function
            handler.start_meshfunction("facet_region", highest_dim-1, mesh.num_facets() )
            for index, physical_region in enumerate ( data ):
                handler.add_entity_meshfunction(index, physical_region)
            handler.end_meshfunction()

    # Check that we got all data
    if state == 10:
        print "Conversion done"
    else:
       _error("Missing data, unable to convert \n\ Did you use version 2.0 of the gmsh file format?")

    # Close files
    ifile.close()
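
gmsh2xml drives its `handler` purely through callbacks, so any object with the methods used above can be plugged in. Below is a minimal, hypothetical handler that only records counts and mesh functions; it is a sketch of the interface, not the real DOLFIN XML writer.

class CountingHandler(object):
    """Hypothetical stand-in for the XML-writing handler used by meshconvert."""

    def set_mesh_type(self, cell_type, dim):
        self.cell_type, self.dim = cell_type, dim
        self.num_vertices, self.num_cells = 0, 0
        self.meshfunctions = {}

    # vertex callbacks
    def start_vertices(self, num_vertices): pass
    def add_vertex(self, index, coords): self.num_vertices += 1
    def end_vertices(self): pass

    # cell callbacks
    def start_cells(self, num_cells): pass
    def add_cell(self, index, nodes): self.num_cells += 1
    def end_cells(self): pass

    # mesh-function callbacks (physical and facet regions)
    def start_meshfunction(self, name, dim, size):
        self._current = self.meshfunctions.setdefault(name, {})
    def add_entity_meshfunction(self, index, value): self._current[index] = value
    def end_meshfunction(self): pass

# Hypothetical use: gmsh2xml("domain.msh", CountingHandler())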
Example #9
File: test.py Project: alogg/dolfin
    def test_convert_triangle(self): # Disabled because it fails, see FIXME below
        # test no. 1
        from dolfin import Mesh, MPI
        if MPI.num_processes() != 1:
            return
        fname = os.path.join("data", "triangle")
        dfname = fname+".xml"
        
        # Read triangle file and convert to a dolfin xml mesh file
        meshconvert.triangle2xml(fname, dfname)

        # Read in dolfin mesh and check number of cells and vertices
        mesh = Mesh(dfname)
        self.assertEqual(mesh.num_vertices(), 96)
        self.assertEqual(mesh.num_cells(), 159)

        # Clean up
        os.unlink(dfname)


        # test no. 2
        from dolfin import MPI, Mesh, MeshFunction, \
                           edges, Edge, faces, Face, \
                           SubsetIterator, facets, CellFunction
        if MPI.num_processes() != 1:
            return
        fname = os.path.join("data", "test_Triangle_3")
        dfname = fname+".xml"
        dfname0 = fname+".attr0.xml"

        # Read triangle file and convert to a dolfin xml mesh file
        meshconvert.triangle2xml(fname, dfname)

        # Read in dolfin mesh and check number of cells and vertices
        mesh = Mesh(dfname)
        mesh.init()
        mfun = MeshFunction('double', mesh, dfname0)
        self.assertEqual(mesh.num_vertices(), 58)
        self.assertEqual(mesh.num_cells(), 58)

        # Create a size_t CellFunction and assign the values based on the
        # converted Meshfunction
        cf = CellFunction("size_t", mesh)
        cf.array()[mfun.array()==10.0] = 0
        cf.array()[mfun.array()==-10.0] = 1

        # Measure the total area of cells with markers 0 and 1
        add = lambda x, y : x+y
        area0 = reduce(add, (Face(mesh, cell.index()).area() \
                             for cell in SubsetIterator(cf, 0)), 0.0)
        area1 = reduce(add, (Face(mesh, cell.index()).area() \
                             for cell in SubsetIterator(cf, 1)), 0.0)
        total_area = reduce(add, (face.area() for face in faces(mesh)), 0.0)

        # Check that all cells in the two domains are either above or below y=0
        self.assertTrue(all(cell.midpoint().y()<0 for cell in SubsetIterator(cf, 0)))
        self.assertTrue(all(cell.midpoint().y()>0 for cell in SubsetIterator(cf, 1)))
        
        # Check that the areas add up
        self.assertAlmostEqual(area0+area1, total_area)
        
        # Measure the edge length of the two edge domains
        edge_markers = mesh.domains().facet_domains()
        self.assertTrue(edge_markers is not None)
        length0 = reduce(add, (Edge(mesh, e.index()).length() \
                            for e in SubsetIterator(edge_markers, 0)), 0.0)
        length1 = reduce(add, (Edge(mesh, e.index()).length() \
                            for e in SubsetIterator(edge_markers, 1)), 0.0)
        
        # Total length of all edges and total length of boundary edges
        total_length = reduce(add, (e.length() for e in edges(mesh)), 0.0)
        boundary_length = reduce(add, (Edge(mesh, f.index()).length() \
                          for f in facets(mesh) if f.exterior()), 0.0)
        
        # Check that the edges add up
        self.assertAlmostEqual(length0+length1, total_length)
        self.assertAlmostEqual(length1, boundary_length)

        # Clean up
        os.unlink(dfname)
        os.unlink(dfname0)
Example #10
def gmsh2xml(ifilename, handler):
    """Convert between .gmsh v2.0 format (http://www.geuz.org/gmsh/) and .xml,
    parser implemented as a state machine:

        0 = read 'MeshFormat'
        1 = read  mesh format data
        2 = read 'EndMeshFormat'
        3 = read 'Nodes'
        4 = read  number of vertices
        5 = read  vertices
        6 = read 'EndNodes'
        7 = read 'Elements'
        8 = read  number of cells
        9 = read  cells
        10 = done

    Afterwards, extract physical region numbers if they are defined in
    the mesh file as a mesh function.

    """

    print("Converting from Gmsh format (.msh, .gmsh) to DOLFIN XML format")

    # The dimension of the gmsh element types supported here as well as the dolfin cell types for each dimension
    gmsh_dim = {15: 0, 1: 1, 2: 2, 4: 3}
    cell_type_for_dim = {1: "interval", 2: "triangle", 3: "tetrahedron" }
    # the gmsh element types supported for conversion
    supported_gmsh_element_types = [1, 2, 4, 15]

    # Open files
    ifile = open(ifilename, "r")

    # Scan file for cell type
    cell_type = None
    highest_dim = 0
    line = ifile.readline()
    while line:

        # Remove newline
        line = line.rstrip("\n\r")

        # Read dimension
        if line.find("$Elements") == 0:

            line = ifile.readline()
            num_elements = int(line)
            if num_elements == 0:
                _error("No elements found in gmsh file.")
            line = ifile.readline()

            # Now iterate through elements to find largest dimension.  Gmsh
            # format might include elements of lower dimensions in the element list.
            # We also need to count number of elements of correct dimensions.
            # Also determine which vertices are not used.
            dim_count = {0: 0, 1: 0, 2: 0, 3: 0}
            vertices_used_for_dim = {0: [], 1: [], 2: [], 3: []}
            # Array used to store gmsh tags for 1D (type 1/line), 2D (type 2/triangular) elements and 3D (type 4/tet) elements
            tags_for_dim = {0: [], 1: [], 2: [], 3: []}

            while line.find("$EndElements") == -1:
                element = line.split()
                elem_type = int(element[1])
                num_tags = int(element[2])
                if elem_type in supported_gmsh_element_types:
                    dim = gmsh_dim[elem_type]
                    if highest_dim < dim:
                        highest_dim = dim
                    node_num_list = [int(node) for node in element[3 + num_tags:]]
                    vertices_used_for_dim[dim].extend(node_num_list)
                    if num_tags > 0:
                        tags_for_dim[dim].append(tuple(int(tag) for tag in element[3:3+num_tags]))
                    dim_count[dim] += 1
                else:
                    #TODO: output a warning here. "gmsh element type %d not supported" % elem_type
                    pass
                line = ifile.readline()
        else:
            # Read next line
            line = ifile.readline()

    # Check that we got the cell type and set num_cells_counted
    if highest_dim == 0:
        _error("Unable to find cells of supported type.")

    num_cells_counted = dim_count[highest_dim]
    vertex_set = set(vertices_used_for_dim[highest_dim])
    vertices_used_for_dim[highest_dim] = None

    vertex_dict = {}
    for n,v in enumerate(vertex_set):
        vertex_dict[v] = n

    # Step to beginning of file
    ifile.seek(0)

    # Set mesh type
    handler.set_mesh_type(cell_type_for_dim[highest_dim], highest_dim)

    # Initialise node list (gmsh does not export all vertexes in order)
    nodelist = {}

    # Current state
    state = 0

    # Write data
    num_vertices_read = 0
    num_cells_read = 0

    # Only import the dolfin objects if facet markings exist
    process_facets = False
    if len(tags_for_dim[highest_dim-1]) > 0:
        # first construct the mesh
        try:
            from dolfin import MeshEditor, Mesh
        except ImportError:
            _error("DOLFIN must be installed to handle Gmsh boundary regions")
        mesh = Mesh()
        mesh_editor = MeshEditor ()
        mesh_editor.open( mesh, highest_dim, highest_dim )
        process_facets = True
    else:
        # TODO: Output a warning or an error here
        me = None

    while state != 10:

        # Read next line
        line = ifile.readline()
        if not line: break

        # Skip comments
        if line[0] == '#':
            continue

        # Remove newline
        line = line.rstrip("\n\r")

        if state == 0:
            if line == "$MeshFormat":
                state = 1
        elif state == 1:
            (version, file_type, data_size) = line.split()
            state = 2
        elif state == 2:
            if line == "$EndMeshFormat":
                state = 3
        elif state == 3:
            if line == "$Nodes":
                state = 4
        elif state == 4:
            num_vertices = len(vertex_dict)
            handler.start_vertices(num_vertices)
            if process_facets:
                mesh_editor.init_vertices_global(num_vertices, num_vertices)
            state = 5
        elif state == 5:
            (node_no, x, y, z) = line.split()
            node_no = int(node_no)
            x,y,z = [float(xx) for xx in (x,y,z)]
            if node_no in vertex_dict:
                node_no = vertex_dict[node_no]
            else:
                continue
            nodelist[int(node_no)] = num_vertices_read
            handler.add_vertex(num_vertices_read, [x, y, z])
            if process_facets:
                if highest_dim == 1:
                    coords = numpy.array([x])
                elif highest_dim == 2:
                    coords = numpy.array([x, y])
                elif highest_dim == 3:
                    coords = numpy.array([x, y, z])
                mesh_editor.add_vertex(num_vertices_read, coords)

            num_vertices_read +=1

            if num_vertices == num_vertices_read:
                handler.end_vertices()
                state = 6
        elif state == 6:
            if line == "$EndNodes":
                state = 7
        elif state == 7:
            if line == "$Elements":
                state = 8
        elif state == 8:
            handler.start_cells(num_cells_counted)
            if process_facets:
                mesh_editor.init_cells_global(num_cells_counted, num_cells_counted)

            state = 9
        elif state == 9:
            element = line.split()
            elem_type = int(element[1])
            num_tags  = int(element[2])
            if elem_type in supported_gmsh_element_types:
                dim = gmsh_dim[elem_type]
            else:
                dim = 0
            if dim == highest_dim:
                node_num_list = [vertex_dict[int(node)] for node in element[3 + num_tags:]]
                for node in node_num_list:
                    if not node in nodelist:
                        _error("Vertex %d of %s %d not previously defined." %
                              (node, cell_type_for_dim[dim], num_cells_read))
                cell_nodes = [nodelist[n] for n in node_num_list]
                handler.add_cell(num_cells_read, cell_nodes)

                if process_facets:
                    cell_nodes = numpy.array([nodelist[n] for n in node_num_list], dtype=numpy.uintp)
                    mesh_editor.add_cell(num_cells_read, cell_nodes)

                num_cells_read +=1

            if num_cells_counted == num_cells_read:
                handler.end_cells()
                if process_facets:
                    mesh_editor.close()
                state = 10
        elif state == 10:
            break

    # Write mesh function based on the Physical Regions defined by
    # gmsh, but only if they are not all zero. All zero physical
    # regions indicate that no physical regions were defined.
    if highest_dim not in [1,2,3]:
        _error("Gmsh tags not supported for dimension %i. Probably a bug" % dim)

    tags = tags_for_dim[highest_dim]
    physical_regions = tuple(tag[0] for tag in tags)
    if not all(tag == 0 for tag in physical_regions):
        handler.start_meshfunction("physical_region", dim, num_cells_counted)
        for i, physical_region in enumerate(physical_regions):
            handler.add_entity_meshfunction(i, physical_region)
        handler.end_meshfunction()

    # Now process the facet markers
    tags = tags_for_dim[highest_dim-1]
    if (len(tags) > 0) and (mesh is not None):
        physical_regions = tuple(tag[0] for tag in tags)
        if not all(tag == 0 for tag in physical_regions):
            mesh.init(highest_dim-1,0)

            # Get the facet-node connectivity information (reshape as a row of node indices per facet)
            if highest_dim==1:
              # for 1d meshes the mesh topology returns the vertex to vertex map, which isn't what we want
              # as facets are vertices
              facets_as_nodes = numpy.array([[i] for i in range(mesh.num_facets())])
            else:
              facets_as_nodes = mesh.topology()(highest_dim-1,0)().reshape ( mesh.num_facets(), highest_dim )

            # Build the reverse map
            nodes_as_facets = {}
            for facet in range(mesh.num_facets()):
              nodes_as_facets[tuple(facets_as_nodes[facet,:])] = facet

            data = [int(0*k) for k in range(mesh.num_facets()) ]
            for i, physical_region in enumerate(physical_regions):
                nodes = [n-1 for n in vertices_used_for_dim[highest_dim-1][highest_dim*i:(highest_dim*i+highest_dim)]]
                nodes.sort()

                if physical_region != 0:
                    try:
                        index = nodes_as_facets[tuple(nodes)]
                        data[index] = physical_region
                    except IndexError:
                        raise Exception ( "The facet (%d) was not found to mark: %s" % (i, nodes) )

            # Create and initialise the mesh function
            handler.start_meshfunction("facet_region", highest_dim-1, mesh.num_facets() )
            for index, physical_region in enumerate ( data ):
                handler.add_entity_meshfunction(index, physical_region)
            handler.end_meshfunction()

    # Check that we got all data
    if state == 10:
        print("Conversion done")
    else:
       _error("Missing data, unable to convert \n\ Did you use version 2.0 of the gmsh file format?")

    # Close files
    ifile.close()
Example #11
    def test_convert_triangle(
            self):  # Disabled because it fails, see FIXME below

        # test no. 1
        from functools import reduce  # reduce is not a builtin on Python 3
        from dolfin import Mesh, MPI

        fname = os.path.join(os.path.dirname(__file__), "data", "triangle")
        dfname = fname + ".xml"

        # Read triangle file and convert to a dolfin xml mesh file
        meshconvert.triangle2xml(fname, dfname)

        # Read in dolfin mesh and check number of cells and vertices
        mesh = Mesh(dfname)
        self.assertEqual(mesh.num_vertices(), 96)
        self.assertEqual(mesh.num_cells(), 159)

        # Clean up
        os.unlink(dfname)

        # test no. 2
        from dolfin import MPI, Mesh, MeshFunction, \
                           edges, Edge, faces, Face, \
                           SubsetIterator, facets

        fname = os.path.join(os.path.dirname(__file__), "data",
                             "test_Triangle_3")
        dfname = fname + ".xml"
        dfname0 = fname + ".attr0.xml"

        # Read triangle file and convert to a dolfin xml mesh file
        meshconvert.triangle2xml(fname, dfname)

        # Read in dolfin mesh and check number of cells and vertices
        mesh = Mesh(dfname)
        mesh.init()
        mfun = MeshFunction('double', mesh, dfname0)
        self.assertEqual(mesh.num_vertices(), 58)
        self.assertEqual(mesh.num_cells(), 58)

        # Create a size_t MeshFunction and assign the values based on the
        # converted Meshfunction
        cf = MeshFunction("size_t", mesh, mesh.topology().dim())
        cf.array()[mfun.array() == 10.0] = 0
        cf.array()[mfun.array() == -10.0] = 1

        # Measure the total area of cells with markers 0 and 1
        add = lambda x, y: x + y
        area0 = reduce(add, (Face(mesh, cell.index()).area() \
                             for cell in SubsetIterator(cf, 0)), 0.0)
        area1 = reduce(add, (Face(mesh, cell.index()).area() \
                             for cell in SubsetIterator(cf, 1)), 0.0)
        total_area = reduce(add, (face.area() for face in faces(mesh)), 0.0)

        # Check that all cells in the two domains are either above or below y=0
        self.assertTrue(
            all(cell.midpoint().y() < 0 for cell in SubsetIterator(cf, 0)))
        self.assertTrue(
            all(cell.midpoint().y() > 0 for cell in SubsetIterator(cf, 1)))

        # Check that the areas add up
        self.assertAlmostEqual(area0 + area1, total_area)

        # Measure the edge length of the two edge domains
        #edge_markers = mesh.domains().facet_domains()
        edge_markers = mesh.domains().markers(mesh.topology().dim() - 1)
        self.assertTrue(edge_markers is not None)
        #length0 = reduce(add, (Edge(mesh, e.index()).length() \
        #                    for e in SubsetIterator(edge_markers, 0)), 0.0)
        length0, length1 = 0.0, 0.0
        for item in list(edge_markers.items()):
            if item[1] == 0:
                length0 += Edge(mesh, int(item[0])).length()
            elif item[1] == 1:
                length1 += Edge(mesh, int(item[0])).length()

        # Total length of all edges and total length of boundary edges
        total_length = reduce(add, (e.length() for e in edges(mesh)), 0.0)
        boundary_length = reduce(add, (Edge(mesh, f.index()).length() \
                          for f in facets(mesh) if f.exterior()), 0.0)

        # Check that the edges add up
        self.assertAlmostEqual(length0 + length1, total_length)
        self.assertAlmostEqual(length1, boundary_length)

        # Clean up
        os.unlink(dfname)
        os.unlink(dfname0)
Example #12
def test(path, type='mf'):
    '''Evolve the tile in (n, n) pattern checking volume/surface properties'''

    comm = mpi_comm_world()
    h5 = HDF5File(comm, path, 'r')
    tile = Mesh()
    h5.read(tile, 'mesh', False)

    init_container = lambda type, dim: (
        MeshFunction('size_t', tile, dim, 0)
        if type == 'mf' else MeshValueCollection('size_t', tile, dim))

    for n in (2, 4):
        data = {}
        checks = {}
        for dim, name in zip((2, 3), ('surfaces', 'volumes')):
            # Get the collection
            collection = init_container(type, dim)
            h5.read(collection, name)

            if type == 'mvc': collection = as_meshf(collection)

            # Data to evolve
            tile.init(dim, 0)
            e2v = tile.topology()(dim, 0)
            # Only want to evolve tag 1 (interfaces) for the facets.
            data[(dim, 1)] = np.array(
                [e2v(e.index()) for e in SubsetIterator(collection, 1)],
                dtype='uintp')

            if dim == 2:
                check = lambda m, f: assemble(
                    FacetArea(m) * ds(domain=m,
                                      subdomain_data=f,
                                      subdomain_id=1) + avg(FacetArea(m)) *
                    dS(domain=m, subdomain_data=f, subdomain_id=1))
            else:
                check = lambda m, f: assemble(
                    CellVolume(m) * dx(
                        domain=m, subdomain_data=f, subdomain_id=1))

            checks[
                dim] = lambda m, f, t=tile, c=collection, n=n, check=check: abs(
                    check(m, f) - n**2 * check(t, c)) / (n**2 * check(t, c))

        t = Timer('x')
        mesh, mesh_data = TileMesh(tile, (n, n), mesh_data=data)
        info('\tTiling took %g s. Ncells %d, nvertices %d, \n' %
             (t.stop(), mesh.num_cells(), mesh.num_vertices()))

        foos = mf_from_data(mesh, mesh_data)
        # Mesh Functions
        from_mf = np.array([checks[dim](mesh, foos[dim]) for dim in (2, 3)])

        mvcs = mvc_from_data(mesh, mesh_data)
        foos = as_meshf(mvcs)
        # Mesh ValueCollections
        from_mvc = np.array([checks[dim](mesh, foos[dim]) for dim in (2, 3)])

        assert np.linalg.norm(from_mf - from_mvc) < 1E-13
        # I ignore shared facets so there is bound to be some error in facets
        # Volume should match well
        print(from_mf)
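
The test reads a tile from an HDF5 file containing a 'mesh' dataset plus 'surfaces' and 'volumes' marker collections; a sketch of how such a file could be produced with legacy DOLFIN follows (the file name, mesh and tag values are assumptions, the dataset names follow the reads above).

from dolfin import UnitCubeMesh, MeshFunction, HDF5File, mpi_comm_world

tile = UnitCubeMesh(4, 4, 4)
volumes = MeshFunction('size_t', tile, 3, 1)    # mark all cells with tag 1
surfaces = MeshFunction('size_t', tile, 2, 0)   # facet markers; tag 1 would mark interfaces

out = HDF5File(mpi_comm_world(), 'tile.h5', 'w')
out.write(tile, 'mesh')
out.write(volumes, 'volumes')
out.write(surfaces, 'surfaces')
out.close()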
Example #13
def mesh_around_1d(mesh, size=1, scale=10, padding=0.05):
    '''
    From a 1d mesh embedded in Xd space (X > 1, given in XML format) produce
    an Xd mesh in which the 1d structure is embedded. The mesh size close to
    the structure should be `size` (given as a multiple of hmin()) and
    `scale * size` elsewhere. `padding` controls the size of the bounding box.
    '''
    dot = mesh.find('.')
    root, ext = mesh[:dot], mesh[dot:]
    assert ext == '.xml' or ext == '.xml.gz', ext

    mesh = Mesh(mesh)
    gdim = mesh.geometry().dim()
    assert gdim > 1 and mesh.topology().dim() == 1

    x = mesh.coordinates()
    mesh.init(1, 0)

    # Compute fall back mesh size:
    assert size > 0
    size = mesh.hmin() * size

    # Don't allow zero padding - collisions of lines with the boundary
    # segfault too often, so we prevent it
    assert padding > 0
    # Finally scale better be positive
    assert scale > 0

    point = (lambda xi: tuple(xi) + (0, ))\
            if gdim == 2 else (lambda xi: tuple(xi))

    geo = '.'.join([root, 'geo'])
    with open(geo, 'w') as outfile:
        # Setup
        outfile.write('SetFactory("OpenCASCADE");\n')
        outfile.write('size = %g;\n' % size)
        outfile.write('SIZE = %g;\n' % (size * scale))

        # Points
        fmt = 'Point(%d) = {%.16f, %.16f, %.16f, size};\n'
        for i, xi in enumerate(x, 1):
            outfile.write(fmt % ((i, ) + point(xi)))
        # Lines
        fmt = 'Line(%d) = {%d, %d};\n'
        for i, cell in enumerate(cells(mesh), 1):
            outfile.write(fmt % ((i, ) + tuple(cell.entities(0) + 1)))

        # BBox
        xmin, xmax = x.min(0), x.max(0)
        padding = (xmax - xmin) * padding / 2.
        xmin -= padding
        xmax += padding
        dx = xmax - xmin

        if gdim == 2 or dx[-1] < 1E-14:  # All points are on a plane
            rect = 'Rectangle(1) = {%g, %g, %g, %g, %g};\n' % (
                xmin[0], xmin[1], 0 if gdim == 2 else xmin[2], dx[0], dx[1])
            outfile.write(rect)
            bbox = 'Surface'
        else:
            box = 'Box(1) = {%g, %g, %g, %g, %g, %g};\n' % (
                xmin[0], xmin[1], xmin[2], dx[0], dx[1], dx[2])
            outfile.write(box)
            bbox = 'Volume'

        # Crack
        for line in range(1, mesh.num_cells() + 1):
            outfile.write('Line{%d} In %s{1};\n' % (line, bbox))

        # Add Physical volume/surface
        outfile.write('Physical %s(1) = {1};\n' % bbox)

        # Add Physical surface/line
        lines = ', '.join(
            '%d' % v for v in range(1, mesh.num_cells() + 1))
        outfile.write('Physical Line(1) = {%s};\n' % lines)
    return geo, gdim
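
A sketch of how the generated .geo file might then be turned into a mesh with the Gmsh command-line tool; the input file name and the use of a subprocess call are assumptions.

import subprocess

geo, gdim = mesh_around_1d('network_1d.xml', size=1, scale=10, padding=0.05)
# -2 / -3 asks gmsh to generate a 2d or 3d mesh from the .geo description.
subprocess.call(['gmsh', '-%d' % gdim, geo])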
Example #14
def gmsh_to_dolfin_mesh(ifilename, handler):
    """Convert between .gmsh v2.0 format (http://www.geuz.org/gmsh/) and .xml,
    parser implemented as a state machine:

        0 = read 'MeshFormat'
        1 = read  mesh format data
        2 = read 'EndMeshFormat'
        3 = read 'Nodes'
        4 = read  number of vertices
        5 = read  vertices
        6 = read 'EndNodes'
        7 = read 'Elements'
        8 = read  number of cells
        9 = read  cells
        10 = done

    Afterwards, extract physical region numbers if they are defined in
    the mesh file as a mesh function.

    """

    print "Converting from Gmsh format (.msh, .gmsh) to DOLFIN XML format"

    # The dimension of the gmsh element types supported here as well as the dolfin cell types for each dimension
    gmsh_dim = {1: 1, 2: 2, 4: 3}
    gmsh_cell_type = {1: "interval", 2: "triangle", 3: "tetrahedron"}
    # the gmsh element types supported for conversion
    supported_gmsh_element_types = [1, 2, 4]

    # Open files
    ifile = open(ifilename, "r")

    # Scan file for cell type
    cell_type = None
    highest_dim = 0
    line = ifile.readline()
    while line:

        # Remove newline
        if line[-1] == "\n":
            line = line[:-1]

        # Read dimension
        if line.find("$Elements") == 0:

            line = ifile.readline()
            num_elements = int(line)
            num_cells_counted = 0
            if num_elements == 0:
                _error("No elements found in gmsh file.")
            line = ifile.readline()

            # Now iterate through elements to find largest dimension.  Gmsh
            # format might include elements of lower dimensions in the element list.
            # We also need to count number of elements of correct dimensions.
            # Also determine which vertices are not used.
            dim_count = {1: 0, 2: 0, 3: 0}
            vertices_used = {1: [], 2: [], 3: []}
            # Array used to store gmsh tags for 1D (type 1/line), 2D (type 2/triangular) elements and 3D (type 4/tet) elements
            tags_for_dim = {1: [], 2: [], 3: []}

            while line.find("$EndElements") == -1:
                element = line.split()
                elem_type = int(element[1])
                num_tags = int(element[2])

                if elem_type in supported_gmsh_element_types:
                    dim = gmsh_dim[elem_type]
                    if highest_dim < dim:
                        highest_dim = dim

                    node_num_list = [
                        int(node) for node in element[3 + num_tags:]
                    ]
                    vertices_used[dim].extend(node_num_list)
                    if num_tags > 0:
                        tags_for_dim[dim].append(
                            tuple(int(tag) for tag in element[3:3 + num_tags]))
                    dim_count[dim] += 1
                else:
                    #TODO: output a warning here. "gmsh element type %d not supported" % elem_type
                    pass

                line = ifile.readline()
        else:
            # Read next line
            line = ifile.readline()

    # Check that we got the cell type and set num_cells_counted
    if highest_dim == 0:
        _error("Unable to find cells of supported type.")

    num_cells_counted = dim_count[highest_dim]
    vertex_set = set(vertices_used[highest_dim])
    vertices_used[highest_dim] = None

    vertex_dict = {}
    for n, v in enumerate(vertex_set):
        vertex_dict[v] = n

    # Step to beginning of file
    ifile.seek(0)

    # Set mesh type
    handler.set_mesh_type(gmsh_cell_type[highest_dim], highest_dim)

    # Initialise node list (gmsh does not export all vertexes in order)
    nodelist = {}

    # Current state
    state = 0

    # Write data
    num_vertices_read = 0
    num_cells_read = 0

    # Now handle the facet markings
    if len(tags_for_dim[highest_dim - 1]) > 0:
        # first construct the mesh
        from dolfin import MeshEditor, Mesh
        mesh = Mesh()
        me = MeshEditor()
        me.open(mesh, highest_dim, highest_dim)
    else:
        me = None

    while state != 10:

        # Read next line
        line = ifile.readline()
        if not line: break

        # Skip comments
        if line[0] == '#':
            continue

        # Remove newline
        if line[-1] == "\n":
            line = line[:-1]

        if state == 0:
            if line == "$MeshFormat":
                state = 1
        elif state == 1:
            (version, file_type, data_size) = line.split()
            state = 2
        elif state == 2:
            if line == "$EndMeshFormat":
                state = 3
        elif state == 3:
            if line == "$Nodes":
                state = 4
        elif state == 4:
            num_vertices = len(vertex_dict)
            handler.start_vertices(num_vertices)
            if me is not None:
                me.init_vertices(num_vertices)
            state = 5
        elif state == 5:
            (node_no, x, y, z) = line.split()
            node_no = int(node_no)
            x, y, z = [float(xx) for xx in (x, y, z)]
            if node_no in vertex_dict:
                node_no = vertex_dict[node_no]
            else:
                continue
            nodelist[int(node_no)] = num_vertices_read
            handler.add_vertex(num_vertices_read, [x, y, z])
            if me is not None:
                if highest_dim == 1:
                    me.add_vertex(num_vertices_read, x)
                elif highest_dim == 2:
                    me.add_vertex(num_vertices_read, x, y)
                elif highest_dim == 3:
                    me.add_vertex(num_vertices_read, x, y, z)

            num_vertices_read += 1

            if num_vertices == num_vertices_read:
                handler.end_vertices()
                state = 6
        elif state == 6:
            if line == "$EndNodes":
                state = 7
        elif state == 7:
            if line == "$Elements":
                state = 8
        elif state == 8:
            handler.start_cells(num_cells_counted)
            if me is not None:
                me.init_cells(num_cells_counted)

            state = 9
        elif state == 9:
            element = line.split()
            elem_type = int(element[1])
            num_tags = int(element[2])
            if elem_type in supported_gmsh_element_types:
                dim = gmsh_dim[elem_type]
            else:
                dim = 0
            if dim == highest_dim:
                node_num_list = [
                    vertex_dict[int(node)] for node in element[3 + num_tags:]
                ]
                for node in node_num_list:
                    if not node in nodelist:
                        _error("Vertex %d of %s %d not previously defined." %
                               (node, gmsh_cell_type[dim], num_cells_read))
                cell_nodes = [nodelist[n] for n in node_num_list]
                handler.add_cell(num_cells_read, cell_nodes)

                if me is not None:
                    me.add_cell(num_cells_read, *cell_nodes)

                num_cells_read += 1

            if num_cells_counted == num_cells_read:
                handler.end_cells()
                if me is not None:
                    me.close()
                state = 10
        elif state == 10:
            break

    # Write mesh function based on the Physical Regions defined by
    # gmsh, but only if they are not all zero. All zero physical
    # regions indicate that no physical regions were defined.
    if highest_dim not in [1, 2, 3]:
        _error("Gmsh tags not supported for dimension %i. Probably a bug" %
               dim)

    tags = tags_for_dim[highest_dim]
    physical_regions = tuple(tag[0] for tag in tags)
    if not all(tag == 0 for tag in physical_regions):
        handler.start_meshfunction("physical_region", dim, num_cells_counted)
        for i, physical_region in enumerate(physical_regions):
            handler.add_entity_meshfunction(i, physical_region)
        handler.end_meshfunction()

    # Now process the facet markers
    tags = tags_for_dim[highest_dim - 1]
    if len(tags) > 0:

        print(tags)
        print(vertices_used[highest_dim - 1])

        physical_regions = tuple(tag[0] for tag in tags)
        if not all(tag == 0 for tag in physical_regions):
            mesh.init(highest_dim - 1, 0)

            # Get the facet-node connectivity information (reshape as a row of node indices per facet)
            facets_as_nodes = mesh.topology()(highest_dim - 1, 0)().reshape(
                mesh.num_facets(), highest_dim)

            #            from dolfin import MeshFunction
            #            # Create and initialise the mesh function
            #            facet_mark_function = MeshFunction ( 'uint', mesh, highest_dim-1 )
            #            facet_mark_function.set_all( 0 )
            handler.start_meshfunction("facet_region", highest_dim - 1,
                                       mesh.num_facets())

            facets_to_check = range(mesh.num_facets())

            data = [int(0 * k) for k in range(len(facets_to_check))]

            for i, physical_region in enumerate(physical_regions):
                nodes = [
                    n - 1
                    for n in vertices_used[highest_dim -
                                           1][2 * i:(2 * i + highest_dim)]
                ]
                nodes.sort()

                if physical_region != 0:
                    found = False
                    for j in range(len(facets_to_check)):
                        index = facets_to_check[j]
                        if all(facets_as_nodes[index, k] == nodes[k]
                               for k in range(len(nodes))):
                            found = True
                            facets_to_check.pop(j)
                            # set the value of the mesh function
                            #                            facet_mark_function[index] = physical_region
                            data[index] = physical_region
                            break

                    if not found:
                        raise Exception(
                            "The facet (%d) was not found to mark: %s" %
                            (i, nodes))


#            fname = os.path.splitext('tmp.xml')[0]
#            mesh_function_file = File("%s_%s.xml" % (fname, "facet_region"))
#            mesh_function_file << facet_mark_function

            for index, physical_region in enumerate(data):
                handler.add_entity_meshfunction(index, physical_region)
            handler.end_meshfunction()

            mf = MeshFunction('uint', mesh, 'tmp_facet_region.xml')
            plot(mf, interactive=True)

    # Check that we got all data
    if state == 10:
        print "Conversion done"
    else:
        _error(
            "Missing data, unable to convert.\n"
            "Did you use version 2.0 of the gmsh file format?"
        )

    # Close files
    ifile.close()
Example #15
def scalar_laplacians(
    mesh: df.Mesh,
    markers: Optional[Dict[str, int]] = None,
    ffun: Optional[MeshFunction] = None,
    use_krylov_solver: bool = False,
    krylov_solver_atol: Optional[float] = None,
    krylov_solver_rtol: Optional[float] = None,
    krylov_solver_max_its: Optional[int] = None,
    verbose: bool = False,
    strict: bool = False,
) -> Dict[str, df.Function]:
    """
    Calculate the laplacians

    Arguments
    ---------
    mesh : dolfin.Mesh
       A dolfin mesh
    markers : dict (optional)
        A dictionary with the markers for the
        different boundaries defined in the facet function
        or within the mesh itself.
        The following markers must be provided:
        'base', 'lv', 'epi', 'rv' (optional).
        If the markers are not provided, the following default
        values will be used: base = 10, rv = 20, lv = 30, epi = 40.
    fiber_space : str
        A string of the form {family}_{degree} which
        determines the space in which the fibers should be calculated.
    use_krylov_solver: bool
        If True use Krylov solver, by default False
    krylov_solver_atol: float (optional)
        If a Krylov solver is used, this option specifies a
        convergence criterion in terms of the absolute
        residual. Default: 1e-15.
    krylov_solver_rtol: float (optional)
        If a Krylov solver is used, this option specifies a
        convergence criterion in terms of the relative
        residual. Default: 1e-10.
    krylov_solver_max_its: int (optional)
        If a Krylov solver is used, this option specifies the
        maximum number of iterations to perform. Default: 10000.
    verbose: bool
        If True, print more info, by default False
    strict: bool
        If True, raise a RuntimeError if the solutions do not sum to 1.0
    """

    if not isinstance(mesh, df.Mesh):
        raise TypeError("Expected a dolfin.Mesh as the mesh argument.")

    # Init connectivities
    mesh.init(2)
    if ffun is None:
        ffun = df.MeshFunction("size_t", mesh, 2, mesh.domains())

    # Boundary markers, solutions and cases
    cases, boundaries, markers = find_cases_and_boundaries(ffun, markers)
    markers_str = "\n".join(
        ["{}: {}".format(k, v) for k, v in markers.items()])
    df.info(("Compute scalar laplacian solutions with the markers: \n"
             "{}").format(markers_str, ), )

    check_boundaries_are_marked(
        mesh=mesh,
        ffun=ffun,
        markers=markers,
        boundaries=boundaries,
    )

    # Compute the apex-to-base solutions
    num_vertices = mesh.num_vertices()
    num_cells = mesh.num_cells()
    if mesh.mpi_comm().size > 1:
        num_vertices = mesh.mpi_comm().allreduce(num_vertices)
        num_cells = mesh.mpi_comm().allreduce(num_cells)
    df.info("  Num vertices: {0}".format(num_vertices))
    df.info("  Num cells: {0}".format(num_cells))

    if "mv" in cases and "av" in cases:
        # Use Doste approach
        pass

    # Else use the Bayer approach
    return bayer(
        cases=cases,
        mesh=mesh,
        markers=markers,
        ffun=ffun,
        verbose=verbose,
        use_krylov_solver=use_krylov_solver,
        strict=strict,
        krylov_solver_atol=krylov_solver_atol,
        krylov_solver_rtol=krylov_solver_rtol,
        krylov_solver_max_its=krylov_solver_max_its,
    )
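
A sketch of a typical call to scalar_laplacians; the mesh file name is hypothetical, and the marker values simply restate the defaults from the docstring.

import dolfin as df

mesh = df.Mesh("biv_mesh.xml")                               # hypothetical mesh file
ffun = df.MeshFunction("size_t", mesh, 2, mesh.domains())    # facet markers stored in the mesh
markers = {"base": 10, "rv": 20, "lv": 30, "epi": 40}        # the documented defaults

solutions = scalar_laplacians(
    mesh=mesh,
    markers=markers,
    ffun=ffun,
    use_krylov_solver=True,
)
for name, func in solutions.items():
    print(name, func.vector().norm("l2"))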