Example No. 1
def mark_facets(mesh: df.Mesh, ffun: df.MeshFunction):
    """
    Mark the mesh domains according to the facet function
    """
    for facet in df.facets(mesh):

        # Unmarked facets carry the sentinel 2**64 - 1 (max size_t);
        # reset those to 0 before transferring the marker
        if ffun[facet] == 2**64 - 1:
            ffun[facet] = 0

        # Store the marker on the mesh itself (topological dimension 2,
        # i.e. the facets of a 3D mesh)
        mesh.domains().set_marker((facet.index(), ffun[facet]), 2)
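
A minimal usage sketch (the unit-cube mesh and the boundary marking below are made up for illustration; dolfin is imported as df as in the snippet above):

import dolfin as df

mesh = df.UnitCubeMesh(4, 4, 4)
ffun = df.MeshFunction("size_t", mesh, 2)
ffun.set_all(0)
df.CompiledSubDomain("on_boundary").mark(ffun, 1)  # mark exterior facets

mark_facets(mesh, ffun)

# The markers are now stored on the mesh and can be recovered later
ffun2 = df.MeshFunction("size_t", mesh, 2, mesh.domains())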
Example No. 2
File: test.py Project: alogg/dolfin
    def test_convert_diffpack(self):
        from dolfin import Mesh, MPI, MeshFunction
        if MPI.num_processes() != 1:
            return
        fname = os.path.join("data", "diffpack_tet")
        dfname = fname+".xml"
        
        # Read DiffPack file and convert to a DOLFIN XML mesh file
        meshconvert.diffpack2xml(fname+".grid", dfname)

        # Read in dolfin mesh and check number of cells and vertices
        mesh = Mesh(dfname)
        self.assertEqual(mesh.num_vertices(), 27)
        self.assertEqual(mesh.num_cells(), 48)
        self.assertEqual(mesh.domains().markers(3).size(), 48)
        self.assertEqual(mesh.domains().markers(2).size(), 16)

        mf_basename = dfname.replace(".xml", "_marker_%d.xml")
        for marker, num in [(3, 9), (6, 9), (7, 3), (8, 1)]:

            mf_name = mf_basename % marker
            mf = MeshFunction("uint", mesh, mf_name)
            self.assertEqual(sum(mf.array()==marker), num)
            os.unlink(mf_name)
        
        # Clean up
        os.unlink(dfname)
Example No. 3
    def test_convert_diffpack_2d(self):

        from dolfin import Mesh, MPI, MeshFunction

        fname = os.path.join(os.path.dirname(__file__), "data", "diffpack_tri")
        dfname = fname + ".xml"

        # Read DiffPack file and convert to a DOLFIN XML mesh file
        meshconvert.diffpack2xml(fname + ".grid", dfname)

        # Read in dolfin mesh and check number of cells and vertices
        mesh = Mesh(dfname)

        self.assertEqual(mesh.num_vertices(), 41)
        self.assertEqual(mesh.num_cells(), 64)
        self.assertEqual(len(mesh.domains().markers(2)), 64)

        mf_basename = dfname.replace(".xml", "_marker_%d.xml")
        for marker, num in [(1, 10), (2, 5), (3, 5)]:

            mf_name = mf_basename % marker
            mf = MeshFunction("size_t", mesh, mf_name)
            self.assertEqual(sum(mf.array() == marker), num)
            os.unlink(mf_name)

        # Clean up
        os.unlink(dfname)
Example No. 4
    def test_convert_diffpack(self):
        from dolfin import Mesh, MPI, MeshFunction, mpi_comm_world
        if MPI.size(mpi_comm_world()) != 1:
            return
        fname = os.path.join("data", "diffpack_tet")
        dfname = fname + ".xml"

        # Read DiffPack file and convert to a DOLFIN XML mesh file
        meshconvert.diffpack2xml(fname + ".grid", dfname)

        # Read in dolfin mesh and check number of cells and vertices
        mesh = Mesh(dfname)
        self.assertEqual(mesh.num_vertices(), 27)
        self.assertEqual(mesh.num_cells(), 48)
        self.assertEqual(len(mesh.domains().markers(3)), 48)
        self.assertEqual(len(mesh.domains().markers(2)), 16)

        mf_basename = dfname.replace(".xml", "_marker_%d.xml")
        for marker, num in [(3, 9), (6, 9), (7, 3), (8, 1)]:

            mf_name = mf_basename % marker
            mf = MeshFunction("size_t", mesh, mf_name)
            self.assertEqual(sum(mf.array() == marker), num)
            os.unlink(mf_name)

        # Clean up
        os.unlink(dfname)
Example No. 5
    def test_convert_diffpack_2d(self):

        from dolfin import Mesh, MPI, MeshFunction, mpi_comm_world

        fname = os.path.join(os.path.dirname(__file__), "data", "diffpack_tri")
        dfname = fname+".xml"

        # Read DiffPack file and convert to a DOLFIN XML mesh file
        meshconvert.diffpack2xml(fname+".grid", dfname)

        # Read in dolfin mesh and check number of cells and vertices
        mesh = Mesh(dfname)

        self.assertEqual(mesh.num_vertices(), 41)
        self.assertEqual(mesh.num_cells(), 64)
        self.assertEqual(len(mesh.domains().markers(2)), 64)

        mf_basename = dfname.replace(".xml", "_marker_%d.xml")
        for marker, num in [(1,10), (2,5), (3,5)]:

            mf_name = mf_basename % marker
            mf = MeshFunction("size_t", mesh, mf_name)
            self.assertEqual(sum(mf.array()==marker), num)
            os.unlink(mf_name)

        # Clean up
        os.unlink(dfname)
Example No. 6
def import_from_gmsh(fname):
    "Convert from gmsh to dolfin"

    # read with meshio
    msh = meshio.read(fname)

    # create a DOLFIN mesh (assuming 2d)
    gdim, tdim = 2, 2
    mm = Mesh()
    editor = MeshEditor()
    editor.open(mm, "triangle", gdim, tdim)

    npt = msh.points.shape[0]
    nc = msh.get_cells_type("triangle").shape[0]

    editor.init_vertices_global(npt, npt)
    editor.init_cells_global(nc, nc)

    for i, p in enumerate(msh.points):
        editor.add_vertex(i, p[:2])

    for i, c in enumerate(msh.get_cells_type("triangle")):
        editor.add_cell(i, c)

    editor.close()

    # domains
    md = mm.domains()
    md.init(tdim)
    markers = {}

    if 'gmsh:physical' not in msh.cell_data_dict:
        # no markers at all
        return mm, markers

    phy = msh.cell_data_dict['gmsh:physical']
    if 'triangle' in phy:
        for eid, val in enumerate(phy['triangle']):
            md.set_marker((eid, val), 2)

    if 'line' in phy:
        mm.init(0, 1)
        p2e = mm.topology()(0, 1)

        for l, k in zip(msh.get_cells_type("line"), phy['line']):
            # the mesh edge shared by the two endpoints of the gmsh line
            e = set(p2e(l[0])).intersection(p2e(l[1])).pop()
            md.set_marker((e, k), 1)

    if 'vertex' in phy:
        for eid, val in zip(msh.get_cells_type("vertex"), phy['vertex']):
            md.set_marker((eid[0], val), 0)

    # names
    markers = tuple(
        {n: v.item()
         for n, (v, d) in msh.field_data.items() if d == dim}
        for dim in range(tdim + 1))

    return mm, markers
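
A hedged usage sketch (domain.msh is a hypothetical 2D Gmsh file with physical groups; MeshFunction and Measure come from dolfin, alongside the names used above):

from dolfin import MeshFunction, Measure

mesh, markers = import_from_gmsh("domain.msh")

# Rebuild cell and facet functions from the markers stored on the mesh
cell_fun = MeshFunction("size_t", mesh, 2, mesh.domains())
facet_fun = MeshFunction("size_t", mesh, 1, mesh.domains())

# markers[d] maps physical-group names to their tags for dimension d
ds = Measure("ds", domain=mesh, subdomain_data=facet_fun)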
Example No. 7
def mark_biv_mesh(
    mesh: df.Mesh,
    ffun: Optional[df.MeshFunction] = None,
    markers: Optional[Dict[str, int]] = None,
    tol: float = 0.01,
    values: Dict[str, int] = {
        "lv": 0,
        "septum": 1,
        "rv": 2
    },
) -> df.MeshFunction:

    from .ldrb import scalar_laplacians

    scalars = scalar_laplacians(mesh=mesh, ffun=ffun, markers=markers)

    for cell in df.cells(mesh):

        lv = scalars["lv"](cell.midpoint())
        rv = scalars["rv"](cell.midpoint())
        epi = scalars["epi"](cell.midpoint())

        print(cell.index(), "lv = {}, rv = {}".format(lv, rv))

        if (lv > tol or epi > 1 - tol) and rv < tol:
            print("LV")
            value = values["lv"]
            if lv < tol and rv > lv:
                value = values["rv"]
        elif (rv > tol or epi > 1 - tol) and lv < tol:
            print("RV")
            value = values["rv"]
        else:
            print("SEPTUM")
            value = values["septum"]

        mesh.domains().set_marker((cell.index(), value), 3)

    sfun = df.MeshFunction("size_t", mesh, 3, mesh.domains())
    return sfun
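
A hedged usage sketch (mesh and ffun are assumed to describe a biventricular geometry; the marker values follow the default convention base = 10, rv = 20, lv = 30, epi = 40 used elsewhere in this collection):

sfun = mark_biv_mesh(mesh, ffun=ffun,
                     markers={"base": 10, "rv": 20, "lv": 30, "epi": 40})

# Integrate separately over LV (0), septum (1) and RV (2) cells
dx = df.Measure("dx", domain=mesh, subdomain_data=sfun)
lv_volume = df.assemble(df.Constant(1.0) * dx(0))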
Example No. 8
def write_fenics_file(dim, ofilename):
    # nodes, cell_map and boundary_cells are assumed to be module-level
    # globals holding the imported mesh data (1-based cell/node numbering)
    ofile = File(ofilename + '.xml')
    mesh = Mesh()
    editor = MeshEditor()
    editor.open(mesh, dim, dim)
    editor.init_vertices(nodes.shape[1])
    editor.init_cells(len(cell_map))
    for i in range(nodes.shape[1]):
        if dim == 2:
            editor.add_vertex(i, nodes[0, i], nodes[1, i])
        else:
            editor.add_vertex(i, nodes[0, i], nodes[1, i], nodes[2, i])

    for i in range(1, len(cell_map) + 1):
        if dim == 2:
            editor.add_cell(i - 1, cell_map[i][0] - 1, cell_map[i][1] - 1,
                            cell_map[i][2] - 1)
        else:
            editor.add_cell(i - 1, cell_map[i][0] - 1, cell_map[i][1] - 1,
                            cell_map[i][2] - 1, cell_map[i][3] - 1)

    mesh.order()

    # Mark boundary facets: for each boundary cell, find the local facet
    # whose vertices match the given boundary nodes and store the zone marker
    mvc = mesh.domains().markers(dim - 1)
    for zone, cells in boundary_cells.items():
        for cell, nds in cells.items():
            dolfin_cell = Cell(mesh, cell - 1)
            nodes_of_cell = dolfin_cell.entities(0)
            nodes_of_face = nds - 1
            for jj, ff in enumerate(facets(dolfin_cell)):
                facet_nodes = ff.entities(0)
                if all(node in nodes_of_face for node in facet_nodes):
                    local_index = jj
                    break
            mvc.set_value(cell - 1, local_index, zone)

    ofile << mesh
    from dolfin import plot
    plot(mesh, interactive=True)
    print('Finished writing FEniCS mesh\n')
Example No. 9
    def _copy(self, deepcopy):
        new_mesh = Mesh(self.mesh)

        new_markerfunctions = {}
        for dim, fun in ((0, "vfun"), (1, "ffun"), (2, "cfun")):
            f_old = get_attribute(self, fun)
            if f_old is None:
                continue
            f = MeshFunction("size_t", new_mesh, dim, new_mesh.domains())
            f.set_values(f_old.array())
            new_markerfunctions[fun] = f
        markerfunctions = MarkerFunctions2D(**new_markerfunctions)

        new_microstructure = {}
        for field in ("f0", "s0", "n0"):
            v0_old = get_attribute(self, field)
            if v0_old is None:
                continue
            v0 = map_vector_field(v0_old, new_mesh)
            new_microstructure[field] = v0
        microstructure = Microstructure(**new_microstructure)

        new_crl_basis = {}
        for basis in ("c0", "r0", "l0"):
            v0_old = get_attribute(self, basis)
            if v0_old is None:
                continue
            v0 = map_vector_field(v0_old, new_mesh)
            new_crl_basis[basis] = v0
        crl_basis = CRLBasis(**new_crl_basis)

        return dict(
            mesh=new_mesh,
            markers=self.markers,
            markerfunctions=markerfunctions,
            microstructure=microstructure,
            crl_basis=crl_basis,
        )
Example No. 10
File: test.py Project: alogg/dolfin
    def test_convert_triangle(self): # Disabled because it fails, see FIXME below
        # test no. 1
        from dolfin import Mesh, MPI
        if MPI.num_processes() != 1:
            return
        fname = os.path.join("data", "triangle")
        dfname = fname+".xml"
        
        # Read triangle file and convert to a dolfin xml mesh file
        meshconvert.triangle2xml(fname, dfname)

        # Read in dolfin mesh and check number of cells and vertices
        mesh = Mesh(dfname)
        self.assertEqual(mesh.num_vertices(), 96)
        self.assertEqual(mesh.num_cells(), 159)

        # Clean up
        os.unlink(dfname)


        # test no. 2
        from dolfin import MPI, Mesh, MeshFunction, \
                           edges, Edge, faces, Face, \
                           SubsetIterator, facets, CellFunction
        if MPI.num_processes() != 1:
            return
        fname = os.path.join("data", "test_Triangle_3")
        dfname = fname+".xml"
        dfname0 = fname+".attr0.xml"

        # Read triangle file and convert to a dolfin xml mesh file
        meshconvert.triangle2xml(fname, dfname)

        # Read in dolfin mesh and check number of cells and vertices
        mesh = Mesh(dfname)
        mesh.init()
        mfun = MeshFunction('double', mesh, dfname0)
        self.assertEqual(mesh.num_vertices(), 58)
        self.assertEqual(mesh.num_cells(), 58)

        # Create a size_t CellFunction and assign the values based on the
        # converted MeshFunction
        cf = CellFunction("size_t", mesh)
        cf.array()[mfun.array()==10.0] = 0
        cf.array()[mfun.array()==-10.0] = 1

        # Measure the total area of cells with marker 0 and 1
        add = lambda x, y : x+y
        area0 = reduce(add, (Face(mesh, cell.index()).area() \
                             for cell in SubsetIterator(cf, 0)), 0.0)
        area1 = reduce(add, (Face(mesh, cell.index()).area() \
                             for cell in SubsetIterator(cf, 1)), 0.0)
        total_area = reduce(add, (face.area() for face in faces(mesh)), 0.0)

        # Check that all cells in the two domains are either above or below y=0
        self.assertTrue(all(cell.midpoint().y()<0 for cell in SubsetIterator(cf, 0)))
        self.assertTrue(all(cell.midpoint().y()>0 for cell in SubsetIterator(cf, 1)))
        
        # Check that the areas add up
        self.assertAlmostEqual(area0+area1, total_area)
        
        # Measure the edge length of the two edge domains
        edge_markers = mesh.domains().facet_domains()
        self.assertTrue(edge_markers is not None)
        length0 = reduce(add, (Edge(mesh, e.index()).length() \
                            for e in SubsetIterator(edge_markers, 0)), 0.0)
        length1 = reduce(add, (Edge(mesh, e.index()).length() \
                            for e in SubsetIterator(edge_markers, 1)), 0.0)
        
        # Total length of all edges and total length of boundary edges
        total_length = reduce(add, (e.length() for e in edges(mesh)), 0.0)
        boundary_length = reduce(add, (Edge(mesh, f.index()).length() \
                          for f in facets(mesh) if f.exterior()), 0.0)
        
        # Check that the edges add up
        self.assertAlmostEqual(length0+length1, total_length)
        self.assertAlmostEqual(length1, boundary_length)

        # Clean up
        os.unlink(dfname)
        os.unlink(dfname0)
Example No. 11
def load_geometry_from_h5(
    h5name,
    h5group="",
    fendo=None,
    fepi=None,
    include_sheets=True,
    comm=mpi_comm_world,
):
    """Load geometry and other mesh data from
    a h5file to an object.
    If the file contains muliple fiber fields
    you can spefify the angles, and if the file
    contais sheets and cross-sheets this can also
    be included

    :param str h5name: Name of the h5file
    :param str h5group: The group within the file
    :param int fendo: Helix fiber angle (endocardium) (if available)
    :param int fepi: Helix fiber angle (epicardium) (if available)
    :param bool include_sheets: Include sheets and cross-sheets
    :returns: An object with geometry data
    :rtype: object

    """
    # Set default groups
    ggroup = "{}/geometry".format(h5group)
    mgroup = "{}/mesh".format(ggroup)
    lgroup = "{}/local basis functions".format(h5group)
    fgroup = "{}/microstructure".format(h5group)

    if not os.path.isfile(h5name):
        raise IOError("File {} does not exist".format(h5name))

    # Check that the given file contains
    # the geometry in the given h5group
    if not check_h5group(h5name, mgroup, delete=False, comm=comm):
        msg = ("Error!\nGroup: '{}' does not exist in file:"
               "\n{}").format(mgroup, h5name)

        with h5py.File(h5name) as h:
            keys = h.keys()
        msg += "\nPossible values for the h5group are {}".format(keys)
        raise IOError(msg)

    # Dummy class to store attributes in
    class Geometry(object):
        pass

    geo = Geometry()

    with HDF5File(comm, h5name, "r") as h5file:

        # Load mesh
        mesh = Mesh(comm)
        h5file.read(mesh, mgroup, False)
        geo.mesh = mesh

        # Get mesh functions
        meshfunctions = ["vfun", "efun", "ffun", "cfun"]\
            if mesh.topology().dim() == 3 else ["vfun", "ffun", "cfun"]

        for dim, attr in enumerate(meshfunctions):
            dgroup = "{}/mesh/meshfunction_{}".format(ggroup, dim)
            mf = MeshFunction("size_t", mesh, dim, mesh.domains())

            if h5file.has_dataset(dgroup):
                h5file.read(mf, dgroup)
                setattr(geo, attr, mf)

        load_local_basis(h5file, lgroup, mesh, geo)
        load_microstructure(h5file, fgroup, mesh, geo, include_sheets)

        # Load the boundary markers
        markers = load_markers(h5file, mesh, ggroup, dgroup)
        geo.markers = markers

        origmeshgroup = "{}/original_geometry".format(h5group)
        if h5file.has_dataset(origmeshgroup):
            original_mesh = Mesh(comm)
            h5file.read(original_mesh, origmeshgroup, True)
            setattr(geo, "original_geometry", original_mesh)

    for attr in meshfunctions:
        if not hasattr(geo, attr):
            setattr(geo, attr, None)

    for attr in (["f0", "s0", "n0", "r0", "c0", "l0"]):
        if not hasattr(geo, attr):
            setattr(geo, attr, None)

    return geo
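
A hedged usage sketch (patient.h5 is a hypothetical file written in the layout this loader expects):

geo = load_geometry_from_h5("patient.h5", h5group="")

mesh = geo.mesh      # dolfin.Mesh
ffun = geo.ffun      # facet markers, or None if absent
f0 = geo.f0          # fiber field, or None if absent
print(geo.markers)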
Example No. 12
    def test_convert_triangle(
            self):  # Disabled because it fails, see FIXME below

        # test no. 1
        from dolfin import Mesh, MPI

        fname = os.path.join(os.path.dirname(__file__), "data", "triangle")
        dfname = fname + ".xml"

        # Read triangle file and convert to a dolfin xml mesh file
        meshconvert.triangle2xml(fname, dfname)

        # Read in dolfin mesh and check number of cells and vertices
        mesh = Mesh(dfname)
        self.assertEqual(mesh.num_vertices(), 96)
        self.assertEqual(mesh.num_cells(), 159)

        # Clean up
        os.unlink(dfname)

        # test no. 2
        from dolfin import MPI, Mesh, MeshFunction, \
                           edges, Edge, faces, Face, \
                           SubsetIterator, facets

        fname = os.path.join(os.path.dirname(__file__), "data",
                             "test_Triangle_3")
        dfname = fname + ".xml"
        dfname0 = fname + ".attr0.xml"

        # Read triangle file and convert to a dolfin xml mesh file
        meshconvert.triangle2xml(fname, dfname)

        # Read in dolfin mesh and check number of cells and vertices
        mesh = Mesh(dfname)
        mesh.init()
        mfun = MeshFunction('double', mesh, dfname0)
        self.assertEqual(mesh.num_vertices(), 58)
        self.assertEqual(mesh.num_cells(), 58)

        # Create a size_t MeshFunction and assign the values based on the
        # converted MeshFunction
        cf = MeshFunction("size_t", mesh, mesh.topology().dim())
        cf.array()[mfun.array() == 10.0] = 0
        cf.array()[mfun.array() == -10.0] = 1

        # Measure the total area of cells with marker 0 and 1
        add = lambda x, y: x + y
        area0 = reduce(add, (Face(mesh, cell.index()).area() \
                             for cell in SubsetIterator(cf, 0)), 0.0)
        area1 = reduce(add, (Face(mesh, cell.index()).area() \
                             for cell in SubsetIterator(cf, 1)), 0.0)
        total_area = reduce(add, (face.area() for face in faces(mesh)), 0.0)

        # Check that all cells in the two domains are either above or below y=0
        self.assertTrue(
            all(cell.midpoint().y() < 0 for cell in SubsetIterator(cf, 0)))
        self.assertTrue(
            all(cell.midpoint().y() > 0 for cell in SubsetIterator(cf, 1)))

        # Check that the areas add up
        self.assertAlmostEqual(area0 + area1, total_area)

        # Measure the edge length of the two edge domains
        #edge_markers = mesh.domains().facet_domains()
        edge_markers = mesh.domains().markers(mesh.topology().dim() - 1)
        self.assertTrue(edge_markers is not None)
        #length0 = reduce(add, (Edge(mesh, e.index()).length() \
        #                    for e in SubsetIterator(edge_markers, 0)), 0.0)
        length0, length1 = 0.0, 0.0
        for edge_index, marker in edge_markers.items():
            if marker == 0:
                length0 += Edge(mesh, int(edge_index)).length()
            elif marker == 1:
                length1 += Edge(mesh, int(edge_index)).length()

        # Total length of all edges and total length of boundary edges
        total_length = reduce(add, (e.length() for e in edges(mesh)), 0.0)
        boundary_length = reduce(add, (Edge(mesh, f.index()).length() \
                          for f in facets(mesh) if f.exterior()), 0.0)

        # Check that the edges add up
        self.assertAlmostEqual(length0 + length1, total_length)
        self.assertAlmostEqual(length1, boundary_length)

        # Clean up
        os.unlink(dfname)
        os.unlink(dfname0)
Example No. 13
def scalar_laplacians(
    mesh: df.Mesh,
    markers: Optional[Dict[str, int]] = None,
    ffun: Optional[MeshFunction] = None,
    use_krylov_solver: bool = False,
    krylov_solver_atol: Optional[float] = None,
    krylov_solver_rtol: Optional[float] = None,
    krylov_solver_max_its: Optional[int] = None,
    verbose: bool = False,
    strict: bool = False,
) -> Dict[str, df.Function]:
    """
    Calculate the laplacians

    Arguments
    ---------
    mesh : dolfin.Mesh
       A dolfin mesh
    markers : dict (optional)
        A dictionary with the markers for the
        different boundaries defined in the facet function
        or within the mesh itself.
        The following markers must be provided:
        'base', 'lv', 'epi', 'rv' (optional).
        If the markers are not provided the following default
        values will be used: base = 10, rv = 20, lv = 30, epi = 40.
    ffun : dolfin.MeshFunction (optional)
        A facet function containing the boundary markers.
        If not provided, the markers stored within the mesh will be used.
    use_krylov_solver: bool
        If True use a Krylov solver, by default False
    krylov_solver_atol: float (optional)
        If a Krylov solver is used, this option specifies a
        convergence criterion in terms of the absolute
        residual. Default: 1e-15.
    krylov_solver_rtol: float (optional)
        If a Krylov solver is used, this option specifies a
        convergence criterion in terms of the relative
        residual. Default: 1e-10.
    krylov_solver_max_its: int (optional)
        If a Krylov solver is used, this option specifies the
        maximum number of iterations to perform. Default: 10000.
    verbose: bool
        If True, print more info, by default False
    strict: bool
        If True, raise a RuntimeError if the solutions do not sum to 1.0
    """

    if not isinstance(mesh, df.Mesh):
        raise TypeError("Expected a dolfin.Mesh as the mesh argument.")

    # Init connectivities
    mesh.init(2)
    if ffun is None:
        ffun = df.MeshFunction("size_t", mesh, 2, mesh.domains())

    # Boundary markers, solutions and cases
    cases, boundaries, markers = find_cases_and_boundaries(ffun, markers)
    markers_str = "\n".join(
        ["{}: {}".format(k, v) for k, v in markers.items()])
    df.info(("Compute scalar laplacian solutions with the markers: \n"
             "{}").format(markers_str, ), )

    check_boundaries_are_marked(
        mesh=mesh,
        ffun=ffun,
        markers=markers,
        boundaries=boundaries,
    )

    # Compute the apex-to-base solutions
    num_vertices = mesh.num_vertices()
    num_cells = mesh.num_cells()
    if mesh.mpi_comm().size > 1:
        num_vertices = mesh.mpi_comm().allreduce(num_vertices)
        num_cells = mesh.mpi_comm().allreduce(num_cells)
    df.info("  Num vertices: {0}".format(num_vertices))
    df.info("  Num cells: {0}".format(num_cells))

    if "mv" in cases and "av" in cases:
        # Use Doste approach
        pass

    # Else use the Bayer approach
    return bayer(
        cases=cases,
        mesh=mesh,
        markers=markers,
        ffun=ffun,
        verbose=verbose,
        use_krylov_solver=use_krylov_solver,
        strict=strict,
        krylov_solver_atol=krylov_solver_atol,
        krylov_solver_rtol=krylov_solver_rtol,
        krylov_solver_max_its=krylov_solver_max_its,
    )
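
A hedged usage sketch (mesh and ffun are assumed to carry boundary markers in the default convention; the "lv" and "epi" keys are the ones used in mark_biv_mesh above):

solutions = scalar_laplacians(mesh=mesh, ffun=ffun)

# Each entry is a dolfin.Function; evaluate e.g. at a cell midpoint
for cell in df.cells(mesh):
    print(solutions["lv"](cell.midpoint()), solutions["epi"](cell.midpoint()))
    break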
Example No. 14
def dolfin_ldrb(
    mesh: df.Mesh,
    fiber_space: str = "CG_1",
    ffun: Optional[df.MeshFunction] = None,
    markers: Optional[Dict[str, int]] = None,
    log_level: int = logging.INFO,
    use_krylov_solver: bool = False,
    krylov_solver_atol: Optional[float] = None,
    krylov_solver_rtol: Optional[float] = None,
    krylov_solver_max_its: Optional[int] = None,
    strict: bool = False,
    save_markers: bool = False,
    alpha_endo_lv: float = 40,
    alpha_epi_lv: float = -50,
    alpha_endo_rv: Optional[float] = None,
    alpha_epi_rv: Optional[float] = None,
    alpha_endo_sept: Optional[float] = None,
    alpha_epi_sept: Optional[float] = None,
    beta_endo_lv: float = -65,
    beta_epi_lv: float = 25,
    beta_endo_rv: Optional[float] = None,
    beta_epi_rv: Optional[float] = None,
    beta_endo_sept: Optional[float] = None,
    beta_epi_sept: Optional[float] = None,
):
    r"""
    Create fiber, cross fibers and sheet directions

    Arguments
    ---------
    mesh : dolfin.Mesh
        The mesh
    fiber_space : str
        A string of the form {family}_{degree} which
        determines the space in which the fibers are calculated.
        If not provided, a first-order Lagrange space will be used,
        i.e. Lagrange_1.
    ffun : dolfin.MeshFunctionSizet (optional)
        A facet function containing markers for the boundaries.
        If not provided, the markers stored within the mesh will
        be used.
    markers : dict (optional)
        A dictionary with the markers for the
        different boundaries defined in the facet function
        or within the mesh itself.
        The following markers must be provided:
        'base', 'lv', 'epi', 'rv' (optional).
        If the markers are not provided the following default
        values will be used: base = 10, rv = 20, lv = 30, epi = 40
    log_level : int
        How much to print. DEBUG=10, INFO=20, WARNING=30.
        Default: INFO
    use_krylov_solver: bool
        If True use Krylov solver, by default False
    krylov_solver_atol: float (optional)
        If a Krylov solver is used, this option specifies a
        convergence criterion in terms of the absolute
        residual. Default: 1e-15.
    krylov_solver_rtol: float (optional)
        If a Krylov solver is used, this option specifies a
        convergence criterion in terms of the relative
        residual. Default: 1e-10.
    krylov_solver_max_its: int (optional)
        If a Krylov solver is used, this option specifies the
        maximum number of iterations to perform. Default: 10000.
    strict: bool
        If True, raise a RuntimeError if the solutions do not sum to 1.0
    save_markers: bool
        If True, save the markers of the geometry. This is useful if you
        want to verify that the LV, RV and septum are marked correctly.
    angles : kwargs
        Keyword arguments with the fiber and sheet angles.
        It is possible to set different angles on the LV,
        RV and septum; however, if either the RV or septum
        angles are not provided, the angles on the LV
        will be used. The default values are taken from the
        original paper, namely

        .. math::

            \alpha_{\text{endo}} &= 40 \\
            \alpha_{\text{epi}} &= -50 \\
            \beta_{\text{endo}} &= -65 \\
            \beta_{\text{epi}} &= 25

        The following keyword arguments are possible:

        alpha_endo_lv : scalar
            Fiber angle at the LV endocardium.
        alpha_epi_lv : scalar
            Fiber angle at the LV epicardium.
        beta_endo_lv : scalar
            Sheet angle at the LV endocardium.
        beta_epi_lv : scalar
            Sheet angle at the LV epicardium.
        alpha_endo_rv : scalar
            Fiber angle at the RV endocardium.
        alpha_epi_rv : scalar
            Fiber angle at the RV epicardium.
        beta_endo_rv : scalar
            Sheet angle at the RV endocardium.
        beta_epi_rv : scalar
            Sheet angle at the RV epicardium.
        alpha_endo_sept : scalar
            Fiber angle at the septum endocardium.
        alpha_epi_sept : scalar
            Fiber angle at the septum epicardium.
        beta_endo_sept : scalar
            Sheet angle at the septum endocardium.
        beta_epi_sept : scalar
            Sheet angle at the septum epicardium.


    """
    df.set_log_level(log_level)

    if not isinstance(mesh, df.Mesh):
        raise TypeError("Expected a dolfin.Mesh as the mesh argument.")

    if ffun is None:
        ffun = df.MeshFunction("size_t", mesh, 2, mesh.domains())
    # Solve the Laplace-Dirichlet problem
    verbose = log_level < logging.INFO
    data = laplace(
        mesh=mesh,
        fiber_space=fiber_space,
        markers=markers,
        ffun=ffun,
        use_krylov_solver=use_krylov_solver,
        krylov_solver_atol=krylov_solver_atol,
        krylov_solver_rtol=krylov_solver_rtol,
        krylov_solver_max_its=krylov_solver_max_its,
        verbose=verbose,
        strict=strict,
    )

    dofs = dofs_from_function_space(mesh, fiber_space)
    marker_scalar = np.zeros_like(data["lv_scalar"])
    system = compute_fiber_sheet_system(
        dofs=dofs,
        marker_scalar=marker_scalar,
        alpha_endo_lv=alpha_endo_lv,
        alpha_epi_lv=alpha_epi_lv,
        alpha_endo_rv=alpha_endo_rv,
        alpha_epi_rv=alpha_epi_rv,
        alpha_endo_sept=alpha_endo_sept,
        alpha_epi_sept=alpha_epi_sept,
        beta_endo_lv=beta_endo_lv,
        beta_epi_lv=beta_epi_lv,
        beta_endo_rv=beta_endo_rv,
        beta_epi_rv=beta_epi_rv,
        beta_endo_sept=beta_endo_sept,
        beta_epi_sept=beta_epi_sept,
        **data,
    )  # type:ignore

    if save_markers:
        Vv = utils.space_from_string(fiber_space, mesh, dim=1)
        markers_fun = df.Function(Vv)
        markers_fun.vector().set_local(marker_scalar)
        markers_fun.vector().apply("insert")
        df.File("markers.pvd") << markers_fun

    df.set_log_level(log_level)
    return fiber_system_to_dolfin(system, mesh, fiber_space)
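
A hedged usage sketch (the angle values are illustrative, and the attribute names on the returned object are assumptions; the return value is taken to bundle the fiber, sheet and sheet-normal fields):

system = dolfin_ldrb(
    mesh=mesh,
    fiber_space="CG_1",
    ffun=ffun,
    alpha_endo_lv=60, alpha_epi_lv=-60,
    beta_endo_lv=0, beta_epi_lv=0,
)
# Attribute name assumed here
df.File("fiber.pvd") << system.fiber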
Example No. 15
def dolfin_ldrb(
    mesh: df.Mesh,
    fiber_space: str = "CG_1",
    ffun: Optional[df.MeshFunction] = None,
    markers: Optional[Dict[str, int]] = None,
    log_level: int = logging.INFO,
    use_krylov_solver: bool = False,
    strict: bool = False,
    **angles: Optional[float],
):
    r"""
    Create fiber, cross fibers and sheet directions

    Arguments
    ---------
    mesh : dolfin.Mesh
        The mesh
    fiber_space : str
        A string of the form {family}_{degree} which
        determines the space in which the fibers are calculated.
        If not provided, a first-order Lagrange space will be used,
        i.e. Lagrange_1.
    ffun : dolfin.MeshFunctionSizet (optional)
        A facet function containing markers for the boundaries.
        If not provided, the markers stored within the mesh will
        be used.
    markers : dict (optional)
        A dictionary with the markers for the
        different boundaries defined in the facet function
        or within the mesh itself.
        The following markers must be provided:
        'base', 'lv', 'epi', 'rv' (optional).
        If the markers are not provided the following default
        values will be used: base = 10, rv = 20, lv = 30, epi = 40
    log_level : int
        How much to print. DEBUG=10, INFO=20, WARNING=30.
        Default: INFO
    use_krylov_solver: bool
        If True use Krylov solver, by default False
    strict: bool
        If True, raise a RuntimeError if the solutions do not sum to 1.0
    angles : kwargs
        Keyword arguments with the fiber and sheet angles.
        It is possible to set different angles on the LV,
        RV and septum; however, if either the RV or septum
        angles are not provided, the angles on the LV
        will be used. The default values are taken from the
        original paper, namely

        .. math::

            \alpha_{\text{endo}} &= 40 \\
            \alpha_{\text{epi}} &= -50 \\
            \beta_{\text{endo}} &= -65 \\
            \beta_{\text{epi}} &= 25

        The following keyword arguments are possible:

        alpha_endo_lv : scalar
            Fiber angle at the LV endocardium.
        alpha_epi_lv : scalar
            Fiber angle at the LV epicardium.
        beta_endo_lv : scalar
            Sheet angle at the LV endocardium.
        beta_epi_lv : scalar
            Sheet angle at the LV epicardium.
        alpha_endo_rv : scalar
            Fiber angle at the RV endocardium.
        alpha_epi_rv : scalar
            Fiber angle at the RV epicardium.
        beta_endo_rv : scalar
            Sheet angle at the RV endocardium.
        beta_epi_rv : scalar
            Sheet angle at the RV epicardium.
        alpha_endo_sept : scalar
            Fiber angle at the septum endocardium.
        alpha_epi_sept : scalar
            Fiber angle at the septum epicardium.
        beta_endo_sept : scalar
            Sheet angle at the septum endocardium.
        beta_epi_sept : scalar
            Sheet angle at the septum epicardium.


    """
    df.set_log_level(log_level)

    if not isinstance(mesh, df.Mesh):
        raise TypeError("Expected a dolfin.Mesh as the mesh argument.")

    if ffun is None:
        ffun = df.MeshFunction("size_t", mesh, 2, mesh.domains())
    # Solve the Laplace-Dirichlet problem
    verbose = log_level < logging.INFO
    data = laplace(
        mesh=mesh,
        fiber_space=fiber_space,
        markers=markers,
        ffun=ffun,
        use_krylov_solver=use_krylov_solver,
        verbose=verbose,
        strict=strict,
    )

    dofs = dofs_from_function_space(mesh, fiber_space)

    system = compute_fiber_sheet_system(dofs=dofs, **data,
                                        **angles)  # type:ignore

    df.set_log_level(log_level)
    return fiber_system_to_dolfin(system, mesh, fiber_space)
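
A hedged usage sketch of this keyword-angle variant (angles not given fall back to the LV values, per the docstring; the marker values follow the default convention and are illustrative):

system = dolfin_ldrb(
    mesh=mesh,
    fiber_space="CG_1",
    ffun=ffun,
    markers={"base": 10, "rv": 20, "lv": 30, "epi": 40},
    alpha_endo_lv=40, alpha_epi_lv=-50,
    beta_endo_lv=-65, beta_epi_lv=25,
)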