Example no. 1
def test_export_xdmf():
    mesh = fenics.UnitSquareMesh(3, 3)
    V = fenics.FunctionSpace(mesh, 'P', 1)
    folder = "Solution"
    exports = {
        "xdmf": {
            "functions": ['solute', 'retention'],
            "labels":  ['a', 'b'],
            "folder": folder
        }
    }
    files = [fenics.XDMFFile(folder + "/" + "a.xdmf"),
             fenics.XDMFFile(folder + "/" + "b.xdmf")]
    assert FESTIM.export_xdmf(
        [fenics.Function(V), fenics.Function(V)],
        exports, files, 20) is None

    exports["xdmf"]["functions"] = ['solute', 'blabla']

    with pytest.raises(KeyError, match=r'blabla'):
        FESTIM.export_xdmf(
            [fenics.Function(V), fenics.Function(V)],
            exports, files, 20)

    exports["xdmf"]["functions"] = ['solute', '13']
    with pytest.raises(KeyError, match=r'13'):
        FESTIM.export_xdmf(
            [fenics.Function(V), fenics.Function(V)],
            exports, files, 20)
    def readCellExpression(self,
                           group_value_dict,
                           value_type="scalar",
                           overlap=lambda x: x[0],
                           *args,
                           **kwargs):
        """
        Reads cell expression and returns it.
        """
        value_type_dictionary = {
            "scalar": ScalarCellExpressionFromXDMF,
            "vector2d": Vector2DCellExpressionFromXDMF,
            "vector3d": Vector3DCellExpressionFromXDMF
        }

        self.readMesh()
        xdmffile = fenics.XDMFFile(self.xdmffilename)
        cf = value_type_dictionary[value_type.lower()](group_value_dict,
                                                       overlap=overlap,
                                                       *args,
                                                       **kwargs)
        cf.init()
        for (key, value) in cf.group_value_dict.items():
            cf.markers[key] = fenics.MeshFunction("size_t", self.mesh,
                                                  self.mesh.topology().dim())
            xdmffile.read(cf.markers[key], key)
            cf.dx[key] = fenics.Measure("dx",
                                        domain=self.mesh,
                                        subdomain_data=cf.markers[key])
        xdmffile.close()
        return cf
    def readMesh(self):
        """
        If mesh instance is None, read mesh instance from file denoted
        by filename property.
        """
        # TODO: implement mesh read in for open file
        if self.mesh is None:
            xdmffile = fenics.XDMFFile(self.xdmffilename)
            self.mesh = fenics.Mesh()
            xdmffile.read(self.mesh)
            xdmffile.close()
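For reference, the pattern the two reader methods above implement can be condensed into a standalone sketch; the file path and group name below are assumptions, not taken from the original source.

import fenics

# Read the mesh, read a named cell marker, and build a restricted measure.
mesh = fenics.Mesh()
with fenics.XDMFFile("domains.xdmf") as xdmffile:      # assumed file name
    xdmffile.read(mesh)

markers = fenics.MeshFunction("size_t", mesh, mesh.topology().dim())
with fenics.XDMFFile("domains.xdmf") as xdmffile:
    xdmffile.read(markers, "inclusion")                # assumed group name
dx = fenics.Measure("dx", domain=mesh, subdomain_data=markers)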
Example no. 4
    def build_mesh(self):
        path_to_msh = 'data/gmsh/{}/{}/mesh'.format(self.case_name,
                                                    self.mesh_refinement_level)
        save_with_meshio(path_to_msh, 2)

        path_to_xdmf = 'data/gmsh/{}/{}/mesh.xdmf'.format(
            self.case_name, self.mesh_refinement_level)
        xdmf_mesh = fe.XDMFFile(path_to_xdmf)
        self.mesh = fe.Mesh()
        xdmf_mesh.read(self.mesh)

        self.length = 500
        self.height = 500
        self.segment = 30

        # domain = mshr.Polygon([fe.Point(0., 0.),
        #                        fe.Point(0., self.height)])
        # resolution = 100 if self.local_refinement_iteration == 0 else 200
        # self.mesh = mshr.generate_mesh(domain, resolution)

        # Reference
        # https://scicomp.stackexchange.com/questions/32647/how-to-use-meshfunction-in-fenics-dolfin
        # https://fenicsproject.org/qa/596/setting-condition-for-mesh-refinement/
        # for i in range(self.local_refinement_iteration):
        #     cell_markers = fe.MeshFunction('bool', self.mesh, self.mesh.topology().dim())
        #     cell_markers.set_all(False)
        #     for cell in fe.cells(self.mesh):
        #         p = cell.midpoint()
        #         if  p[0] > 1./20.*self.length and p[0] < 10.5/20.*self.length and p[1] > 9.5/20.*self.height and p[1] < 13/20*self.height:
        #         # if np.sqrt((p[0] - self.length/2.)**2 + (p[1] - self.height/2.)**2) < self.length/5.:
        #             cell_markers[cell] = True
        #     self.mesh = fe.refine(self.mesh, cell_markers)

        length = self.length
        height = self.height
        segment = self.segment

        class Lower(fe.SubDomain):
            def inside(self, x, on_boundary):
                return on_boundary and fe.near(x[1], 0)

        class Upper(fe.SubDomain):
            def inside(self, x, on_boundary):
                return on_boundary and fe.near(x[1], height)

        class Segment(fe.SubDomain):
            def inside(self, x, on_boundary):
                return fe.near(x[1], height / 2.) and fe.near(
                    x[0], length - segment)
                # return  fe.near(x[1], height/2.) and x[0] >= length - segment

        self.lower = Lower()
        self.upper = Upper()
        self.segment = Segment()
Example no. 5
def test_define_xdmf_files():
    folder = "Solution"
    expected = [fenics.XDMFFile(folder + "/" + "a.xdmf"),
                fenics.XDMFFile(folder + "/" + "b.xdmf")]
    exports = {
        "xdmf": {
            "functions": ['solute', '1'],
            "labels":  ['a', 'b'],
            "folder": folder
        }
    }
    assert len(expected) == len(FESTIM.define_xdmf_files(exports))

    # Test an int type for folder
    with pytest.raises(TypeError, match=r'str'):
        folder = 123
        exports = {
            "xdmf": {
                "functions": ['solute', '1'],
                "labels":  ['a', 'b'],
                "folder": folder
            }
        }
        FESTIM.define_xdmf_files(exports)

    # Test an empty string for folder
    with pytest.raises(ValueError, match=r'empty string'):
        folder = ''
        exports = {
            "xdmf": {
                "functions": ['solute', '1'],
                "labels":  ['a', 'b'],
                "folder": folder
            }
        }
        FESTIM.define_xdmf_files(exports)
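The tests above pin down the expected behaviour of FESTIM.define_xdmf_files: one XDMFFile per label, a TypeError for a non-string folder, and a ValueError for an empty folder. A hypothetical sketch consistent with those tests (not FESTIM's actual implementation):

import fenics

def define_xdmf_files_sketch(exports):
    # Basic validation of the output folder, mirroring what the tests expect.
    folder = exports["xdmf"]["folder"]
    if not isinstance(folder, str):
        raise TypeError("folder must be of type str")
    if folder == "":
        raise ValueError("folder cannot be an empty string")
    # One XDMF file per requested label.
    return [fenics.XDMFFile(folder + "/" + label + ".xdmf")
            for label in exports["xdmf"]["labels"]]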
Example no. 6
 def readFacetFunction(self, group_value_dict, *args, **kwargs):
     """
     Reads facet function and returns it.
     """
     self.readMesh()
     xdmffile = fenics.XDMFFile(self.xdmffilename)
     ff = FacetFunctionFromXDMF(group_value_dict, *args, **kwargs)
     ff.init()
     for (key, value) in ff.group_value_dict.items():
         ff.markers[key] = fenics.MeshFunction("size_t", self.mesh, self.mesh.topology().dim() - 1)
         xdmffile.read(ff.markers[key], key)
         ff.marked[key] = value.get("marked", 1)
         ff.ds[key] = fenics.Measure("ds", domain=self.mesh, subdomain_data=ff.markers[key])
         ff.bcs[key] = value
     xdmffile.close()
     return ff
Example no. 7
 def readXDMFFile(self, xdmffilename, group_value_dict):
     xdmffile = fenics.XDMFFile(xdmffilename)
     self.group_value_dict = group_value_dict
     self.mesh = fenics.Mesh()
     xdmffile.read(self.mesh)
     self.markers = {}
     self.marked = {}
     self.ds = {}
     self.bcs = {}
     for (key, value) in self.group_value_dict.items():
         # Fenics interface here: create facet function of type size_t (positive int) for every group
         # TODO: examine whether size_t is appropriate or this class could be generalized
         self.markers[key] = fenics.FacetFunction("size_t", self.mesh)
         xdmffile.read(self.markers[key], key)
         self.marked[key] = value.get("marked", 1)
         self.ds[key] = fenics.Measure("ds", domain=self.mesh, subdomain_data=self.markers[key])
         self.bcs[key] = value
     xdmffile.close()
Example no. 8
    def __init__(self,
                 mesh: fe.Mesh,
                 constitutive_model: ConstitutiveModelBase,
                 function_space: fe.FunctionSpace,
                 bf: fe.Expression = fe.Expression('0', degree=0),
                 outputs: [Outputs] = DEFAULT_OUTPUTS):
        self._mesh = mesh
        self._constitutive_model = constitutive_model
        self.V = function_space
        self.T = fe.TensorFunctionSpace(mesh, "P", 1)
        self.S = fe.FunctionSpace(mesh, "P", 1)

        self._bf = bf
        self.w = None
        self._outputs = outputs

        self._output_files = {
            output: fe.XDMFFile(f'./{job_name}/{output.name}.xdmf')
            for output in self._outputs
        }
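A minimal sketch of how such a dictionary of per-output XDMFFile handles is typically written during time stepping; the helper name and the fields mapping are assumptions, not part of the class above.

def write_outputs(output_files, fields, t):
    # Write each requested field to its XDMFFile at time t.
    for output, xdmf_file in output_files.items():
        xdmf_file.write(fields[output], t)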
Example no. 9
    def build_mesh(self):
        # Read mesh from matlab file .mat and convert it to FEniCS mesh
        mesh_matlab = loadmat('data/mat/mesh/PF-1.mat')
        points = mesh_matlab['p']
        cells = mesh_matlab['t']
        points = points.T
        cells = [("triangle", (cells[:-1, :] - 1).T)]
        meshio.write_points_cells(
            'data/xdmf/{}/mesh.xdmf'.format(self.case_name), points, cells)
        xdmf_mesh = fe.XDMFFile('data/xdmf/{}/mesh.xdmf'.format(
            self.case_name))
        self.mesh = fe.Mesh()
        xdmf_mesh.read(self.mesh)

        self.length = 1
        self.height = 1
        length = self.length
        height = self.height

        class Lower(fe.SubDomain):
            def inside(self, x, on_boundary):
                return on_boundary and fe.near(x[1], 0)

        class Upper(fe.SubDomain):
            def inside(self, x, on_boundary):
                return on_boundary and fe.near(x[1], height)

        class Corner(fe.SubDomain):
            def inside(self, x, on_boundary):
                return fe.near(x[0], 0) and fe.near(x[1], 0)

        class Middle(fe.SubDomain):
            def inside(self, x, on_boundary):
                return np.absolute(
                    x[1] - 0.5) < 1e-2 and (x[0] - 0.3) * (x[0] - 0.7) < 1e-7

        self.lower = Lower()
        self.upper = Upper()
        self.corner = Corner()
        self.middle = Middle()
Example no. 10
        def readXDMFfile(self, xdmffilename, group_value_dict):
            """
            Initialization of CellExpressionXDMF by reading an XDMF file.

            @param: xdmffilename: path to xdmf file
            @param: group_value_dict: {"groupname":function(x)}

            function(x) is a function which is evaluated at the marked positions of the cells
            """
            xdmffile = fenics.XDMFFile(xdmffilename)
            self.group_value_dict = group_value_dict
            self.mesh = fenics.Mesh()
            xdmffile.read(self.mesh)
            self.markers = {}
            self.dx = {}
            for (key, value) in self.group_value_dict.items():
                # Fenics interface here: create cell function of type int for every group
                # TODO: examine whether int is appropriate or this class could be generalized
                self.markers[key] = fenics.CellFunction("size_t", self.mesh)
                xdmffile.read(self.markers[key], key)
                self.dx[key] = fenics.Measure("dx", domain=self.mesh, subdomain_data=self.markers[key])
            xdmffile.close()
Example no. 11
# will also compute the stress field and save it, along with the displacement field, in
# an ``XDMFFile``.
# The ``flush_output`` option makes it possible to open the result file before the loop
# has finished, the ``functions_share_mesh`` option indicates that a single mesh is used for
# all functions of a given time step (displacement and stress), while ``rewrite_function_mesh``
# enforces that the same mesh is used for all time steps. These two options enable writing the
# mesh information only once instead of :math:`2N_{steps}` times::

# Time-stepping
time = np.linspace(0, T, Nsteps + 1)
u_tip = np.zeros((Nsteps + 1, ))
energies = np.zeros((Nsteps + 1, 4))
E_damp = 0
E_ext = 0
sig = fnc.Function(Vsig, name="sigma")
xdmf_file = fnc.XDMFFile("%s.xdmf" % (archivo))
xdmf_file.parameters["flush_output"] = True
xdmf_file.parameters["functions_share_mesh"] = True
xdmf_file.parameters["rewrite_function_mesh"] = False

# The time loop is now started. The loading is first evaluated at :math:`t=t_{n+1-\alpha_f}`.
# The corresponding system right-hand side is then assembled and the system is solved. The
# different fields are then updated with the newly computed quantities. Finally, some
# post-processing is performed: stresses are computed and written to the result file, and the
# tip displacement and the different energies are recorded:


def local_project(v, V, u=None):
    """Element-wise projection using LocalSolver"""
    dv = fnc.TrialFunction(V)
    v_ = fnc.TestFunction(V)
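local_project is cut off above. A hedged sketch of how this element-wise projection is usually completed in the legacy DOLFIN demos with LocalSolver (the body below is an assumption, not the example's original continuation):

import fenics as fnc

def local_project_sketch(v, V, u=None):
    """Element-wise L2 projection of v onto V using LocalSolver."""
    dv = fnc.TrialFunction(V)
    v_ = fnc.TestFunction(V)
    a_proj = fnc.inner(dv, v_) * fnc.dx    # local mass matrix
    b_proj = fnc.inner(v, v_) * fnc.dx     # local right-hand side
    solver = fnc.LocalSolver(a_proj, b_proj)
    solver.factorize()
    if u is None:
        u = fnc.Function(V)
    solver.solve_local_rhs(u)
    return u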
Example no. 12
def SolveProblem(LoadCase, ConstitutiveModel, BCsType, FinalRelativeStretch, RelativeStepSize, Dimensions, NumberElements, Mesh, V, u, du, v, Ic, J, F, Psi, Plot = False, Paraview = False):
    
    if LoadCase == 'Compression':
                
        # Load case
        [u_0, u_1, InitialState, Direction, Normal, NumberSteps, DeltaStretch] = LoadCaseDefinition(LoadCase, FinalRelativeStretch, RelativeStepSize, Dimensions, BCsType)
        
        
    elif LoadCase == 'Tension':
                
        # Load case
        [u_0, u_1, InitialState, Direction, Normal, NumberSteps, DeltaStretch] = LoadCaseDefinition(LoadCase, FinalRelativeStretch, RelativeStepSize, Dimensions, BCsType)

        
    elif LoadCase == 'SimpleShear':
                
        # Load case
        [u_0, u_1, InitialState, Direction, Normal, NumberSteps, DeltaStretch] = LoadCaseDefinition(LoadCase, FinalRelativeStretch*2, RelativeStepSize*2, Dimensions, BCsType)

    # Boundary conditions
    [BoundaryConditions, ds] = BCsDefinition(Dimensions, Mesh, V, u_0, u_1, LoadCase, BCsType)
    
    # Estimation of the displacement field using Neo-Hookean model (necessary for Ogden)
    u = Estimate(Ic, J, u, v, du, BoundaryConditions, InitialState, u_1)
    
    # Reformulate the problem with the correct constitutive model
    Pi = Psi * fe.dx

    # First directional derivative of the potential energy
    Fpi = fe.derivative(Pi,u,v)

    # Jacobian of Fpi
    Jac = fe.derivative(Fpi,u,du)
    
    # Define option for the compiler (optional)
    ffc_options = {"optimize": True, \
                   "eliminate_zeros": True, \
                   "precompute_basis_const": True, \
                   "precompute_ip_const": True }

    # Define the problem
    Problem = fe.NonlinearVariationalProblem(Fpi, u, BoundaryConditions, Jac, form_compiler_parameters=ffc_options)

    # Define the solver
    Solver = fe.NonlinearVariationalSolver(Problem)

    # Set solver parameters (optional)
    Prm = Solver.parameters
    Prm['nonlinear_solver'] = 'newton'
    Prm['newton_solver']['linear_solver'] = 'cg'             # Conjugate gradient
    Prm['newton_solver']['preconditioner'] = 'icc'           # Incomplete Cholesky
    Prm['newton_solver']['krylov_solver']['nonzero_initial_guess'] = True
    
    # Data frame to store values
    cols = ['Stretches','P']
    df = pd.DataFrame(columns=cols, index=range(int(NumberSteps)+1), dtype='float64')
    
    if Paraview == True:
        # Results File
        Output_Path = os.path.join('OptimizationResults', BCsType, ConstitutiveModel)
        ResultsFile = fe.XDMFFile(os.path.join(Output_Path, str(NumberElements) + 'Elements_' + LoadCase + '.xdmf'))
        ResultsFile.parameters["flush_output"] = True
        ResultsFile.parameters["functions_share_mesh"] = True
    
    if Plot == True:
        plt.rc('figure', figsize=[12,7])
        fig = plt.figure()
        ax = fig.add_subplot(1, 1, 1)
    
    # Set the stretch state to initial state
    StretchState = InitialState
    
    # Loop to solve for each step
    for Step in range(int(NumberSteps+1)):

        # Update current state
        u_1.s = StretchState

        # Compute solution and save displacement
        Solver.solve()

        # First Piola-Kirchhoff (nominal) stress
        P = fe.diff(Psi, F)

        # Nominal stress vectors normal to upper surface
        p = fe.dot(P,Normal)

        # Reaction force on the upper surface
        f = fe.assemble(fe.inner(p,Direction)*ds(2))

        # Mean nominal stress on the upper surface
        Pm = f/fe.assemble(1*ds(2))

        # Save values to table
        df.loc[Step].Stretches = StretchState
        df.loc[Step].P = Pm

        # Plot
        if Plot == True:
            ax.cla()
            ax.plot(df.Stretches, df.P,  color = 'r', linestyle = '--', label = 'P', marker = 'o', markersize = 8, fillstyle='none')
            ax.set_xlabel('Stretch ratio (-)')
            ax.set_ylabel('Stresses (kPa)')
            ax.xaxis.set_major_locator(plt.MultipleLocator(0.02))
            ax.legend(loc='upper left', frameon=True, framealpha=1)
            display(fig)
            clear_output(wait=True)
            
        if Paraview == True:
            # Project the displacement onto the vector function space
            u_project = fe.project(u, V, solver_type='cg')
            u_project.rename('Displacement (mm)', '')
            ResultsFile.write(u_project,Step)
            
            # Compute nominal stress vector
            p_project = fe.project(p, V)
            p_project.rename("Nominal stress vector (kPa)","")
            ResultsFile.write(p_project,Step)



        # Update the stretch state
        StretchState += DeltaStretch

    return df
Example no. 13
 def __init__(self, file_name: str, append_to_existing: bool,
              field: fenics.Function, field_name: str):
     self.append = append_to_existing
     self.file = fenics.XDMFFile(file_name)
     self.field = field
     self.field_name = field_name
    hessp=SR1(),
    tol=1e-7,
    constraints=volume_constraint,
    bounds=((0, 1.0),) * A.dim(),
    options={"verbose": 3, "gtol": 1e-7, "maxiter": 20},
)

q.assign(0.1)
res = minimize(
    min_f,
    res.x,
    method="trust-constr",
    jac=True,
    hessp=SR1(),
    tol=1e-7,
    constraints=volume_constraint,
    bounds=((0, 1.0),) * A.dim(),
    options={"verbose": 3, "gtol": 1e-7, "maxiter": 100},
)

rho_opt_final = from_numpy(res.x, fenics.Function(A))

c = fenics.plot(rho_opt_final)
plt.colorbar(c)
plt.show()

rho_opt_file = fenics.XDMFFile(
    fenics.MPI.comm_world, "output/control_solution_final.xdmf"
)
rho_opt_file.write(rho_opt_final)
Example no. 15
def test_xdmf_context_unit__ci__():

    with fenics.XDMFFile("test.xdmf") as solution_file:

        return
Example no. 16
def test_xdmf_unit__ci__():

    solution_file = fenics.XDMFFile("test.xdmf")
Example no. 17
def navierStokes(projectId, mesh, faceSets, boundarySets, config):

    log("Navier Stokes Analysis has started")

    # this is the default directory; when the user requests a download, all files in this directory are compressed and sent to the user
    resultDir = "./Results/"

    if len(config["steps"]) > 1:
        return "more than 1 step is not supported yet"

    # config is a dictionary containing all the user inputs for solver configurations
    t_init = 0.0
    t_final = float(config['steps'][0]["finalTime"])
    t_num = int(config['steps'][0]["iterationNo"])
    dt = ((t_final - t_init) / t_num)
    t = t_init

    #
    #  Viscosity coefficient.
    #
    nu = float(config['materials'][0]["viscosity"])
    rho = float(config['materials'][0]["density"])

    #
    #  Declare Finite Element Spaces
    # do not use triangle directly
    P2 = fn.VectorElement("P", mesh.ufl_cell(), 2)
    P1 = fn.FiniteElement("P", mesh.ufl_cell(), 1)
    TH = fn.MixedElement([P2, P1])
    V = fn.VectorFunctionSpace(mesh, "P", 2)
    Q = fn.FunctionSpace(mesh, "P", 1)
    W = fn.FunctionSpace(mesh, TH)

    #
    #  Declare Finite Element Functions
    #
    (u, p) = fn.TrialFunctions(W)
    (v, q) = fn.TestFunctions(W)
    w = fn.Function(W)
    u0 = fn.Function(V)
    p0 = fn.Function(Q)

    #
    # Macros needed for weak formulation.
    #
    def contract(u, v):
        return fn.inner(fn.nabla_grad(u), fn.nabla_grad(v))

    def b(u, v, w):
        return 0.5 * (fn.inner(fn.dot(u, fn.nabla_grad(v)), w) -
                      fn.inner(fn.dot(u, fn.nabla_grad(w)), v))

    # Define boundaries
    bcs = []
    for BC in config['BCs']:
        if BC["boundaryType"] == "wall":
            for edge in json.loads(BC["edges"]):
                bcs.append(
                    fn.DirichletBC(W.sub(0),
                                   fn.Constant((0.0, 0.0, 0.0)),
                                   boundarySets,
                                   int(edge),
                                   method='topological'))
        if BC["boundaryType"] == "inlet":
            vel = json.loads(BC['value'])
            for edge in json.loads(BC["edges"]):
                bcs.append(
                    fn.DirichletBC(W.sub(0),
                                   fn.Expression(
                                       (str(vel[0]), str(vel[1]), str(vel[2])),
                                       degree=2),
                                   boundarySets,
                                   int(edge),
                                   method='topological'))
        if BC["boundaryType"] == "outlet":
            for edge in json.loads(BC["edges"]):
                bcs.append(
                    fn.DirichletBC(W.sub(1),
                                   fn.Constant(float(BC['value'])),
                                   boundarySets,
                                   int(edge),
                                   method='topological'))

    f = fn.Constant((0.0, 0.0, 0.0))

    #  weak form NSE
    NSE = (1.0/dt)*fn.inner(u, v)*fn.dx + b(u0, u, v)*fn.dx + nu * \
        contract(u, v)*fn.dx - fn.div(v)*p*fn.dx + q*fn.div(u)*fn.dx
    LNSE = fn.inner(f, v) * fn.dx + (1. / dt) * fn.inner(u0, v) * fn.dx

    velocity_file = fn.XDMFFile(resultDir + "/vel.xdmf")
    pressure_file = fn.XDMFFile(resultDir + "/pressure.xdmf")
    velocity_file.parameters["flush_output"] = True
    velocity_file.parameters["functions_share_mesh"] = True
    pressure_file.parameters["flush_output"] = True
    pressure_file.parameters["functions_share_mesh"] = True
    #
    # code for projecting a boundary condition into a file for visualization
    #
    # for bc in bcs:
    #     bc.apply(w.vector())
    # fn.File("para_plotting/bc.pvd") << w.sub(0)

    for jj in range(0, t_num):
        t = t + dt
        # print('t = ' + str(t))
        A, b = fn.assemble_system(NSE, LNSE, bcs)
        fn.solve(A, w.vector(), b)
        # fn.solve(NSE==LNSE,w,bcs)
        fn.assign(u0, w.sub(0))
        fn.assign(p0, w.sub(1))
        # Save Solutions to Paraview File
        if (jj % 20 == 0):
            velocity_file.write(u0, t)
            pressure_file.write(p0, t)
            sendFile(projectId, resultDir + "vel.xdmf")
            sendFile(projectId, resultDir + "vel.h5")
            sendFile(projectId, resultDir + "pressure.xdmf")
            sendFile(projectId, resultDir + "pressure.h5")
            statusUpdate(projectId, "STARTED", {"progress": jj / t_num * 100})
# Define variational problem for step 3 (Velocity correction step)
a3 = fs.dot(u, v) * fs.dx
L3 = fs.dot(u_, v) * fs.dx - k * fs.dot(fs.nabla_grad(p_ - p_n), v) * fs.dx

# Assemble matrices
A1 = fs.assemble(a1)
A2 = fs.assemble(a2)
A3 = fs.assemble(a3)

# Apply boundary conditions to matrices
[bc.apply(A1) for bc in bcu]
[bc.apply(A2) for bc in bcp]

# Create XDMF files for visualization output
xdmffile_u = fs.XDMFFile('navier_stokes_cylinder/velocity.xdmf')
xdmffile_p = fs.XDMFFile('navier_stokes_cylinder/pressure.xdmf')

# Create time series (for use in reaction_system.py)
timeseries_u = fs.TimeSeries('navier_stokes_cylinder/velocity_series')
timeseries_p = fs.TimeSeries('navier_stokes_cylinder/pressure_series')

# Save mesh to file (for use in reaction_system.py)
fs.File('navier_stokes_cylinder/cylinder.xml.gz') << mesh

# Create progress bar
progress = fs.Progress('Time-stepping', num_steps)
#fs.set_log_level(0)

# Time-stepping
t = 0
Example no. 19
import fenics as fn
import numpy as np
import sympy as sym
import scipy as sc
from scipy import constants
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm  #Colormap
import meshio as mio
#import mshr as msr

# %%

# %%
mesh = fn.Mesh()
with fn.XDMFFile("meshing/Dmesh.xdmf") as infile:
    infile.read(mesh)
mvc = fn.MeshValueCollection("size_t", mesh, 2)
with fn.XDMFFile("meshing/Dmf.xdmf") as infile:
    infile.read(mvc, "name_to_read")
mf = fn.cpp.mesh.MeshFunctionSizet(mesh, mvc)

# %%
M = 2  #species

Poly = fn.FiniteElement('Lagrange', mesh.ufl_cell(), 2)
Multi = fn.FiniteElement('Real', mesh.ufl_cell(), 0)
ElemP = [Poly] * (M + 1)
ElemR = [Multi] * (M)
Elem = [ElemP + ElemR][0]
Mixed = fn.MixedElement(Elem)
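The markers read above are usually turned into integration measures. A hedged continuation sketch, assuming a 2D mesh (so the dimension-2 entities in Dmf.xdmf mark cells) and assuming a subdomain tag 1 exists:

V = fn.FunctionSpace(mesh, Mixed)                       # mixed space for the M species plus multipliers
dx = fn.Measure("dx", domain=mesh, subdomain_data=mf)   # cell measure restricted by the imported markers
area_1 = fn.assemble(fn.Constant(1.0) * dx(1))          # area of the subdomain tagged 1
print(area_1)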
Example no. 20
 def create_custom_xdmf_files(self):
     self.file_results_custom = fe.XDMFFile(
         'data/xdmf/{}/u_refine_{}_mfem_{}.xdmf'.format(
             self.case_name, self.local_refinement_iteration,
             self.map_flag))
     self.file_results_custom.parameters["functions_share_mesh"] = True
def import_mesh(arg):
	"""Imports a mesh file for use with CASHOCS / FEniCS.



	This function imports a mesh file that was generated by GMSH and converted to
	.xdmf with the command line function :ref:`cashocs-convert <cashocs_convert>`.
	If there are Physical quantities specified in the GMSH file, these are imported
	to the subdomains and boundaries output of this function and can also be directly
	accessed via the measures, e.g., with ``dx(1)``, ``ds(1)``, etc.

	Parameters
	----------
	arg : str or configparser.ConfigParser
		This is either a string, in which case it corresponds to the location
		of the mesh file in .xdmf file format, or a config file that
		has this path stored in its settings, under the section Mesh, as
		parameter ``mesh_file``.

	Returns
	-------
	mesh : dolfin.cpp.mesh.Mesh
		The imported (computational) mesh.
	subdomains : dolfin.cpp.mesh.MeshFunctionSizet
		A :py:class:`fenics.MeshFunction` object containing the subdomains,
		i.e., the Physical regions marked in the GMSH
		file.
	boundaries : dolfin.cpp.mesh.MeshFunctionSizet
		A MeshFunction object containing the boundaries,
		i.e., the Physical regions marked in the GMSH
		file. Can, e.g., be used to set up boundary
		conditions.
	dx : ufl.measure.Measure
		The volume measure of the mesh corresponding to
		the subdomains (i.e. GMSH Physical region indices).
	ds : ufl.measure.Measure
		The surface measure of the mesh corresponding to
		the boundaries (i.e. GMSH Physical region indices).
	dS : ufl.measure.Measure
		The interior facet measure of the mesh corresponding
		to boundaries (i.e. GMSH Physical region indices).
	"""

	start_time = time.time()
	info('Importing mesh.')
	
	# Check for the file format
	if type(arg) == str:
		mesh_file = arg
		mesh_attribute = 'str'
	elif type(arg) == configparser.ConfigParser:
		mesh_attribute = 'config'
		### overloading for remeshing
		if not arg.getboolean('Mesh', 'remesh', fallback=False):
			mesh_file = arg.get('Mesh', 'mesh_file')
		else:
			if not ('_cashocs_remesh_flag' in sys.argv):
				mesh_file = arg.get('Mesh', 'mesh_file')
			else:
				temp_dir = sys.argv[-1]
				with open(temp_dir + '/temp_dict.json', 'r') as file:
					temp_dict = json.load(file)
				mesh_file = temp_dict['mesh_file']
				
				
	else:
		raise InputError('cashocs.geometry.import_mesh', 'arg', 'Not a valid argument for import_mesh. Has to be either a path to a mesh file (str) or a config.')

	if mesh_file[-5:] == '.xdmf':
		file_string = mesh_file[:-5]
	else:
		raise InputError('cashocs.geometry.import_mesh', 'arg', 'Not a suitable mesh file format. Has to end in .xdmf.')

	mesh = fenics.Mesh()
	xdmf_file = fenics.XDMFFile(mesh.mpi_comm(), mesh_file)
	xdmf_file.read(mesh)
	xdmf_file.close()

	subdomains_mvc = fenics.MeshValueCollection('size_t', mesh, mesh.geometric_dimension())
	boundaries_mvc = fenics.MeshValueCollection('size_t', mesh, mesh.geometric_dimension() - 1)

	if os.path.isfile(file_string + '_subdomains.xdmf'):
		xdmf_subdomains = fenics.XDMFFile(mesh.mpi_comm(), file_string + '_subdomains.xdmf')
		xdmf_subdomains.read(subdomains_mvc, 'subdomains')
		xdmf_subdomains.close()
	if os.path.isfile(file_string + '_boundaries.xdmf'):
		xdmf_boundaries = fenics.XDMFFile(mesh.mpi_comm(), file_string + '_boundaries.xdmf')
		xdmf_boundaries.read(boundaries_mvc, 'boundaries')
		xdmf_boundaries.close()

	subdomains = fenics.MeshFunction('size_t', mesh, subdomains_mvc)
	boundaries = fenics.MeshFunction('size_t', mesh, boundaries_mvc)

	dx = fenics.Measure('dx', domain=mesh, subdomain_data=subdomains)
	ds = fenics.Measure('ds', domain=mesh, subdomain_data=boundaries)
	dS = fenics.Measure('dS', domain=mesh, subdomain_data=boundaries)

	end_time = time.time()
	info('Done Importing Mesh. Elapsed Time: ' + format(end_time - start_time, '.3e') + ' s\n')

	# Add an attribute to the mesh to show with what procedure it was generated
	mesh._cashocs_generator = mesh_attribute

	# Check the mesh quality of the imported mesh in case a config file is passed
	if type(arg) == configparser.ConfigParser:
		mesh_quality_tol_lower = arg.getfloat('MeshQuality', 'tol_lower', fallback=0.0)
		mesh_quality_tol_upper =  arg.getfloat('MeshQuality', 'tol_upper', fallback=1e-15)
		
		if not mesh_quality_tol_lower < mesh_quality_tol_upper:
			raise ConfigError('MeshQuality', 'tol_lower', 'The lower remeshing tolerance has to be strictly smaller than the upper remeshing tolerance')
	
		if mesh_quality_tol_lower > 0.9*mesh_quality_tol_upper:
			warning('You are using a lower remesh tolerance (tol_lower) close to the upper one (tol_upper). This may slow down the optimization considerably.')
		
		mesh_quality_measure = arg.get('MeshQuality', 'measure', fallback='skewness')
		if not mesh_quality_measure in ['skewness', 'maximum_angle', 'radius_ratios', 'condition_number']:
			raise ConfigError('MeshQuality', 'measure', 'Has to be one of \'skewness\', \'maximum_angle\', \'condition_number\', or \'radius_ratios\'.')
	
		mesh_quality_type = arg.get('MeshQuality', 'type', fallback='min')
		if not mesh_quality_type in ['min', 'minimum', 'avg', 'average']:
			raise ConfigError('MeshQuality', 'type', 'Has to be one of \'min\', \'minimum\', \'avg\', or \'average\'.')
		
		if mesh_quality_type in ['min', 'minimum']:
			if mesh_quality_measure == 'skewness':
				current_mesh_quality = MeshQuality.min_skewness(mesh)
			elif mesh_quality_measure == 'maximum_angle':
				current_mesh_quality = MeshQuality.min_maximum_angle(mesh)
			elif mesh_quality_measure == 'radius_ratios':
				current_mesh_quality = MeshQuality.min_radius_ratios(mesh)
			elif mesh_quality_measure == 'condition_number':
				current_mesh_quality = MeshQuality.min_condition_number(mesh)
	
		else:
			if mesh_quality_measure == 'skewness':
				current_mesh_quality = MeshQuality.avg_skewness(mesh)
			elif mesh_quality_measure == 'maximum_angle':
				current_mesh_quality = MeshQuality.avg_maximum_angle(mesh)
			elif mesh_quality_measure == 'radius_ratios':
				current_mesh_quality = MeshQuality.avg_radius_ratios(mesh)
			elif mesh_quality_measure == 'condition_number':
				current_mesh_quality = MeshQuality.avg_condition_number(mesh)
		
		if not ('_cashocs_remesh_flag' in sys.argv):
			if current_mesh_quality < mesh_quality_tol_lower:
				raise InputError('cashocs.geometry.import_mesh', 'arg',
								 'The quality of the mesh file you have specified is not sufficient for evaluating the cost functional.\n'
								 'It currently is ' + format(current_mesh_quality, '.3e') + ' but has to be at least ' + format(mesh_quality_tol_lower, '.3e') + '.')
			
			if current_mesh_quality < mesh_quality_tol_upper:
				raise InputError('cashocs.geometry.import_mesh', 'arg',
								 'The quality of the mesh file you have specified is not sufficient for computing the shape gradient.\n '
								 'It currently is ' + format(current_mesh_quality, '.3e') + ' but has to be at least ' + format(mesh_quality_tol_upper, '.3e') + '.')
		
		else:
			if current_mesh_quality < mesh_quality_tol_lower:
				raise InputError('cashocs.geometry.import_mesh', 'arg',
								 'Remeshing failed.\n'
								 'The quality of the mesh file generated through remeshing is not sufficient for evaluating the cost functional.\n'
								 'It currently is ' + format(current_mesh_quality, '.3e') + ' but has to be at least ' + format(mesh_quality_tol_lower, '.3e') + '.')
			
			if current_mesh_quality < mesh_quality_tol_upper:
				raise InputError('cashocs.geometry.import_mesh', 'arg',
								 'Remeshing failed.\n'
								 'The quality of the mesh file generated through remeshing is not sufficient for computing the shape gradient.\n '
								 'It currently is ' + format(current_mesh_quality, '.3e') + ' but has to be at least ' + format(mesh_quality_tol_upper, '.3e') + '.')
		
	return mesh, subdomains, boundaries, dx, ds, dS
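A short usage sketch for the function above; the mesh path is an assumption and must point to a GMSH mesh converted with cashocs-convert:

mesh, subdomains, boundaries, dx, ds, dS = import_mesh('./mesh/mesh.xdmf')

# The Physical regions from the GMSH file are available through the measures,
# e.g. the volume of the region tagged 1 and the area of the boundary tagged 1.
vol_1 = fenics.assemble(fenics.Constant(1.0) * dx(1))
area_1 = fenics.assemble(fenics.Constant(1.0) * ds(1))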
Example no. 22
def test_xdmf():

    solution_file = fenics.XDMFFile("test.xdmf")
Example no. 23
    def staggered_solve(self):
        self.U = fe.VectorFunctionSpace(self.mesh, 'CG', 1)
        self.W = fe.FunctionSpace(self.mesh, 'CG', 1)

        self.WW = fe.FunctionSpace(self.mesh, 'DG', 0)
        self.EE = fe.TensorFunctionSpace(self.mesh, 'DG', 0)
        self.MM = fe.VectorFunctionSpace(self.mesh, 'CG', 1)

        self.eta = fe.TestFunction(self.U)
        self.zeta = fe.TestFunction(self.W)
        q = fe.TestFunction(self.WW)

        del_x = fe.TrialFunction(self.U)
        del_d = fe.TrialFunction(self.W)
        p = fe.TrialFunction(self.WW)

        self.x_new = fe.Function(self.U, name="u")
        self.d_new = fe.Function(self.W, name="d")
        self.d_pre = fe.Function(self.W)
        self.x_pre = fe.Function(self.U)

        x_old = fe.Function(self.U)
        d_old = fe.Function(self.W)

        self.H_old = fe.Function(self.WW)

        self.map_plot = fe.Function(self.MM, name="m")
        e = fe.Function(self.EE, name="e")

        self.create_custom_xdmf_files()

        self.file_results = fe.XDMFFile('data/xdmf/{}/u.xdmf'.format(
            self.case_name))
        self.file_results.parameters["functions_share_mesh"] = True

        vtkfile_e = fe.File('data/pvd/simulation/{}/e.pvd'.format(
            self.case_name))
        vtkfile_u = fe.File('data/pvd/simulation/{}/u.pvd'.format(
            self.case_name))
        vtkfile_d = fe.File('data/pvd/simulation/{}/d.pvd'.format(
            self.case_name))

        for i, (disp, rp) in enumerate(
                zip(self.displacements, self.relaxation_parameters)):
            print('\n')
            print(
                '================================================================================='
            )
            print('>> Step {}, disp boundary condition = {} [mm]'.format(
                i, disp))
            print(
                '================================================================================='
            )
            self.i = i
            self.update_weak_form_due_to_Model_C_bug()

            if self.update_weak_form:
                self.set_bcs_staggered()
                print("Update weak form...")
                self.build_weak_form_staggered()

                print("Taking derivatives of weak form...")
                J_u = fe.derivative(self.G_u, self.x_new, del_x)
                J_d = fe.derivative(self.G_d, self.d_new, del_d)
                print("Define nonlinear problems...")
                p_u = fe.NonlinearVariationalProblem(self.G_u, self.x_new,
                                                     self.BC_u, J_u)
                p_d = fe.NonlinearVariationalProblem(self.G_d, self.d_new,
                                                     self.BC_d, J_d)
                print("Define solvers...")
                solver_u = fe.NonlinearVariationalSolver(p_u)
                solver_d = fe.NonlinearVariationalSolver(p_d)
                self.update_weak_form = False

                print("Update history weak form")
                a = p * q * fe.dx
                L = history(self.H_old, self.update_history(),
                            self.psi_cr) * q * fe.dx

                if self.map_flag:
                    self.interpolate_map()
                    # delta_x = self.x - self.x_hat
                    # self.map_plot.assign(fe.project(delta_x, self.MM))

            self.presLoad.t = disp

            newton_prm = solver_u.parameters['newton_solver']
            newton_prm['maximum_iterations'] = 100
            # newton_prm['absolute_tolerance'] = 1e-8
            newton_prm['relaxation_parameter'] = rp

            newton_prm = solver_d.parameters['newton_solver']
            newton_prm['maximum_iterations'] = 100
            # newton_prm['absolute_tolerance'] = 1e-8
            newton_prm['relaxation_parameter'] = rp

            vtkfile_e_staggered = fe.File(
                'data/pvd/simulation/{}/step{}/e.pvd'.format(
                    self.case_name, i))
            vtkfile_u_staggered = fe.File(
                'data/pvd/simulation/{}/step{}/u.pvd'.format(
                    self.case_name, i))
            vtkfile_d_staggered = fe.File(
                'data/pvd/simulation/{}/step{}/d.pvd'.format(
                    self.case_name, i))
            iteration = 0
            err = 1.
            while err > self.staggered_tol:
                iteration += 1

                solver_d.solve()

                solver_u.solve()

                if self.solution_scheme == 'explicit':
                    break

                # # Remarks (Tianju): self.x_new.vector() does not behave as expected and produces nan values
                # The following lines of code cause issues
                # We use an error measure similar to the one in https://doi.org/10.1007/s10704-019-00372-y
                # np_x_new = np.asarray(self.x_new.vector())
                # np_d_new = np.asarray(self.d_new.vector())
                # np_x_old = np.asarray(x_old.vector())
                # np_d_old = np.asarray(d_old.vector())
                # err_x = np.linalg.norm(np_x_new - np_x_old) / np.sqrt(len(np_x_new))
                # err_d = np.linalg.norm(np_d_new - np_d_old) / np.sqrt(len(np_d_new))
                # err = max(err_x, err_d)

                # # Remarks (Tianju): the dolfin (2019.1.0) errornorm function has severe bugs and does not behave as expected
                # The bug seems to be fixed in later versions
                # The following sometimes produces nonzero results in dolfin (2019.1.0):
                # print(fe.errornorm(self.d_new, self.d_new, norm_type='l2'))

                err_x = fe.errornorm(self.x_new, x_old, norm_type='l2')
                err_d = fe.errornorm(self.d_new, d_old, norm_type='l2')
                err = max(err_x, err_d)

                x_old.assign(self.x_new)
                d_old.assign(self.d_new)
                e.assign(
                    fe.project(strain(self.mfem_grad(self.x_new)), self.EE))

                print(
                    '---------------------------------------------------------------------------------'
                )
                print(
                    '>> iteration. {}, err_u = {:.5}, err_d = {:.5}, error = {:.5}'
                    .format(iteration, err_x, err_d, err))
                print(
                    '---------------------------------------------------------------------------------'
                )

                # vtkfile_e_staggered << e
                # vtkfile_u_staggered << self.x_new
                # vtkfile_d_staggered << self.d_new

                if err < self.staggered_tol or iteration >= self.staggered_maxiter:
                    print(
                        '================================================================================='
                    )
                    print('\n')
                    break

            print("L2 projection to update the history function...")
            fe.solve(a == L, self.H_old, [])

            # self.d_pre.assign(self.d_new)
            # self.H_old.assign(fe.project(history(self.H_old, self.update_history(), self.psi_cr), self.WW))

            if self.map_flag and not self.finish_flag:
                self.update_map()

            if self.compute_and_save_intermediate_results:
                print("Save files...")
                self.file_results.write(e, i)
                self.file_results.write(self.x_new, i)
                self.file_results.write(self.d_new, i)
                self.file_results.write(self.map_plot, i)

                vtkfile_e << e
                vtkfile_u << self.x_new
                vtkfile_d << self.d_new

                # Assume the boundary is not affected by the map.
                # There's no need to use the mfem_grad wrapper, so fe.grad is used for speed-up
                print("Define forces...")
                sigma = cauchy_stress_plus(strain(fe.grad(self.x_new)),
                                           self.psi)
                sigma_minus = cauchy_stress_minus(strain(fe.grad(self.x_new)),
                                                  self.psi_minus)
                sigma_plus = cauchy_stress_plus(strain(fe.grad(self.x_new)),
                                                self.psi_plus)
                sigma_degraded = g_d(self.d_new) * sigma_plus + sigma_minus

                print("Compute forces...")
                if self.case_name == 'pure_shear':
                    f_full = float(fe.assemble(sigma[0, 1] * self.ds(1)))
                    f_degraded = float(
                        fe.assemble(sigma_degraded[0, 1] * self.ds(1)))
                else:
                    f_full = float(fe.assemble(sigma[1, 1] * self.ds(1)))
                    f_degraded = float(
                        fe.assemble(sigma_degraded[1, 1] * self.ds(1)))

                print("Force full is {}".format(f_full))
                print("Force degraded is {}".format(f_degraded))
                self.delta_u_recorded.append(disp)
                self.force_full.append(f_full)
                self.force_degraded.append(f_degraded)

                # if force_upper < 0.5 and i > 10:
                #     break

                if self.display_intermediate_results and i % 10 == 0:
                    self.show_force_displacement()

                self.save_data_in_loop()

        if self.display_intermediate_results:
            plt.ioff()
            plt.show()
Example no. 24
u = fn.Function(V)
v = fn.TestFunction(V)
F = (ufl.inner(ufl.grad(u), ufl.grad(v)) - f_opt * v) * ufl.dx
bc = fn.DirichletBC(V, 0.0, "on_boundary")
fn.solve(F == 0, u, bc)
print(f"norm of f_opt is {fn.norm(f_opt)}")

# interpolation of UFL forms does not work in FEniCS, hence the projection
CG3 = fn.FunctionSpace(mesh, "CG", 3)
control_error = fn.errornorm(fn.project(f_analytic, CG3), f_opt)
state_error = fn.errornorm(fn.project(u_analytic, CG3), u)
print("h(min):           %e." % mesh.hmin())
print("Error in state:   %e." % state_error)
print("Error in control: %e." % control_error)

# Write solutions to an XDMFFile; they can be visualized with ParaView.
# The first time step is the approximated solution, the second time step is
# the analytic solution.

out_f = fn.XDMFFile("output/f_jax_scipy.xdmf")
out_f.write_checkpoint(f_opt, "f", 0.0, fn.XDMFFile.Encoding.HDF5, True)
out_f.write_checkpoint(
    fn.project(f_analytic, W), "f", 1.0, fn.XDMFFile.Encoding.HDF5, True
)

out_u = fn.XDMFFile("output/u_jax_scipy.xdmf")
out_u.write_checkpoint(u, "u", 0.0, fn.XDMFFile.Encoding.HDF5, True)
out_u.write_checkpoint(
    fn.project(u_analytic, V), "u", 1.0, fn.XDMFFile.Encoding.HDF5, True
)
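The checkpoints written above can be read back into a Function on a matching space with read_checkpoint; a hedged sketch (the counter 0 refers to the approximated solution, following the comment above):

f_read = fn.Function(W)
with fn.XDMFFile("output/f_jax_scipy.xdmf") as xdmf:
    xdmf.read_checkpoint(f_read, "f", 0)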
Example no. 25
def import_mesh(arg):
    """Imports a mesh file for use with CASHOCS / FEniCS.



	This function imports a mesh file that was generated by GMSH and converted to
	.xdmf with the command line function :ref:`cashocs-convert <cashocs_convert>`.
	If there are Physical quantities specified in the GMSH file, these are imported
	to the subdomains and boundaries output of this function and can also be directly
	accessed via the measures, e.g., with ``dx(1)``, ``ds(1)``, etc.

	Parameters
	----------
	arg : str or configparser.ConfigParser
		This is either a string, in which case it corresponds to the location
		of the mesh file in .xdmf file format, or a config file that
		has this path stored in its settings, under the section Mesh, as
		parameter ``mesh_file``.

	Returns
	-------
	mesh : dolfin.cpp.mesh.Mesh
		The imported (computational) mesh.
	subdomains : dolfin.cpp.mesh.MeshFunctionSizet
		A :py:class:`fenics.MeshFunction` object containing the subdomains,
		i.e., the Physical regions marked in the GMSH
		file.
	boundaries : dolfin.cpp.mesh.MeshFunctionSizet
		A MeshFunction object containing the boundaries,
		i.e., the Physical regions marked in the GMSH
		file. Can, e.g., be used to set up boundary
		conditions.
	dx : ufl.measure.Measure
		The volume measure of the mesh corresponding to
		the subdomains (i.e. GMSH Physical region indices).
	ds : ufl.measure.Measure
		The surface measure of the mesh corresponding to
		the boundaries (i.e. GMSH Physical region indices).
	dS : ufl.measure.Measure
		The interior facet measure of the mesh corresponding
		to boundaries (i.e. GMSH Physical region indices).
	"""

    start_time = time.time()
    print('Importing mesh to FEniCS')
    # Check for the file format

    if type(arg) == str:
        mesh_file = arg
        mesh_attribute = 'str'
    elif type(arg) == configparser.ConfigParser:
        mesh_attribute = 'config'
        ### overloading for remeshing
        if not arg.getboolean('Mesh', 'remesh', fallback=False):
            mesh_file = arg.get('Mesh', 'mesh_file')
        else:
            if not ('_cashocs_remesh_flag' in sys.argv):
                mesh_file = arg.get('Mesh', 'mesh_file')
            else:
                temp_dir = sys.argv[-1]
                with open(temp_dir + '/temp_dict.json', 'r') as file:
                    temp_dict = json.load(file)
                mesh_file = temp_dict['mesh_file']

    else:
        raise InputError(
            'cashocs.geometry.import_mesh', 'arg',
            'Not a valid argument for import_mesh. Has to be either a path to a mesh file (str) or a config.'
        )

    if mesh_file[-5:] == '.xdmf':
        file_string = mesh_file[:-5]
    else:
        raise InputError(
            'cashocs.geometry.import_mesh', 'arg',
            'Not a suitable mesh file format. Has to end in .xdmf.')

    mesh = fenics.Mesh()
    xdmf_file = fenics.XDMFFile(mesh.mpi_comm(), mesh_file)
    xdmf_file.read(mesh)
    xdmf_file.close()

    subdomains_mvc = fenics.MeshValueCollection('size_t', mesh,
                                                mesh.geometric_dimension())
    boundaries_mvc = fenics.MeshValueCollection('size_t', mesh,
                                                mesh.geometric_dimension() - 1)

    if os.path.isfile(file_string + '_subdomains.xdmf'):
        xdmf_subdomains = fenics.XDMFFile(mesh.mpi_comm(),
                                          file_string + '_subdomains.xdmf')
        xdmf_subdomains.read(subdomains_mvc, 'subdomains')
        xdmf_subdomains.close()
    if os.path.isfile(file_string + '_boundaries.xdmf'):
        xdmf_boundaries = fenics.XDMFFile(mesh.mpi_comm(),
                                          file_string + '_boundaries.xdmf')
        xdmf_boundaries.read(boundaries_mvc, 'boundaries')
        xdmf_boundaries.close()

    subdomains = fenics.MeshFunction('size_t', mesh, subdomains_mvc)
    boundaries = fenics.MeshFunction('size_t', mesh, boundaries_mvc)

    dx = fenics.Measure('dx', domain=mesh, subdomain_data=subdomains)
    ds = fenics.Measure('ds', domain=mesh, subdomain_data=boundaries)
    dS = fenics.Measure('dS', domain=mesh, subdomain_data=boundaries)

    end_time = time.time()
    print('Done Importing Mesh. Elapsed Time: ' +
          format(end_time - start_time, '.3e') + ' s')
    print('')

    # Add an attribute to the mesh to show with what procedure it was generated
    mesh._cashocs_generator = mesh_attribute

    return mesh, subdomains, boundaries, dx, ds, dS
Example no. 26
def run(output_dir="output/wang2010_natural_convection_air",
        rayleigh_number=1.e6,
        prandtl_number=0.71,
        stefan_number=0.045,
        heat_capacity=1.,
        thermal_conductivity=1.,
        liquid_viscosity=1.,
        solid_viscosity=1.e8,
        gravity=(0., -1.),
        m_B=None,
        ddT_m_B=None,
        penalty_parameter=1.e-7,
        temperature_of_fusion=-1.e12,
        regularization_smoothing_factor=0.005,
        mesh=fenics.UnitSquareMesh(fenics.dolfin.mpi_comm_world(), 20, 20,
                                   "crossed"),
        initial_values_expression=("0.", "0.", "0.",
                                   "0.5*near(x[0],  0.) -0.5*near(x[0],  1.)"),
        boundary_conditions=[{
            "subspace": 0,
            "value_expression": ("0.", "0."),
            "degree": 3,
            "location_expression":
            "near(x[0],  0.) | near(x[0],  1.) | near(x[1], 0.) | near(x[1],  1.)",
            "method": "topological"
        }, {
            "subspace": 2,
            "value_expression": "0.5",
            "degree": 2,
            "location_expression": "near(x[0],  0.)",
            "method": "topological"
        }, {
            "subspace": 2,
            "value_expression": "-0.5",
            "degree": 2,
            "location_expression": "near(x[0],  1.)",
            "method": "topological"
        }],
        start_time=0.,
        end_time=10.,
        time_step_size=1.e-3,
        stop_when_steady=True,
        steady_relative_tolerance=1.e-4,
        adaptive=False,
        adaptive_metric="all",
        adaptive_solver_tolerance=1.e-4,
        nlp_absolute_tolerance=1.e-8,
        nlp_relative_tolerance=1.e-8,
        nlp_max_iterations=50,
        restart=False,
        restart_filepath=""):
    """Run Phaseflow.
    
    Phaseflow is configured entirely through the arguments in this run() function.
    
    See the tests and examples for demonstrations of how to use this.
    """

    # Handle default function definitions.
    if m_B is None:

        def m_B(T, Ra, Pr, Re):

            return T * Ra / (Pr * Re**2)

    if ddT_m_B is None:

        def ddT_m_B(T, Ra, Pr, Re):

            return Ra / (Pr * Re**2)

    # Report arguments.
    phaseflow.helpers.print_once(
        "Running Phaseflow with the following arguments:")

    phaseflow.helpers.print_once(phaseflow.helpers.arguments())

    phaseflow.helpers.mkdir_p(output_dir)

    if fenics.MPI.rank(fenics.mpi_comm_world()) == 0:

        arguments_file = open(output_dir + "/arguments.txt", "w")

        arguments_file.write(str(phaseflow.helpers.arguments()))

        arguments_file.close()

    # Check if 1D/2D/3D.
    dimensionality = mesh.type().dim()

    phaseflow.helpers.print_once("Running " + str(dimensionality) +
                                 "D problem")

    # Initialize time.
    if restart:

        with h5py.File(restart_filepath, "r") as h5:

            time = h5["t"].value

            assert (abs(time - start_time) < TIME_EPS)

    else:

        time = start_time

    # Define the mixed finite element and the solution function space.
    W_ele = make_mixed_fe(mesh.ufl_cell())

    W = fenics.FunctionSpace(mesh, W_ele)

    # Set the initial values.
    if restart:

        mesh = fenics.Mesh()

        with fenics.HDF5File(mesh.mpi_comm(), restart_filepath, "r") as h5:

            h5.read(mesh, "mesh", True)

        W_ele = make_mixed_fe(mesh.ufl_cell())

        W = fenics.FunctionSpace(mesh, W_ele)

        w_n = fenics.Function(W)

        with fenics.HDF5File(mesh.mpi_comm(), restart_filepath, "r") as h5:

            h5.read(w_n, "w")

    else:

        w_n = fenics.interpolate(
            fenics.Expression(initial_values_expression, element=W_ele), W)

    # Organize the boundary conditions.
    bcs = []

    for item in boundary_conditions:

        bcs.append(
            fenics.DirichletBC(W.sub(item["subspace"]),
                               item["value_expression"],
                               item["location_expression"],
                               method=item["method"]))

    # Set the variational form.
    """Set local names for math operators to improve readability."""
    inner, dot, grad, div, sym = fenics.inner, fenics.dot, fenics.grad, fenics.div, fenics.sym
    """The linear, bilinear, and trilinear forms b, a, and c, follow the common notation 
    for applying the finite element method to the incompressible Navier-Stokes equations,
    e.g. from danaila2014newton and huerta2003fefluids.
    """
    def b(u, q):
        return -div(u) * q  # Divergence

    def D(u):

        return sym(grad(u))  # Symmetric part of velocity gradient

    def a(mu, u, v):

        return 2. * mu * inner(D(u), D(v))  # Stokes stress-strain

    def c(w, z, v):

        return dot(dot(grad(z), w), v)  # Convection of the velocity field

    dt = fenics.Constant(time_step_size)

    Re = fenics.Constant(reynolds_number)

    Ra = fenics.Constant(rayleigh_number)

    Pr = fenics.Constant(prandtl_number)

    Ste = fenics.Constant(stefan_number)

    C = fenics.Constant(heat_capacity)

    K = fenics.Constant(thermal_conductivity)

    g = fenics.Constant(gravity)

    def f_B(T):

        return m_B(T=T, Ra=Ra, Pr=Pr, Re=Re) * g  # Buoyancy force, $f = ma$

    gamma = fenics.Constant(penalty_parameter)

    T_f = fenics.Constant(temperature_of_fusion)

    r = fenics.Constant(regularization_smoothing_factor)

    def P(T):

        return 0.5 * (1. - fenics.tanh(
            (T_f - T) / r))  # Regularized phase field.

    mu_l = fenics.Constant(liquid_viscosity)

    mu_s = fenics.Constant(solid_viscosity)

    def mu(T):

        return mu_s + (mu_l - mu_s) * P(T)  # Variable viscosity.

    L = C / Ste  # Latent heat

    u_n, p_n, T_n = fenics.split(w_n)

    w_w = fenics.TrialFunction(W)

    u_w, p_w, T_w = fenics.split(w_w)

    v, q, phi = fenics.TestFunctions(W)

    w_k = fenics.Function(W)

    u_k, p_k, T_k = fenics.split(w_k)

    F = (b(u_k, q) - gamma * p_k * q + dot(u_k - u_n, v) / dt + c(u_k, u_k, v)
         + b(v, p_k) + a(mu(T_k), u_k, v) + dot(f_B(T_k), v) + C / dt *
         (T_k - T_n) * phi - dot(C * T_k * u_k, grad(phi)) +
         K / Pr * dot(grad(T_k), grad(phi)) + 1. / dt * L *
         (P(T_k) - P(T_n)) * phi) * fenics.dx

    def ddT_f_B(T):

        return ddT_m_B(T=T, Ra=Ra, Pr=Pr, Re=Re) * g

    def sech(theta):

        return 1. / fenics.cosh(theta)

    def dP(T):

        return sech((T_f - T) / r)**2 / (2. * r)

    def dmu(T):

        return (mu_l - mu_s) * dP(T)

    # Set the Jacobian (formally the Gateaux derivative) in variational form.
    JF = (b(u_w, q) - gamma * p_w * q + dot(u_w, v) / dt + c(u_k, u_w, v) +
          c(u_w, u_k, v) + b(v, p_w) + a(T_w * dmu(T_k), u_k, v) +
          a(mu(T_k), u_w, v) + dot(T_w * ddT_f_B(T_k), v) +
          C / dt * T_w * phi - dot(C * T_k * u_w, grad(phi)) -
          dot(C * T_w * u_k, grad(phi)) + K / Pr * dot(grad(T_w), grad(phi)) +
          1. / dt * L * T_w * dP(T_k) * phi) * fenics.dx

    # Set the functional metric for the error estimator for adaptive mesh refinement.
    """I haven't found a good way to make this flexible yet.
    Ideally the user would be able to write the metric, but this would require giving the user
    access to much data that phaseflow is currently hiding.
    """
    M = P(T_k) * fenics.dx

    if adaptive_metric == "phase_only":

        pass

    elif adaptive_metric == "all":

        M += T_k * fenics.dx

        for i in range(dimensionality):

            M += u_k[i] * fenics.dx

    else:

        assert (False)

    # Make the problem.
    problem = fenics.NonlinearVariationalProblem(F, w_k, bcs, JF)

    # Make the solvers.
    """ For the purposes of this project, it would be better to just always use the adaptive solver; but
    unfortunately the adaptive solver encounters nan's whenever evaluating the error for problems not 
    involving phase-change. So far my attempts at writing a MWE to reproduce the  issue have failed.
    """
    adaptive_solver = fenics.AdaptiveNonlinearVariationalSolver(problem, M)

    adaptive_solver.parameters["nonlinear_variational_solver"]["newton_solver"]["maximum_iterations"]\
        = nlp_max_iterations

    adaptive_solver.parameters["nonlinear_variational_solver"]["newton_solver"]["absolute_tolerance"]\
        = nlp_absolute_tolerance

    adaptive_solver.parameters["nonlinear_variational_solver"]["newton_solver"]["relative_tolerance"]\
        = nlp_relative_tolerance

    static_solver = fenics.NonlinearVariationalSolver(problem)

    static_solver.parameters["newton_solver"][
        "maximum_iterations"] = nlp_max_iterations

    static_solver.parameters["newton_solver"][
        "absolute_tolerance"] = nlp_absolute_tolerance

    static_solver.parameters["newton_solver"][
        "relative_tolerance"] = nlp_relative_tolerance

    # Open a context manager for the output file.
    with fenics.XDMFFile(output_dir + "/solution.xdmf") as solution_file:

        # Write the initial values.
        write_solution(solution_file, w_n, time)

        if start_time >= end_time - TIME_EPS:

            phaseflow.helpers.print_once(
                "Start time is already too close to end time. Only writing initial values."
            )

            return w_n, mesh

        # Solve each time step.
        progress = fenics.Progress("Time-stepping")

        fenics.set_log_level(fenics.PROGRESS)

        for it in range(1, MAX_TIME_STEPS):

            if (time > end_time - TIME_EPS):

                break

            if adaptive:

                adaptive_solver.solve(adaptive_solver_tolerance)

            else:

                static_solver.solve()

            time = start_time + it * time_step_size

            phaseflow.helpers.print_once("Reached time t = " + str(time))

            write_solution(solution_file, w_k, time)

            # Write checkpoint/restart files.
            restart_filepath = output_dir + "/restart_t" + str(time) + ".h5"

            with fenics.HDF5File(fenics.mpi_comm_world(), restart_filepath,
                                 "w") as h5:

                h5.write(mesh.leaf_node(), "mesh")

                h5.write(w_k.leaf_node(), "w")

            if fenics.MPI.rank(fenics.mpi_comm_world()) == 0:

                with h5py.File(restart_filepath, "r+") as h5:

                    h5.create_dataset("t", data=time)

            # Check for steady state.
            if stop_when_steady and steady(W, w_k, w_n,
                                           steady_relative_tolerance):

                phaseflow.helpers.print_once(
                    "Reached steady state at time t = " + str(time))

                break

            # Set initial values for next time step.
            w_n.leaf_node().vector()[:] = w_k.leaf_node().vector()

            # Report progress.
            progress.update(time / end_time)

            if time >= (end_time - fenics.dolfin.DOLFIN_EPS):

                phaseflow.helpers.print_once("Reached end time, t = " +
                                             str(end_time))

                break

    # Return the interpolant to sample inside of Python.
    w_k.rename("w", "state")

    return w_k, mesh
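write_solution is called above but not shown. A hedged sketch of a helper consistent with how it is called (phaseflow's actual implementation may differ): split the mixed solution and write each component to the shared XDMFFile.

def write_solution_sketch(solution_file, w, time):
    # Split the mixed solution into velocity, pressure and temperature views,
    # rename them for ParaView, and write them at the given time.
    velocity, pressure, temperature = w.leaf_node().split()
    velocity.rename("u", "velocity")
    pressure.rename("p", "pressure")
    temperature.rename("T", "temperature")
    for field in (velocity, pressure, temperature):
        solution_file.write(field, time)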