Example #1
# Imports reconstructed for this excerpt
import os

from dolfinx import MPI
from dolfinx.io import HDF5File
from dolfinx_utils.test.fixtures import tempdir  # noqa: F401


def test_mpi_atomicity(tempdir):
    comm_world = MPI.comm_world
    if MPI.size(comm_world) > 1:
        filename = os.path.join(tempdir, "mpiatomic.h5")
        with HDF5File(comm_world, filename, "w") as f:
            assert f.get_mpi_atomicity() is False
            f.set_mpi_atomicity(True)
            assert f.get_mpi_atomicity() is True
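
A standalone sketch of the same toggle outside pytest (assuming the same dolfinx-era API as above; run under MPI, e.g. with `mpirun -n 2 python demo.py`):

from dolfinx import MPI
from dolfinx.io import HDF5File

comm = MPI.comm_world
if MPI.size(comm) > 1:
    with HDF5File(comm, "mpiatomic_demo.h5", "w") as f:
        f.set_mpi_atomicity(True)
        print("atomicity:", f.get_mpi_atomicity())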
Example #2
# Copyright (C) 2014 Aslak Wigdahl Bergersen
#
# This file is part of DOLFINX (https://www.fenicsproject.org)
#
# SPDX-License-Identifier:    LGPL-3.0-or-later
"""Shared skips for unit tests involving dolfinx."""

import pytest

from dolfinx import MPI
from dolfinx.common import has_petsc_complex

# Skips with respect to parallel or serial
xfail_in_parallel = pytest.mark.xfail(
    MPI.size(MPI.comm_world) > 1,
    reason="This test does not yet work in parallel.")
skip_in_parallel = pytest.mark.skipif(
    MPI.size(MPI.comm_world) > 1,
    reason="This test should only be run in serial.")

# Skips with respect to the scalar type
skip_if_complex = pytest.mark.skipif(
    has_petsc_complex, reason="This test does not work in complex mode.")
xfail_if_complex = pytest.mark.xfail(
    has_petsc_complex, reason="This test does not work in complex mode.")
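
A brief usage sketch for the markers above (the test bodies are illustrative placeholders, not from the original file):

@skip_in_parallel
def test_runs_only_in_serial():
    assert MPI.size(MPI.comm_world) == 1


@xfail_if_complex
def test_expected_to_fail_in_complex_mode():
    assert not has_petsc_complex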
Example #3
# Imports reconstructed for this excerpt
import sys

import numpy as np
import pytest

from dolfinx import MPI, UnitCubeMesh, UnitIntervalMesh, UnitSquareMesh

# Tail of a preceding test (truncated in this excerpt): the FIAT and
# DOLFIN vertex orderings of an entity must agree.
                vertices_fiat = np.sort(
                    vertex_global_indices[np.array(entity_topology)])
                assert all(vertices_fiat == vertices_dolfin)


def test_mesh_topology_lifetime():
    """Check that lifetime of Mesh.topology is bound to underlying mesh object"""
    mesh = UnitSquareMesh(MPI.comm_world, 4, 4)
    rc = sys.getrefcount(mesh)
    topology = mesh.topology
    assert sys.getrefcount(mesh) == rc + 1
    del topology
    assert sys.getrefcount(mesh) == rc


@pytest.mark.xfail(condition=MPI.size(MPI.comm_world) > 1,
                   reason="Small meshes fail in parallel")
def test_small_mesh():
    mesh3d = UnitCubeMesh(MPI.comm_world, 1, 1, 1)
    gdim = mesh3d.geometry.dim
    assert mesh3d.num_entities_global(gdim) == 6

    mesh2d = UnitSquareMesh(MPI.comm_world, 1, 1)
    gdim = mesh2d.geometry.dim
    assert mesh2d.num_entities_global(gdim) == 2

    mesh1d = UnitIntervalMesh(MPI.comm_world, 2)
    gdim = mesh1d.geometry.dim
    assert mesh1d.num_entities_global(gdim) == 2
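
The lifetime test above leans on CPython reference counting; a generic sketch of the same ownership pattern (illustrative, not from dolfinx):

import sys


class Owner:
    @property
    def part(self):
        # The returned object keeps a reference back to its owner,
        # which is exactly what the Mesh.topology test checks for.
        return _Part(self)


class _Part:
    def __init__(self, owner):
        self._owner = owner


owner = Owner()
rc = sys.getrefcount(owner)
part = owner.part
assert sys.getrefcount(owner) == rc + 1
del part
assert sys.getrefcount(owner) == rc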

Example #4
import pytest
from petsc4py import PETSc

from dolfinx import (MPI, Function, FunctionSpace, Mesh, MeshFunction,
                     MeshValueCollection, TensorFunctionSpace, UnitCubeMesh,
                     UnitIntervalMesh, UnitSquareMesh, VectorFunctionSpace,
                     cpp, has_petsc_complex)
from dolfinx.cpp.mesh import CellType
from dolfinx.io import XDMFFile
from dolfinx_utils.test.fixtures import tempdir
from ufl import FiniteElement, VectorElement

assert (tempdir)  # mark the fixture as used so linters do not flag the import

# Supported XDMF file encodings
if MPI.size(MPI.comm_world) > 1:
    encodings = (XDMFFile.Encoding.HDF5, )
else:
    encodings = (XDMFFile.Encoding.HDF5, XDMFFile.Encoding.ASCII)

# Data types supported in templating
data_types = (('int', int), ('size_t', int), ('double', float))

# Finite elements tested
fe_1d_shapes = ["interval"]
fe_2d_shapes = ["triangle"]
fe_3d_shapes = ["tetrahedron"]
fe_families = ["CG", "DG"]
fe_degrees = [0, 1, 3]
topological_dim = [1, 2, 3]
number_cells = [6, 10]
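
# A hypothetical consumer of the module-level tuples above (not from the
# original file): downstream tests would sweep encodings and data types
# via pytest.mark.parametrize.
@pytest.mark.parametrize("encoding", encodings)
@pytest.mark.parametrize("type_str, dtype", data_types)
def test_parametrization_sketch(encoding, type_str, dtype):
    # One invocation per (encoding, data type) combination.
    assert isinstance(type_str, str) and callable(dtype)
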
Example #5
# Imports reconstructed for this excerpt
import pytest
import ufl
from ufl import Measure

from dolfinx import MPI, FunctionSpace, MeshFunction, UnitSquareMesh
from dolfinx.cpp.mesh import GhostMode


def dS_from_measure(mesh):
    boundaries = MeshFunction("size_t", mesh, mesh.topology.dim - 1, 1)
    dS = Measure("dS")(subdomain_data=boundaries, domain=mesh)
    return dS


def dS_from_measure_and_subdomain(mesh):
    dS = dS_from_measure(mesh)
    return dS(1)
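

# The parametrize lists below reference dx_from_ufl, dx_from_measure,
# dx_from_measure_and_subdomain and their ds counterparts, which this
# excerpt omits. A plausible reconstruction mirroring the dS helpers
# above (the names match the decorators; the bodies are assumptions):
def dx_from_ufl(mesh):
    return ufl.dx


def ds_from_ufl(mesh):
    return ufl.ds


def dx_from_measure(mesh):
    subdomains = MeshFunction("size_t", mesh, mesh.topology.dim, 1)
    return Measure("dx")(subdomain_data=subdomains, domain=mesh)


def dx_from_measure_and_subdomain(mesh):
    return dx_from_measure(mesh)(1)


def ds_from_measure(mesh):
    boundaries = MeshFunction("size_t", mesh, mesh.topology.dim - 1, 1)
    return Measure("ds")(subdomain_data=boundaries, domain=mesh)


def ds_from_measure_and_subdomain(mesh):
    return ds_from_measure(mesh)(1)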


@pytest.mark.parametrize("mode", [
    pytest.param(GhostMode.none),
    pytest.param(GhostMode.shared_facet,
                 marks=pytest.mark.xfail(
                     condition=MPI.size(MPI.comm_world) == 1,
                     reason="Shared ghost modes fail in serial")),
    pytest.param(GhostMode.shared_vertex,
                 marks=pytest.mark.xfail(
                     condition=MPI.size(MPI.comm_world) == 1,
                     reason="Shared ghost modes fail in serial"))
])
@pytest.mark.parametrize(
    "dx", [dx_from_ufl, dx_from_measure, dx_from_measure_and_subdomain])
@pytest.mark.parametrize(
    "ds", [ds_from_ufl, ds_from_measure, ds_from_measure_and_subdomain])
def test_ghost_mesh_assembly(mode, dx, ds):
    mesh = UnitSquareMesh(MPI.comm_world, 12, 12, ghost_mode=mode)
    V = FunctionSpace(mesh, ("Lagrange", 1))
    u, v = ufl.TrialFunction(V), ufl.TestFunction(V)
    dx = dx(mesh)
Example #6
# Imports reconstructed for this excerpt; the parametrize decorators that
# supply `degree` and `filename` (and the `datadir` fixture) are omitted.
import os

import numpy as np
import ufl
from petsc4py import PETSc
from ufl import (CellDiameter, FacetNormal, SpatialCoordinate, TestFunction,
                 TrialFunction, avg, div, dS, ds, dx, grad, inner, jump)

from dolfinx import MPI, Function, FunctionSpace
from dolfinx.cpp.mesh import GhostMode
from dolfinx.fem import assemble_matrix, assemble_scalar, assemble_vector
from dolfinx.io import XDMFFile


def test_manufactured_poisson_dg(degree, filename, datadir):
    """Manufactured Poisson problem, solving u = x[1]**n, where n is the
    degree of the discontinuous Lagrange (DG) function space."""
    with XDMFFile(MPI.comm_world, os.path.join(datadir, filename)) as xdmf:
        if MPI.size(MPI.comm_world) == 1:  # Serial
            mesh = xdmf.read_mesh(GhostMode.none)
        else:
            mesh = xdmf.read_mesh(GhostMode.shared_facet)

    V = FunctionSpace(mesh, ("DG", degree))
    u, v = TrialFunction(V), TestFunction(V)

    # Exact solution
    x = SpatialCoordinate(mesh)
    u_exact = x[1] ** degree

    # Coefficient
    k = Function(V)
    k.vector.set(2.0)
    k.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD)

    # Source term
    f = - div(k * grad(u_exact))

    # Mesh normals and element size
    n = FacetNormal(mesh)
    h = CellDiameter(mesh)
    h_avg = (h("+") + h("-")) / 2.0

    # Penalty parameter
    alpha = 32

    dx_ = dx(metadata={"quadrature_degree": -1})
    ds_ = ds(metadata={"quadrature_degree": -1})
    dS_ = dS(metadata={"quadrature_degree": -1})

    a = inner(k * grad(u), grad(v)) * dx_ \
        - k("+") * inner(avg(grad(u)), jump(v, n)) * dS_ \
        - k("+") * inner(jump(u, n), avg(grad(v))) * dS_ \
        + k("+") * (alpha / h_avg) * inner(jump(u, n), jump(v, n)) * dS_ \
        - inner(k * grad(u), v * n) * ds_ \
        - inner(u * n, k * grad(v)) * ds_ \
        + (alpha / h) * inner(k * u, v) * ds_
    L = inner(f, v) * dx_ - inner(k * u_exact * n, grad(v)) * ds_ \
        + (alpha / h) * inner(k * u_exact, v) * ds_

    # Override the placeholder quadrature degree (-1) set above with an
    # estimate of each form's total polynomial degree
    for integral in a.integrals():
        integral.metadata()["quadrature_degree"] = ufl.algorithms.estimate_total_polynomial_degree(a)
    for integral in L.integrals():
        integral.metadata()["quadrature_degree"] = ufl.algorithms.estimate_total_polynomial_degree(L)

    b = assemble_vector(L)
    b.ghostUpdate(addv=PETSc.InsertMode.ADD, mode=PETSc.ScatterMode.REVERSE)

    A = assemble_matrix(a, [])
    A.assemble()

    # Create LU linear solver
    solver = PETSc.KSP().create(MPI.comm_world)
    solver.setType(PETSc.KSP.Type.PREONLY)
    solver.getPC().setType(PETSc.PC.Type.LU)
    solver.setOperators(A)

    # Solve
    uh = Function(V)
    solver.solve(b, uh.vector)
    uh.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT,
                          mode=PETSc.ScatterMode.FORWARD)

    error = assemble_scalar((u_exact - uh)**2 * dx)
    error = MPI.sum(mesh.mpi_comm(), error)

    assert np.absolute(error) < 1.0e-14
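
    # Illustrative addition (not in the original): the reduced value is the
    # squared L2 error, so the norm itself is its square root.
    error_L2 = np.sqrt(np.absolute(error))
    assert error_L2 < 1.0e-7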