def test_save_2D_facet_function(tempdir, encoding, data_type):
    """Write a 2D facet MeshFunction to XDMF and read it back unchanged.

    Facet values are the (global, in parallel) facet indices so the
    round-trip can be checked value-by-value.
    """
    dtype_str, dtype = data_type
    mesh = UnitSquareMesh(MPI.comm_world, 32, 32)
    mf = MeshFunction(dtype_str, mesh, mesh.topology.dim - 1, 0)
    mf.rename("facets")

    if MPI.size(mesh.mpi_comm()) == 1:
        # Serial: local index equals global index
        for facet in Facets(mesh):
            mf[facet] = dtype(facet.index())
    else:
        for facet in Facets(mesh):
            mf[facet] = dtype(facet.global_index())

    filename = os.path.join(tempdir, "mf_facet_2D_%s.xdmf" % dtype_str)
    with XDMFFile(mesh.mpi_comm(), filename, encoding=encoding) as xdmf:
        xdmf.write(mf)

    with XDMFFile(mesh.mpi_comm(), filename) as xdmf:
        read_function = getattr(xdmf, "read_mf_" + dtype_str)
        mf_in = read_function(mesh, "facets")

    # Accumulate *absolute* differences: the original summed signed
    # differences, which could cancel (+1 and -1) and pass on wrong data.
    diff = 0
    for facet in Facets(mesh):
        diff += abs(mf_in[facet] - mf[facet])
    assert diff == 0
def test_save_and_read_function_timeseries(tempdir):
    """Round-trip a time series of a CG3 function through an HDF5 file."""
    filename = os.path.join(tempdir, "function.h5")
    mesh = UnitSquareMesh(MPI.comm_world, 10, 10)
    Q = FunctionSpace(mesh, ("CG", 3))
    F0 = Function(Q)
    F1 = Function(Q)

    # 'expr_eval' closes over 't'; rebinding 't' below changes the
    # interpolated field at each step.
    t = 0.0

    @function.expression.numba_eval
    def expr_eval(values, x, cell_idx):
        values[:, 0] = t * x[:, 0]

    E = Expression(expr_eval)
    F0.interpolate(E)

    # Write one vector per time step
    hdf5_file = HDF5File(mesh.mpi_comm(), filename, "w")
    for t in range(10):
        F0.interpolate(E)
        hdf5_file.write(F0, "/function", t)
    hdf5_file.close()

    # Read each step back and compare against a freshly interpolated field
    hdf5_file = HDF5File(mesh.mpi_comm(), filename, "r")
    for t in range(10):
        F1.interpolate(E)
        vec_name = "/function/vector_%d" % t
        F0 = hdf5_file.read_function(Q, vec_name)
        # timestamp = hdf5_file.attributes(vec_name)["timestamp"]
        # assert timestamp == t
        F0.vector().axpy(-1.0, F1.vector())
        assert F0.vector().norm(cpp.la.Norm.l2) < 1.0e-12
    hdf5_file.close()
def test_save_and_read_function(tempdir):
    """Round-trip a single CG3 function through an HDF5 file."""
    filename = os.path.join(tempdir, "function.h5")
    mesh = UnitSquareMesh(MPI.comm_world, 10, 10)
    Q = FunctionSpace(mesh, ("CG", 3))
    F0 = Function(Q)
    F1 = Function(Q)

    @function.expression.numba_eval
    def expr_eval(values, x, cell_idx):
        values[:, 0] = x[:, 0]

    E = Expression(expr_eval)
    F0.interpolate(E)

    # Write the function to HDF5
    h5 = HDF5File(mesh.mpi_comm(), filename, "w")
    h5.write(F0, "/function")
    h5.close()

    # Read it back and verify the vectors agree
    h5 = HDF5File(mesh.mpi_comm(), filename, "r")
    F1 = h5.read_function(Q, "/function")
    F0.vector().axpy(-1.0, F1.vector())
    assert F0.vector().norm(dolfin.cpp.la.Norm.l2) < 1.0e-12
    h5.close()
def test_save_and_checkpoint_timeseries(tempdir, encoding):
    """Checkpoint several time steps to XDMF and read them back."""
    mesh = UnitSquareMesh(MPI.comm_world, 16, 16)
    filename = os.path.join(tempdir, "u2_checkpoint.xdmf")
    FE = FiniteElement("CG", mesh.ufl_cell(), 2)
    V = FunctionSpace(mesh, FE)

    times = [0.5, 0.2, 0.1]
    u_out = [None] * len(times)
    u_in = [None] * len(times)

    # 'expr_eval' closes over 'pt'; rebinding it in the loop below changes
    # the interpolated field at each checkpoint.
    pt = 0.0

    def expr_eval(values, x):
        values[:, 0] = x[:, 0] * pt

    with XDMFFile(mesh.mpi_comm(), filename, encoding=encoding) as file:
        for i, pt in enumerate(times):
            u_out[i] = interpolate(expr_eval, V)
            file.write_checkpoint(u_out[i], "u_out", pt)

    with XDMFFile(mesh.mpi_comm(), filename) as file:
        for i, pt in enumerate(times):
            u_in[i] = file.read_checkpoint(V, "u_out", i)

    for i, pt in enumerate(times):
        u_in[i].vector.axpy(-1.0, u_out[i].vector)
        assert u_in[i].vector.norm() < 1.0e-12

    # Reading with index -1 must return the last checkpoint
    with XDMFFile(mesh.mpi_comm(), filename) as file:
        u_in_last = file.read_checkpoint(V, "u_out", -1)
    u_out[-1].vector.axpy(-1.0, u_in_last.vector)
    assert u_out[-1].vector.norm() < 1.0e-12
def test_save_and_read_function_timeseries(tempdir):
    """Round-trip a time series of a CG3 function through an HDF5 file."""
    filename = os.path.join(tempdir, "function.h5")
    mesh = UnitSquareMesh(MPI.comm_world, 10, 10)
    Q = FunctionSpace(mesh, ("CG", 3))
    F0 = Function(Q)
    F1 = Function(Q)

    # 'E' closes over 't'; rebinding 't' below changes the field.
    t = 0.0

    def E(x):
        return t * x[:, 0]

    F0.interpolate(E)

    # Write one vector per time step
    h5 = HDF5File(mesh.mpi_comm(), filename, "w")
    for t in range(10):
        F0.interpolate(E)
        h5.write(F0, "/function", t)
    h5.close()

    # Read each step back and compare against a fresh interpolation
    h5 = HDF5File(mesh.mpi_comm(), filename, "r")
    for t in range(10):
        F1.interpolate(E)
        vec_name = "/function/vector_{}".format(t)
        F0 = h5.read_function(Q, vec_name)
        # timestamp = h5.attributes(vec_name)["timestamp"]
        # assert timestamp == t
        F0.vector.axpy(-1.0, F1.vector)
        assert F0.vector.norm() < 1.0e-12
    h5.close()
def test_save_and_read_mesh_2D(tempdir):
    """Write a 2D mesh to HDF5 and read it back, with and without
    reusing the stored partition, checking global entity counts."""
    filename = os.path.join(tempdir, "mesh2d.h5")

    # Write to file
    mesh0 = UnitSquareMesh(MPI.comm_world, 20, 20)
    mesh_file = HDF5File(mesh0.mpi_comm(), filename, "w")
    mesh_file.write(mesh0, "/my_mesh")
    mesh_file.close()

    # Read from file (re-partitioning)
    mesh_file = HDF5File(mesh0.mpi_comm(), filename, "r")
    mesh1 = mesh_file.read_mesh("/my_mesh", False, cpp.mesh.GhostMode.none)
    mesh_file.close()
    assert mesh0.num_entities_global(0) == mesh1.num_entities_global(0)
    dim = mesh0.topology.dim
    assert mesh0.num_entities_global(dim) == mesh1.num_entities_global(dim)

    # Read from file, and use partition from file
    mesh_file = HDF5File(mesh0.mpi_comm(), filename, "r")
    mesh2 = mesh_file.read_mesh("/my_mesh", True, cpp.mesh.GhostMode.none)
    mesh_file.close()
    assert mesh0.num_cells() == mesh2.num_cells()
    dim = mesh0.topology.dim
    # Bug fix: the original compared mesh1 here, so mesh2's global cell
    # count was never actually checked.
    assert mesh0.num_entities_global(dim) == mesh2.num_entities_global(dim)
def test_save_and_checkpoint_timeseries(tempdir, encoding):
    """Checkpoint a time series (Expression-based) to XDMF and read back."""
    mesh = UnitSquareMesh(MPI.comm_world, 16, 16)
    filename = os.path.join(tempdir, "u2_checkpoint.xdmf")
    FE = FiniteElement("CG", mesh.ufl_cell(), 2)
    V = FunctionSpace(mesh, FE)

    times = [0.5, 0.2, 0.1]
    u_out = [None] * len(times)
    u_in = [None] * len(times)

    with XDMFFile(mesh.mpi_comm(), filename, encoding=encoding) as file:
        for i, p in enumerate(times):
            u_out[i] = interpolate(Expression("x[0]*p", p=p, degree=1), V)
            file.write_checkpoint(u_out[i], "u_out", p)

    with XDMFFile(mesh.mpi_comm(), filename) as file:
        for i, p in enumerate(times):
            u_in[i] = file.read_checkpoint(V, "u_out", i)

    for i, p in enumerate(times):
        u_in[i].vector().axpy(-1.0, u_out[i].vector())
        assert u_in[i].vector().norm(cpp.la.Norm.l2) < 1.0e-12

    # Reading with index -1 must return the last checkpoint
    with XDMFFile(mesh.mpi_comm(), filename) as file:
        u_in_last = file.read_checkpoint(V, "u_out", -1)
    u_out[-1].vector().axpy(-1.0, u_in_last.vector())
    assert u_out[-1].vector().norm(cpp.la.Norm.l2) < 1.0e-12
def test_save_and_read_function(tempdir):
    """Round-trip a CG3 function (callable-interpolated) through HDF5."""
    filename = os.path.join(tempdir, "function.h5")
    mesh = UnitSquareMesh(MPI.comm_world, 10, 10)
    Q = FunctionSpace(mesh, ("CG", 3))
    F0 = Function(Q)
    F1 = Function(Q)

    def E(values, x):
        values[:, 0] = x[:, 0]

    F0.interpolate(E)

    # Write the function to HDF5
    h5 = HDF5File(mesh.mpi_comm(), filename, "w")
    h5.write(F0, "/function")
    h5.close()

    # Read it back and compare vectors
    h5 = HDF5File(mesh.mpi_comm(), filename, "r")
    F1 = h5.read_function(Q, "/function")
    F0.vector().axpy(-1.0, F1.vector())
    assert F0.vector().norm() < 1.0e-12
    h5.close()
def test_ghost_2d(mode):
    """Check entity counts of a ghosted 8x8 unit square mesh."""
    N = 8
    num_cells = N * N * 2
    mesh = UnitSquareMesh(MPI.comm_world, N, N, ghost_mode=mode)

    if MPI.size(mesh.mpi_comm()) > 1:
        # With ghost cells, the sum of local counts exceeds the global count
        assert MPI.sum(mesh.mpi_comm(), mesh.num_cells()) > num_cells

    assert mesh.num_entities_global(0) == 81
    assert mesh.num_entities_global(2) == num_cells
def test_save_points_2D(tempdir, encoding):
    """Write mesh points to XDMF, both bare and with per-point values."""
    mesh = UnitSquareMesh(MPI.comm_world, 16, 16)
    points = mesh.geometry.points
    # Per-point value: distance from the origin
    vals = numpy.linalg.norm(points, axis=1)

    fname = os.path.join(tempdir, "points_2D.xdmf")
    with XDMFFile(mesh.mpi_comm(), fname, encoding=encoding) as file:
        file.write(points)

    fname = os.path.join(tempdir, "points_values_2D.xdmf")
    with XDMFFile(mesh.mpi_comm(), fname, encoding=encoding) as file:
        file.write(points, vals)
def test_compute_entity_collisions_tree_2d(self):
    """Tree-tree entity collisions between a mesh and translated copies."""
    references = [[set([20, 21, 22, 23, 28, 29, 30, 31]),
                   set([0, 1, 2, 3, 8, 9, 10, 11])],
                  [set([6]), set([25])]]
    points = [Point(0.52, 0.51), Point(0.9, -0.9)]

    for i, point in enumerate(points):
        mesh_A = UnitSquareMesh(4, 4)
        mesh_B = UnitSquareMesh(4, 4)
        # Shift the second mesh so the overlap region is known
        mesh_B.translate(point)

        tree_A = BoundingBoxTree()
        tree_A.build(mesh_A)
        tree_B = BoundingBoxTree()
        tree_B.build(mesh_B)
        entities_A, entities_B = tree_A.compute_entity_collisions(tree_B)

        # Reference entity indices are only valid in serial
        if MPI.size(mesh_A.mpi_comm()) == 1:
            self.assertEqual(set(entities_A), references[i][0])
            self.assertEqual(set(entities_B), references[i][1])
def test_krylov_solver_lu():
    """Solve a mass-matrix system with a preonly/LU PETSc Krylov solver."""
    mesh = UnitSquareMesh(MPI.comm_world, 12, 12)
    V = FunctionSpace(mesh, ("Lagrange", 1))
    u, v = TrialFunction(V), TestFunction(V)
    a = inner(u, v) * dx
    L = inner(1.0, v) * dx

    A = assemble_matrix(a)
    A.assemble()
    b = assemble_vector(L)
    b.ghostUpdate(addv=PETSc.InsertMode.ADD, mode=PETSc.ScatterMode.REVERSE)

    # Expected l2 norm of the solution
    norm = 13.0

    solver = PETScKrylovSolver(mesh.mpi_comm())
    solver.set_options_prefix("test_lu_")
    PETScOptions.set("test_lu_ksp_type", "preonly")
    PETScOptions.set("test_lu_pc_type", "lu")
    solver.set_from_options()

    x = A.createVecRight()
    solver.set_operator(A)
    solver.solve(x, b)

    # *Tight* tolerance for LU solves
    assert round(x.norm(PETSc.NormType.N2) - norm, 12) == 0
def test_UnitSquareMeshDistributed():
    """Create mesh of unit square and check its global entity counts."""
    mesh = UnitSquareMesh(MPI.comm_world, 5, 7)
    # A 5x7 triangulated square has 6*8 = 48 vertices and 5*7*2 = 70 cells
    assert mesh.num_entities_global(0) == 48
    assert mesh.num_entities_global(2) == 70
    assert mesh.geometry.dim == 2
    assert MPI.sum(mesh.mpi_comm(), mesh.topology.ghost_offset(0)) == 48
def mesh():
    """Fixture: a 3x3 unit square mesh for submesh tests."""
    mesh = UnitSquareMesh(3, 3)
    assert MPI.size(mesh.mpi_comm()) in (1, 2, 3, 4)
    # 1 processor   -> serial case
    # 2-3 processors -> submesh contained on only one processor
    # 4 processors  -> submesh shared by two processors, giving shared
    #                  facets and vertices
    return mesh
def test_multiple_datasets(tempdir, encoding):
    """Write two named MeshFunctions into one XDMF file and read both back."""
    mesh = UnitSquareMesh(MPI.comm_world, 2, 2)
    cf0 = MeshFunction('size_t', mesh, 2, 11)
    cf0.name = 'cf0'
    cf1 = MeshFunction('size_t', mesh, 2, 22)
    cf1.name = 'cf1'
    filename = os.path.join(tempdir, "multiple_mf.xdmf")

    with XDMFFile(mesh.mpi_comm(), filename, encoding=encoding) as xdmf:
        xdmf.write(mesh)
        xdmf.write(cf0)
        xdmf.write(cf1)

    with XDMFFile(mesh.mpi_comm(), filename) as xdmf:
        mesh = xdmf.read_mesh(cpp.mesh.GhostMode.none)
        cf0 = xdmf.read_mf_size_t(mesh, "cf0")
        cf1 = xdmf.read_mf_size_t(mesh, "cf1")

    # Each dataset must retain its own constant value
    assert (cf0.values[0] == 11 and cf1.values[0] == 22)
def test_save_2d_tensor(tempdir, encoding):
    """Write a constant tensor-valued P2 function to XDMF."""
    filename = os.path.join(tempdir, "tensor.xdmf")
    mesh = UnitSquareMesh(MPI.comm_world, 16, 16)
    u = Function(TensorFunctionSpace(mesh, ("Lagrange", 2)))
    # Use a complex constant when PETSc is built with complex scalars
    u.vector.set(1.0 + (1j if has_petsc_complex else 0))
    with XDMFFile(mesh.mpi_comm(), filename, encoding=encoding) as file:
        file.write(u)
def test_save_2D_cell_function(tempdir, encoding, data_type):
    """Write a 2D cell MeshFunction to XDMF and read it back unchanged."""
    dtype_str, dtype = data_type
    filename = os.path.join(tempdir, "mf_2D_%s.xdmf" % dtype_str)
    mesh = UnitSquareMesh(MPI.comm_world, 32, 32)
    mf = MeshFunction(dtype_str, mesh, mesh.topology.dim, 0)
    mf.name = "cells"
    # Tag each cell with its local index
    mf.values[:] = numpy.arange(mesh.num_entities(2), dtype=dtype)

    with XDMFFile(mesh.mpi_comm(), filename, encoding=encoding) as file:
        file.write(mf)

    with XDMFFile(mesh.mpi_comm(), filename) as xdmf:
        read_function = getattr(xdmf, "read_mf_" + dtype_str)
        mf_in = read_function(mesh, "cells")

    diff = mf_in.values - mf.values
    assert numpy.all(diff == 0)
def test_mesh_function_assign_2D_cells():
    """Assign cell MeshFunction values to/from MeshValueCollections."""
    mesh = UnitSquareMesh(MPI.comm_world, 3, 3)
    ncells = mesh.num_cells()

    f = MeshFunction("int", mesh, mesh.topology.dim, 0)
    for c in range(ncells):
        f.values[c] = ncells - c

    g = MeshValueCollection("int", mesh, 2)
    g.assign(f)
    assert ncells == len(f.values)
    assert ncells == g.size()

    # A MeshFunction built from the collection must reproduce the values
    f2 = MeshFunction("int", mesh, g, 0)
    for c in range(mesh.num_cells()):
        value = ncells - c
        assert value == g.get_value(c, 0)
        assert f2.values[c] == g.get_value(c, 0)

    # Build a partial collection, skipping some global cell indices
    h = MeshValueCollection("int", mesh, 2)
    global_indices = mesh.topology.global_indices(2)
    ncells_global = mesh.num_entities_global(2)
    for c in range(mesh.num_cells()):
        if global_indices[c] in [5, 8, 10]:
            continue
        value = ncells_global - global_indices[c]
        h.set_value(c, int(value))

    f3 = MeshFunction("int", mesh, h, 0)
    values = f3.values
    # Zero out entries that were never set (default-filled)
    values[values > ncells_global] = 0.
    assert MPI.sum(mesh.mpi_comm(), values.sum() * 1.0) == 140.
def test_krylov_solver_lu():
    """Solve a mass-matrix system directly via petsc4py KSP (preonly/LU)."""
    mesh = UnitSquareMesh(MPI.comm_world, 12, 12)
    V = FunctionSpace(mesh, ("Lagrange", 1))
    u, v = TrialFunction(V), TestFunction(V)
    a = inner(u, v) * dx
    L = inner(1.0, v) * dx

    A = assemble_matrix(a)
    A.assemble()
    b = assemble_vector(L)
    b.ghostUpdate(addv=PETSc.InsertMode.ADD, mode=PETSc.ScatterMode.REVERSE)

    # Expected l2 norm of the solution
    norm = 13.0

    solver = PETSc.KSP().create(mesh.mpi_comm())
    solver.setOptionsPrefix("test_lu_")
    opts = PETSc.Options("test_lu_")
    opts["ksp_type"] = "preonly"
    opts["pc_type"] = "lu"
    solver.setFromOptions()

    x = A.createVecRight()
    solver.setOperators(A)
    solver.solve(b, x)

    # *Tight* tolerance for LU solves
    assert x.norm(PETSc.NormType.N2) == pytest.approx(norm, abs=1.0e-12)
def test_mesh_function_assign_2D_cells():
    """Assign cell MeshFunction values via cell iteration (legacy API)."""
    mesh = UnitSquareMesh(MPI.comm_world, 3, 3)
    ncells = mesh.num_cells()

    f = MeshFunction("int", mesh, mesh.topology.dim, 0)
    for cell in Cells(mesh):
        f[cell] = ncells - cell.index()

    g = MeshValueCollection("int", mesh, 2)
    g.assign(f)
    assert ncells == f.size()
    assert ncells == g.size()

    # A MeshFunction built from the collection must reproduce the values
    f2 = MeshFunction("int", mesh, g, 0)
    for cell in Cells(mesh):
        value = ncells - cell.index()
        assert value == g.get_value(cell.index(), 0)
        assert f2[cell] == g.get_value(cell.index(), 0)

    # Build a partial collection, skipping some global cell indices
    h = MeshValueCollection("int", mesh, 2)
    global_indices = mesh.topology.global_indices(2)
    ncells_global = mesh.num_entities_global(2)
    for cell in Cells(mesh):
        if global_indices[cell.index()] in [5, 8, 10]:
            continue
        value = ncells_global - global_indices[cell.index()]
        h.set_value(cell.index(), int(value))

    f3 = MeshFunction("int", mesh, h, 0)
    values = f3.array()
    # Zero out entries that were never set (default-filled)
    values[values > ncells_global] = 0.
    assert MPI.sum(mesh.mpi_comm(), values.sum() * 1.0) == 140.
def test_save_2D_vertex_function(tempdir, encoding, data_type):
    """Write a vertex MeshFunction (global vertex indices) and read back."""
    dtype_str, dtype = data_type
    mesh = UnitSquareMesh(MPI.comm_world, 32, 32)
    mf = MeshFunction(dtype_str, mesh, 0, 0)
    mf.name = "vertices"
    # Tag each vertex with its global index
    global_indices = mesh.topology.global_indices(0)
    mf.values[:] = global_indices[:]
    filename = os.path.join(tempdir, "mf_vertex_2D_%s.xdmf" % dtype_str)

    with XDMFFile(mesh.mpi_comm(), filename, encoding=encoding) as file:
        file.write(mf)

    with XDMFFile(mesh.mpi_comm(), filename) as xdmf:
        read_function = getattr(xdmf, "read_mf_" + dtype_str)
        mf_in = read_function(mesh, "vertices")

    diff = mf_in.values - mf.values
    assert numpy.all(diff == 0)
def test_save_points_2D(tempdir, encoding):
    """Write vertex points to XDMF, bare and with per-point norms (legacy)."""
    mesh = UnitSquareMesh(MPI.comm_world, 16, 16)
    points, values = [], []
    for v in Vertices(mesh):
        points.append(v.point())
        # Per-point value: distance from the origin
        values.append(v.point().norm())
    vals = numpy.array(values)

    fname = os.path.join(tempdir, "points_2D.xdmf")
    with XDMFFile(mesh.mpi_comm(), fname, encoding=encoding) as file:
        file.write(points)

    fname = os.path.join(tempdir, "points_values_2D.xdmf")
    with XDMFFile(mesh.mpi_comm(), fname, encoding=encoding) as file:
        file.write(points, vals)
def test_compute_first_entity_collision_2d(self):
    """First-entity collision for a point, via explicit and cached trees."""
    # Either of the two cells sharing the point is acceptable
    reference = [136, 137]
    p = Point(0.3, 0.3)
    mesh = UnitSquareMesh(16, 16)

    tree = BoundingBoxTree()
    tree.build(mesh)
    first = tree.compute_first_entity_collision(p)
    if MPI.size(mesh.mpi_comm()) == 1:
        self.assertIn(first, reference)

    # Repeat with the mesh's own cached tree
    tree = mesh.bounding_box_tree()
    first = tree.compute_first_entity_collision(p)
    if MPI.size(mesh.mpi_comm()) == 1:
        self.assertIn(first, reference)
def test_save_2d_scalar(tempdir, encoding):
    """Write a constant scalar P2 function to XDMF."""
    filename = os.path.join(tempdir, "u2.xdmf")
    mesh = UnitSquareMesh(MPI.comm_world, 16, 16)
    # FIXME: This randomly hangs in parallel
    V = FunctionSpace(mesh, ("Lagrange", 2))
    u = Function(V)
    # Use a complex constant when PETSc is built with complex scalars
    u.vector.set(1.0 + (1j if has_petsc_complex else 0))
    with XDMFFile(mesh.mpi_comm(), filename, encoding=encoding) as file:
        file.write(u)
def test_compute_entity_collisions_2d(self):
    """All entity collisions for a point, via explicit and cached trees."""
    reference = set([136, 137])
    p = Point(0.3, 0.3)
    mesh = UnitSquareMesh(16, 16)

    tree = BoundingBoxTree()
    tree.build(mesh)
    entities = tree.compute_entity_collisions(p)
    if MPI.size(mesh.mpi_comm()) == 1:
        self.assertEqual(set(entities), reference)

    # Repeat with the mesh's own cached tree
    tree = mesh.bounding_box_tree()
    entities = tree.compute_entity_collisions(p)
    if MPI.size(mesh.mpi_comm()) == 1:
        self.assertEqual(set(entities), reference)
def test_compute_first_collision_2d(self):
    """First collision of a point against trees of each entity dimension."""
    # Expected entity indices per tree dimension (serial numbering)
    reference = {1: [226], 2: [136, 137]}
    p = Point(0.3, 0.3)
    mesh = UnitSquareMesh(16, 16)

    for dim in range(1, 3):
        tree = BoundingBoxTree()
        tree.build(mesh, dim)
        first = tree.compute_first_collision(p)
        if MPI.size(mesh.mpi_comm()) == 1:
            self.assertIn(first, reference[dim])

    # The mesh's cached tree is built over cells (topological dimension)
    tree = mesh.bounding_box_tree()
    first = tree.compute_first_collision(p)
    if MPI.size(mesh.mpi_comm()) == 1:
        self.assertIn(first, reference[mesh.topology().dim()])
def test_save_2d_vector(tempdir, encoding):
    """Write a constant vector-valued P2 function to XDMF."""
    filename = os.path.join(tempdir, "u_2dv.xdmf")
    mesh = UnitSquareMesh(MPI.comm_world, 16, 16)
    V = VectorFunctionSpace(mesh, ("Lagrange", 2))
    u = Function(V)
    # First component is complex when PETSc uses complex scalars
    c = Constant((1.0 + (1j if has_petsc_complex else 0), 2.0))
    u.interpolate(c)
    with XDMFFile(mesh.mpi_comm(), filename, encoding=encoding) as file:
        file.write(u)
def test_krylov_reuse_pc_lu():
    """Test that LU re-factorisation is only performed after
    set_operator(A) is called"""

    # Test requires PETSc version 3.5 or later. Use petsc4py to check
    # version number.
    try:
        from petsc4py import PETSc
    except ImportError:
        pytest.skip("petsc4py required to check PETSc version")
    else:
        if not PETSc.Sys.getVersion() >= (3, 5, 0):
            # Fixed typo in the skip message ("of higher" -> "or higher")
            pytest.skip("PETSc version must be 3.5 or higher")

    mesh = UnitSquareMesh(MPI.comm_world, 12, 12)
    V = FunctionSpace(mesh, ("Lagrange", 1))
    u, v = TrialFunction(V), TestFunction(V)
    a = Constant(1.0) * u * v * dx
    L = Constant(1.0) * v * dx
    assembler = fem.Assembler(a, L)
    A = assembler.assemble_matrix()
    b = assembler.assemble_vector()

    norm = 13.0

    solver = PETScKrylovSolver(mesh.mpi_comm())
    solver.set_options_prefix("test_lu_")
    PETScOptions.set("test_lu_ksp_type", "preonly")
    PETScOptions.set("test_lu_pc_type", "lu")
    solver.set_from_options()
    solver.set_operator(A)
    x = PETScVector(mesh.mpi_comm())
    solver.solve(x, b)
    assert round(x.norm(cpp.la.Norm.l2) - norm, 10) == 0

    # Re-assemble A in place with a scaled form; without calling
    # set_operator the solver must keep using the old factorisation
    assembler = fem.assemble.Assembler(Constant(0.5) * u * v * dx, L)
    assembler.assemble(A)
    x = PETScVector(mesh.mpi_comm())
    solver.solve(x, b)
    assert round(x.norm(cpp.la.Norm.l2) - 2.0 * norm, 10) == 0

    # After set_operator, the new matrix is factorised — but the scaled
    # system still has solution norm 2*norm
    solver.set_operator(A)
    solver.solve(x, b)
    assert round(x.norm(cpp.la.Norm.l2) - 2.0 * norm, 10) == 0
def test_compute_closest_entity_2d(self):
    """Closest entity and distance for an exterior point."""
    # Expected (entity index, distance) in serial
    reference = (1, 1.0)
    p = Point(-1.0, 0.01)
    mesh = UnitSquareMesh(16, 16)

    tree = BoundingBoxTree()
    tree.build(mesh)
    entity, distance = tree.compute_closest_entity(p)
    if MPI.size(mesh.mpi_comm()) == 1:
        self.assertEqual(entity, reference[0])
        self.assertAlmostEqual(distance, reference[1])

    # Repeat with the mesh's own cached tree
    tree = mesh.bounding_box_tree()
    entity, distance = tree.compute_closest_entity(p)
    if MPI.size(mesh.mpi_comm()) == 1:
        self.assertEqual(entity, reference[0])
        self.assertAlmostEqual(distance, reference[1])
def test_mesh_point_2d(self):
    "Test mesh-point intersection in 2D"
    point = Point(0.1, 0.2)
    mesh = UnitSquareMesh(16, 16)
    intersection = intersect(mesh, point)
    # Reference cell index is only valid in serial
    if MPI.size(mesh.mpi_comm()) == 1:
        self.assertEqual(intersection.intersected_cells(), [98])
def test_save_and_load_2d_mesh(tempdir, encoding):
    """Round-trip a 2D mesh through an XDMF file."""
    filename = os.path.join(tempdir, "mesh_2D.xdmf")
    mesh = UnitSquareMesh(MPI.comm_world, 32, 32)

    with XDMFFile(mesh.mpi_comm(), filename, encoding=encoding) as file:
        file.write(mesh)

    with XDMFFile(MPI.comm_world, filename) as file:
        mesh2 = file.read_mesh(cpp.mesh.GhostMode.none)

    # Global vertex and cell counts must survive the round trip
    assert mesh.num_entities_global(0) == mesh2.num_entities_global(0)
    dim = mesh.topology.dim
    assert mesh.num_entities_global(dim) == mesh2.num_entities_global(dim)
def test_compute_collisions_point_2d(self):
    """Point collisions against trees of each entity dimension."""
    # Expected entity indices per tree dimension (serial numbering)
    reference = {1: set([226]), 2: set([136, 137])}
    p = Point(0.3, 0.3)
    mesh = UnitSquareMesh(16, 16)

    for dim in range(1, 3):
        tree = BoundingBoxTree()
        tree.build(mesh, dim)
        entities = tree.compute_collisions(p)
        if MPI.size(mesh.mpi_comm()) == 1:
            self.assertEqual(set(entities), reference[dim])
def test_lu_cholesky():
    """Test that PETScLUSolver selects LU or Cholesky solver based on
    symmetry of matrix operator.
    """
    from petsc4py import PETSc

    mesh = UnitSquareMesh(MPI.comm_world, 12, 12)
    V = FunctionSpace(mesh, "Lagrange", 1)
    u, v = TrialFunction(V), TestFunction(V)
    A = PETScMatrix(mesh.mpi_comm())
    assemble(Constant(1.0) * u * v * dx, tensor=A)

    # Check that solver type is LU
    solver = PETScLUSolver(mesh.mpi_comm(), A, "petsc")
    pc_type = solver.ksp().getPC().getType()
    assert pc_type == "lu"

    # Set symmetry flag
    A.mat().setOption(PETSc.Mat.Option.SYMMETRIC, True)

    # Check symmetry flags. Use truthiness rather than '== True'
    # (un-idiomatic equality comparison against a bool, linter E712).
    symm = A.mat().isSymmetricKnown()
    assert symm[0]
    assert symm[1]

    # Check that solver type is Cholesky since matrix has now been
    # marked as symmetric
    solver = PETScLUSolver(mesh.mpi_comm(), A, "petsc")
    pc_type = solver.ksp().getPC().getType()
    assert pc_type == "cholesky"

    # Re-assemble, which resets symmetry flag
    assemble(Constant(1.0) * u * v * dx, tensor=A)
    solver = PETScLUSolver(mesh.mpi_comm(), A, "petsc")
    pc_type = solver.ksp().getPC().getType()
    assert pc_type == "lu"