def test_scalar_arithmetics():
    """Check arithmetics between a scalar field and vector-valued fields."""
    grid = UnitGrid([3, 4])
    scalar = ScalarField(grid, data=2)
    vector = VectorField.random_uniform(grid)

    # identical checks for a plain vector field and a collection wrapping it
    for field in (vector, FieldCollection([vector])):
        field.data = scalar
        assert field.data.shape == (2, 3, 4)
        np.testing.assert_allclose(field.data, 2)

        # addition
        field += scalar
        np.testing.assert_allclose(field.data, 4)
        np.testing.assert_allclose((field + scalar).data, 6)
        np.testing.assert_allclose((scalar + field).data, 6)

        # subtraction
        field -= scalar
        np.testing.assert_allclose((field - scalar).data, 0)
        np.testing.assert_allclose((scalar - field).data, 0)

        # multiplication
        field *= scalar
        np.testing.assert_allclose(field.data, 4)
        np.testing.assert_allclose((field * scalar).data, 8)
        np.testing.assert_allclose((scalar * field).data, 8)

        # division; dividing a scalar by a non-scalar field must fail
        field /= scalar
        np.testing.assert_allclose((field / scalar).data, 1)
        with pytest.raises(TypeError):
            scalar / field
        with pytest.raises(TypeError):
            scalar /= field
        with pytest.raises(TypeError):
            scalar *= field
def test_storing_collection(tmp_path):
    """Check storage backends handling time series of FieldCollections."""
    grid = UnitGrid([2, 2])
    field_a = ScalarField.random_uniform(grid, 0.1, 0.4, label="a")
    field_b = VectorField.random_uniform(grid, 0.1, 0.4, label="b")
    field_c = Tensor2Field.random_uniform(grid, 0.1, 0.4, label="c")
    collection = FieldCollection([field_a, field_b, field_c])

    # memory storage is always tested; file storage only if h5py is present
    backends = {"MemoryStorage": MemoryStorage}
    if module_available("h5py"):
        file_path = tmp_path / "test_storage_write.hdf5"
        backends["FileStorage"] = functools.partial(FileStorage, file_path)

    for make_storage in backends.values():
        # store the same collection at two time points
        storage = make_storage()
        storage.start_writing(collection)
        storage.append(collection, 0)
        storage.append(collection, 1)
        storage.end_writing()

        assert storage.has_collection

        # extraction by positional index
        assert storage.extract_field(0)[0] == field_a
        assert storage.extract_field(1)[0] == field_b
        assert storage.extract_field(2)[0] == field_c
        assert storage.extract_field(0)[0].label == "a"
        assert storage.extract_field(0, label="new label")[0].label == "new label"
        assert storage.extract_field(0)[0].label == "a"  # do not alter label

        # extraction by label
        assert storage.extract_field("a")[0] == field_a
        assert storage.extract_field("b")[0] == field_b
        assert storage.extract_field("c")[0] == field_c
        with pytest.raises(ValueError):
            storage.extract_field("nonsense")
def test_random_uniform_types():
    """Verify the dtype of random uniform fields for several scalar types."""
    grid = UnitGrid([8])

    # an explicitly requested dtype is honored exactly
    for requested in (bool, int, float, complex):
        field = VectorField.random_uniform(grid, dtype=requested)
        assert field.dtype == np.dtype(requested)
        assert isinstance(field.data.flat[0].item(), requested)

    # a complex bound on either side promotes the whole field to complex
    assert ScalarField.random_uniform(grid, 0, 1).dtype == np.dtype(float)
    assert ScalarField.random_uniform(grid, vmin=0 + 0j).dtype == np.dtype(complex)
    assert ScalarField.random_uniform(grid, vmax=1 + 0j).dtype == np.dtype(complex)
    assert ScalarField.random_uniform(grid, 0 + 0j, 1 + 0j).dtype == np.dtype(complex)
def test_simple_plotting(example_grid):
    """Exercise the plotting interface of all field types on a given grid."""
    vector = VectorField.random_uniform(example_grid)
    tensor = Tensor2Field.random_uniform(example_grid)
    scalar = tensor[0, 0]  # also tests extraction of a component field
    collection = FieldCollection([scalar, vector])

    for field in (scalar, vector, tensor, collection):
        field.plot(action="close")
        field.plot(kind="line", action="close")
        # image plots require at least two dimensions
        if example_grid.dim >= 2:
            field.plot(kind="image", action="close")
        # quiver/streamplot only make sense for 2d vector fields
        if isinstance(field, VectorField) and example_grid.dim == 2:
            field.plot(kind="quiver", action="close")
            field.plot(kind="streamplot", action="close")
def test_writing_images(tmp_path):
    """Write fields as PNG images and read the files back."""
    from matplotlib.pyplot import imread

    grid = UnitGrid([4, 4])
    fields = [
        ScalarField.random_uniform(grid, label="scalar"),
        VectorField.random_uniform(grid, label="vector"),
        Tensor2Field.random_uniform(grid, label="tensor"),
    ]
    path = tmp_path / "test_writing_images.png"

    for field in fields:
        field.to_file(path)
        # try reading the file back in
        with path.open("br") as fp:
            imread(fp)
def test_pde_vector_scalar():
    """Test a PDE coupling a vector field to a scalar field.

    Solves the same system with the `numpy` and the `numba` backend and
    checks that both backends produce compatible, numerically close results.
    """
    eq = PDE({"u": "vector_laplace(u) - u + gradient(v)", "v": "- divergence(u)"})
    assert not eq.explicit_time_dependence
    assert not eq.complex_valued

    # use the bare `UnitGrid` name for consistency with the sibling tests,
    # which all reference `UnitGrid` directly instead of `grids.UnitGrid`
    grid = UnitGrid([8, 8])
    field = FieldCollection(
        [VectorField.random_uniform(grid), ScalarField.random_uniform(grid)]
    )

    res_a = eq.solve(field, t_range=1, dt=0.01, backend="numpy", tracker=None)
    res_b = eq.solve(field, t_range=1, dt=0.01, backend="numba", tracker=None)

    res_a.assert_field_compatible(res_b)
    np.testing.assert_allclose(res_a.data, res_b.data)
def test_hdf_input_output(tmp_path):
    """Write fields to an HDF file and read them back unchanged."""
    grid = UnitGrid([4, 4])
    scalar = ScalarField.random_uniform(grid, label="scalar")
    vector = VectorField.random_uniform(grid, label="vector")
    tensor = Tensor2Field.random_uniform(grid, label="tensor")
    collection = FieldCollection([scalar, vector, tensor], label="collection")

    path = tmp_path / "test_hdf_input_output.hdf5"
    for field in [scalar, vector, tensor, collection]:
        field.to_file(path)
        # the round trip must preserve equality and the label
        restored = FieldBase.from_file(path)
        assert field == restored
        assert field.label == restored.label
        assert isinstance(str(field), str)
        assert isinstance(repr(field), str)