# consolidated imports; module paths assume the py-pde package layout
import functools

import numpy as np
import pytest

from pde import (
    PDE,
    CartesianGrid,
    FieldCollection,
    FileStorage,
    MemoryStorage,
    ScalarField,
    Tensor2Field,
    UnitGrid,
    VectorField,
    grids,
)
from pde.fields.base import FieldBase
from pde.storage.memory import get_memory_storage
from pde.tools.misc import module_available
from pde.visualization import plotting


def test_data_management():
    """test how data is set"""
    grid = UnitGrid([2, 2])

    for cls in (ScalarField, VectorField, Tensor2Field):
        s1 = cls(grid, data=1)
        np.testing.assert_allclose(s1.data, 1)

        s2 = cls(grid)
        np.testing.assert_allclose(s2.data, 0)

        c = FieldCollection([s1, s2])
        s1.data = 0
        np.testing.assert_allclose(c.data, 0)

        c.data = 2
        np.testing.assert_allclose(s1.data, 2)
        np.testing.assert_allclose(s2.data, 2)

        c.data += 1
        np.testing.assert_allclose(s1.data, 3)
        np.testing.assert_allclose(s2.data, 3)

        c[0].data += 2  # reference to s1
        c[1].data *= 2  # reference to s2
        np.testing.assert_allclose(s1.data, 5)
        np.testing.assert_allclose(s2.data, 6)

        c[0] = s2
        np.testing.assert_allclose(c.data, 6)

    # nested collections
    with pytest.raises(RuntimeError):
        FieldCollection([c])


def test_kymograph_collection(tmp_path):
    """test making kymographs for field collections"""
    # create some storage
    field = FieldCollection(
        [ScalarField(UnitGrid(8), label="a"), ScalarField(UnitGrid(8), label="b")]
    )
    with get_memory_storage(field) as storage:
        for i in range(8):
            field.data = i
            storage.append(field, i)

    # create single kymograph
    path = tmp_path / "test1.png"
    plotting.plot_kymograph(
        storage, field_index=1, colorbar=True, transpose=True, filename=path
    )
    assert path.stat().st_size > 0

    # create multiple kymographs
    path = tmp_path / "test2.png"
    plotting.plot_kymographs(storage, filename=path)
    assert path.stat().st_size > 0


@pytest.mark.parametrize("num", [1, 2, 3])  # assumed example values; any positive count works
def test_shapes_nfields(num, example_grid):
    """test collections with different numbers of fields"""
    # `example_grid` is expected to be provided by a pytest fixture
    fields = [ScalarField.random_uniform(example_grid) for _ in range(num)]
    field = FieldCollection(fields)
    data_shape = (num,) + example_grid.shape
    np.testing.assert_equal(field.data.shape, data_shape)
    for pf_single in field:
        np.testing.assert_equal(pf_single.data.shape, example_grid.shape)

    field_c = field.copy()
    np.testing.assert_allclose(field.data, field_c.data)
    assert field.grid == field_c.grid


def test_pde_critical_input():
    """test some wrong input and edge cases"""
    # test whether reserved symbols can be used as variables
    grid = grids.UnitGrid([4])
    eq = PDE({"E": 1})
    res = eq.solve(ScalarField(grid), t_range=2)
    np.testing.assert_allclose(res.data, 2)

    with pytest.raises(ValueError):
        PDE({"t": 1})

    eq = PDE({"u": 1})
    assert eq.expressions == {"u": "1.0"}
    with pytest.raises(ValueError):
        eq.evolution_rate(FieldCollection.scalar_random_uniform(2, grid))

    eq = PDE({"u": 1, "v": 2})
    assert eq.expressions == {"u": "1.0", "v": "2.0"}
    with pytest.raises(ValueError):
        eq.evolution_rate(ScalarField.random_uniform(grid))

    eq = PDE({"u": "a"})
    with pytest.raises(RuntimeError):
        eq.evolution_rate(ScalarField.random_uniform(grid))

    eq = PDE({"x": "x"})
    with pytest.raises(ValueError):
        eq.evolution_rate(ScalarField(grid))


def test_scalar_arithmetics():
    """test simple arithmetic involving scalar fields"""
    grid = UnitGrid([3, 4])
    s = ScalarField(grid, data=2)
    v = VectorField.random_uniform(grid)

    for f in [v, FieldCollection([v])]:
        f.data = s
        assert f.data.shape == (2, 3, 4)
        np.testing.assert_allclose(f.data, 2)

        f += s
        np.testing.assert_allclose(f.data, 4)
        np.testing.assert_allclose((f + s).data, 6)
        np.testing.assert_allclose((s + f).data, 6)
        f -= s
        np.testing.assert_allclose((f - s).data, 0)
        np.testing.assert_allclose((s - f).data, 0)

        f *= s
        np.testing.assert_allclose(f.data, 4)
        np.testing.assert_allclose((f * s).data, 8)
        np.testing.assert_allclose((s * f).data, 8)
        f /= s
        np.testing.assert_allclose((f / s).data, 1)

        with pytest.raises(TypeError):
            s / f
        with pytest.raises(TypeError):
            s /= f
        with pytest.raises(TypeError):
            s *= f


def test_pde_wrong_input():
    """test some wrong input"""
    with pytest.raises(ValueError):
        PDE({"t": 1})
    with pytest.raises(ValueError):
        PDE({"E": 1})
    with pytest.raises(ValueError):
        PDE({"E": 1, "t": 0})

    grid = grids.UnitGrid([4])
    eq = PDE({"u": 1})
    assert eq.expressions == {"u": "1.0"}
    with pytest.raises(ValueError):
        eq.evolution_rate(FieldCollection.scalar_random_uniform(2, grid))

    eq = PDE({"u": 1, "v": 2})
    assert eq.expressions == {"u": "1.0", "v": "2.0"}
    with pytest.raises(ValueError):
        eq.evolution_rate(ScalarField.random_uniform(grid))

    eq = PDE({"u": "a"})
    with pytest.raises(RuntimeError):
        eq.evolution_rate(ScalarField.random_uniform(grid))

    eq = PDE({"x": "x"})
    with pytest.raises(ValueError):
        eq.evolution_rate(ScalarField(grid))


def test_field_type_guessing():
    """test the ability to guess the field type"""
    for cls in [ScalarField, VectorField, Tensor2Field]:
        grid = UnitGrid([3])
        field = cls.random_normal(grid)
        s = MemoryStorage()
        s.start_writing(field)
        s.append(field, 0)
        s.append(field, 1)

        # delete information
        s._field = None
        s.info = {}

        assert not s.has_collection
        assert len(s) == 2
        assert s[0] == field

    field = FieldCollection([ScalarField(grid), VectorField(grid)])
    s = MemoryStorage()
    s.start_writing(field)
    s.append(field, 0)

    assert s.has_collection

    # delete information
    s._field = None
    s.info = {}

    with pytest.raises(RuntimeError):
        s[0]


def test_storing_collection(tmp_path):
    """test methods specific to FieldCollections in memory storage"""
    grid = UnitGrid([2, 2])
    f1 = ScalarField.random_uniform(grid, 0.1, 0.4, label="a")
    f2 = VectorField.random_uniform(grid, 0.1, 0.4, label="b")
    f3 = Tensor2Field.random_uniform(grid, 0.1, 0.4, label="c")
    fc = FieldCollection([f1, f2, f3])

    storage_classes = {"MemoryStorage": MemoryStorage}
    if module_available("h5py"):
        file_path = tmp_path / "test_storage_write.hdf5"
        storage_classes["FileStorage"] = functools.partial(FileStorage, file_path)

    for storage_cls in storage_classes.values():
        # store some data
        storage = storage_cls()
        storage.start_writing(fc)
        storage.append(fc, 0)
        storage.append(fc, 1)
        storage.end_writing()

        assert storage.has_collection
        assert storage.extract_field(0)[0] == f1
        assert storage.extract_field(1)[0] == f2
        assert storage.extract_field(2)[0] == f3
        assert storage.extract_field(0)[0].label == "a"
        assert storage.extract_field(0, label="new label")[0].label == "new label"
        assert storage.extract_field(0)[0].label == "a"  # do not alter label
        assert storage.extract_field("a")[0] == f1
        assert storage.extract_field("b")[0] == f2
        assert storage.extract_field("c")[0] == f3
        with pytest.raises(ValueError):
            storage.extract_field("nonsense")


def test_interpolation_to_grid_fields_2d():
    """test whether data is interpolated correctly for different fields"""
    # 2d-only variant; see the parametrized test_interpolation_to_grid_fields below
    grid = CartesianGrid([[0, 2 * np.pi]] * 2, 6)
    grid2 = CartesianGrid([[0, 2 * np.pi]] * 2, 8)
    vf = VectorField.from_expression(grid, ["sin(y)", "cos(x)"])
    sf = vf[0]  # test extraction of fields
    fc = FieldCollection([sf, vf])

    for f in [sf, vf, fc]:
        f2 = f.interpolate_to_grid(grid2, method="numba")
        f3 = f2.interpolate_to_grid(grid, method="numba")
        np.testing.assert_allclose(f.data, f3.data, atol=0.2, rtol=0.2)


def test_collection_plot(tmp_path):
    """test plotting magnitudes of a field collection"""
    # create some data
    field = FieldCollection(
        [ScalarField(UnitGrid([8, 8]), label="first"), ScalarField(UnitGrid([8, 8]))]
    )
    with get_memory_storage(field) as storage:
        storage.append(field)

    path = tmp_path / "test_collection_plot.png"
    plotting.plot_magnitudes(storage, filename=path)
    assert path.stat().st_size > 0


def test_pde_2scalar():
    """test PDE with two scalar fields"""
    eq = PDE({"u": "laplace(u) - u", "v": "- u * v"})
    assert not eq.explicit_time_dependence
    assert not eq.complex_valued
    grid = grids.UnitGrid([8])
    field = FieldCollection.scalar_random_uniform(2, grid)

    res_a = eq.solve(field, t_range=1, dt=0.01, backend="numpy", tracker=None)
    res_b = eq.solve(field, t_range=1, dt=0.01, backend="numba", tracker=None)

    res_a.assert_field_compatible(res_b)
    np.testing.assert_allclose(res_a.data, res_b.data)


def test_simple_plotting(example_grid):
    """test simple plotting of various fields on various grids"""
    vf = VectorField.random_uniform(example_grid)
    tf = Tensor2Field.random_uniform(example_grid)
    sf = tf[0, 0]  # test extraction of fields
    fc = FieldCollection([sf, vf])

    for f in [sf, vf, tf, fc]:
        f.plot(action="close")
        f.plot(kind="line", action="close")
        if example_grid.dim >= 2:
            f.plot(kind="image", action="close")
        if isinstance(f, VectorField) and example_grid.dim == 2:
            f.plot(kind="quiver", action="close")
            f.plot(kind="streamplot", action="close")


def test_pde_vector_scalar():
    """test PDE with a vector and a scalar field"""
    eq = PDE({"u": "vector_laplace(u) - u + gradient(v)", "v": "- divergence(u)"})
    assert not eq.explicit_time_dependence
    assert not eq.complex_valued
    grid = grids.UnitGrid([8, 8])
    field = FieldCollection(
        [VectorField.random_uniform(grid), ScalarField.random_uniform(grid)]
    )

    res_a = eq.solve(field, t_range=1, dt=0.01, backend="numpy", tracker=None)
    res_b = eq.solve(field, t_range=1, dt=0.01, backend="numba", tracker=None)

    res_a.assert_field_compatible(res_b)
    np.testing.assert_allclose(res_a.data, res_b.data)


def test_hdf_input_output(tmp_path):
    """test writing and reading files"""
    grid = UnitGrid([4, 4])
    s = ScalarField.random_uniform(grid, label="scalar")
    v = VectorField.random_uniform(grid, label="vector")
    t = Tensor2Field.random_uniform(grid, label="tensor")
    col = FieldCollection([s, v, t], label="collection")

    path = tmp_path / "test_hdf_input_output.hdf5"
    for f in [s, v, t, col]:
        f.to_file(path)
        f2 = FieldBase.from_file(path)
        assert f == f2
        assert f.label == f2.label
        assert isinstance(str(f), str)
        assert isinstance(repr(f), str)


@pytest.mark.parametrize("backend", ["numpy", "numba"])  # assumed: the two backends used in these tests
def test_pde_noise(backend):
    """test noise operator on PDE class"""
    grid = grids.UnitGrid([64, 64])
    state = FieldCollection([ScalarField(grid), ScalarField(grid)])

    eq = PDE({"a": 0, "b": 0}, noise=0.5)
    res = eq.solve(state, t_range=1, backend=backend, dt=1, tracker=None)
    assert res.data.std() == pytest.approx(0.5, rel=0.1)

    eq = PDE({"a": 0, "b": 0}, noise=[0.01, 2.0])
    res = eq.solve(state, t_range=1, backend=backend, dt=1)
    assert res.data[0].std() == pytest.approx(0.01, rel=0.1)
    assert res.data[1].std() == pytest.approx(2.0, rel=0.1)

    with pytest.raises(ValueError):
        eq = PDE({"a": 0}, noise=[0.01, 2.0])
        eq.solve(ScalarField(grid), t_range=1, backend=backend, dt=1, tracker=None)


def test_interpolation_to_cartesian(grid):
    """test whether data is interpolated correctly to Cartesian grid"""
    dim = grid.dim
    vf = VectorField(grid, 2)
    sf = vf[0]  # test extraction of fields
    fc = FieldCollection([sf, vf])

    # subset
    grid_cart = UnitGrid([4] * dim)
    for f in [sf, fc]:
        res = f.interpolate_to_grid(grid_cart)
        np.testing.assert_allclose(res.data, 2)

    # superset
    grid_cart = UnitGrid([8] * dim)
    for f in [sf, fc]:
        res = f.interpolate_to_grid(grid_cart, fill=0)
        assert res.data.min() == 0
        assert res.data.max() == pytest.approx(2)


@pytest.mark.parametrize("ndim", [1, 2])  # assumed: the body only handles 1d and 2d grids
def test_interpolation_to_grid_fields(ndim):
    """test whether data is interpolated correctly for different fields"""
    grid = CartesianGrid([[0, 2 * np.pi]] * ndim, 6)
    grid2 = CartesianGrid([[0, 2 * np.pi]] * ndim, 8)
    if ndim == 1:
        vf = VectorField.from_expression(grid, ["cos(x)"])
    elif ndim == 2:
        vf = VectorField.from_expression(grid, ["sin(y)", "cos(x)"])
    sf = vf[0]  # test extraction of fields
    fc = FieldCollection([sf, vf])

    for f in [sf, vf, fc]:
        # test self-interpolation
        f0 = f.interpolate_to_grid(grid, backend="numba")
        np.testing.assert_allclose(f.data, f0.data, atol=1e-15)

        # test interpolation to finer grid and back
        f2 = f.interpolate_to_grid(grid2, backend="numba")
        f3 = f2.interpolate_to_grid(grid, backend="numba")
        np.testing.assert_allclose(f.data, f3.data, atol=0.2, rtol=0.2)