Example #1
def test_pde_wrong_input():
    """test some wrong input"""
    with pytest.raises(ValueError):
        PDE({"t": 1})
    with pytest.raises(ValueError):
        PDE({"E": 1})
    with pytest.raises(ValueError):
        PDE({"E": 1, "t": 0})

    grid = grids.UnitGrid([4])
    eq = PDE({"u": 1})
    assert eq.expressions == {"u": "1.0"}
    with pytest.raises(ValueError):
        eq.evolution_rate(FieldCollection.scalar_random_uniform(2, grid))

    eq = PDE({"u": 1, "v": 2})
    assert eq.expressions == {"u": "1.0", "v": "2.0"}
    with pytest.raises(ValueError):
        eq.evolution_rate(ScalarField.random_uniform(grid))

    eq = PDE({"u": "a"})
    with pytest.raises(RuntimeError):
        eq.evolution_rate(ScalarField.random_uniform(grid))

    eq = PDE({"x": "x"})
    with pytest.raises(ValueError):
        eq.evolution_rate(ScalarField(grid))
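The snippets on this page are shown without their import headers. The sketch below collects what they rely on; the first group matches names that py-pde exports at the package level, while the module paths in the second group are assumptions based on the package layout and may differ between versions.

import functools

import numpy as np
import pytest
from scipy import ndimage

# core classes used throughout the examples (top-level py-pde exports)
from pde import (
    PDE,
    CartesianGrid,
    DiffusionPDE,
    FieldCollection,
    FileStorage,
    MemoryStorage,
    ScalarField,
    SwiftHohenbergPDE,
    Tensor2Field,
    UnitGrid,
    VectorField,
)
from pde import grids  # some snippets spell out grids.UnitGrid explicitly

# helper imports; exact module paths are assumptions, not taken from the snippets
from pde.fields.base import FieldBase
from pde.storage.memory import get_memory_storage
from pde.tools.misc import module_available
from pde.visualization import plotting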
Example #2
def test_pde_critical_input():
    """test some wrong input and edge cases"""
    # test whether reserved symbols can be used as variables
    grid = grids.UnitGrid([4])
    eq = PDE({"E": 1})
    res = eq.solve(ScalarField(grid), t_range=2)
    np.testing.assert_allclose(res.data, 2)

    with pytest.raises(ValueError):
        PDE({"t": 1})

    eq = PDE({"u": 1})
    assert eq.expressions == {"u": "1.0"}
    with pytest.raises(ValueError):
        eq.evolution_rate(FieldCollection.scalar_random_uniform(2, grid))

    eq = PDE({"u": 1, "v": 2})
    assert eq.expressions == {"u": "1.0", "v": "2.0"}
    with pytest.raises(ValueError):
        eq.evolution_rate(ScalarField.random_uniform(grid))

    eq = PDE({"u": "a"})
    with pytest.raises(RuntimeError):
        eq.evolution_rate(ScalarField.random_uniform(grid))

    eq = PDE({"x": "x"})
    with pytest.raises(ValueError):
        eq.evolution_rate(ScalarField(grid))
Example #3
def test_random_uniform_types():
    """test whether random uniform fields behave correctly for different types"""
    grid = UnitGrid([8])
    for dtype in [bool, int, float, complex]:
        field = VectorField.random_uniform(grid, dtype=dtype)
        assert field.dtype == np.dtype(dtype)
        assert isinstance(field.data.flat[0].item(), dtype)

    assert ScalarField.random_uniform(grid, 0, 1).dtype == np.dtype(float)
    assert ScalarField.random_uniform(grid, vmin=0 + 0j).dtype == np.dtype(complex)
    assert ScalarField.random_uniform(grid, vmax=1 + 0j).dtype == np.dtype(complex)
    assert ScalarField.random_uniform(grid, 0 + 0j, 1 + 0j).dtype == np.dtype(complex)
Example #4
def test_storage_truncation(tmp_path):
    """test whether storage truncation works as expected"""
    file = tmp_path / "test_storage_truncation.hdf5"
    for truncate in [True, False]:
        storages = [MemoryStorage()]
        if module_available("h5py"):
            storages.append(FileStorage(file))
        tracker_list = [s.tracker(interval=0.01) for s in storages]

        grid = UnitGrid([8, 8])
        state = ScalarField.random_uniform(grid, 0.2, 0.3)
        pde = DiffusionPDE()

        pde.solve(state, t_range=0.1, dt=0.001, tracker=tracker_list)
        if truncate:
            for storage in storages:
                storage.clear()
        pde.solve(state, t_range=[0.1, 0.2], dt=0.001, tracker=tracker_list)

        times = np.arange(0.1, 0.201, 0.01)
        if not truncate:
            times = np.r_[np.arange(0, 0.101, 0.01), times]
        for storage in storages:
            msg = f"truncate={truncate}, storage={storage}"
            np.testing.assert_allclose(storage.times, times, err_msg=msg)

        assert not storage.has_collection
Example #5
def test_smoothing():
    """test smoothing on different grids"""
    for grid in [
        CartesianGrid([[-2, 3]], 4),
        UnitGrid(7, periodic=False),
        UnitGrid(7, periodic=True),
    ]:
        f1 = ScalarField.random_uniform(grid)
        sigma = 0.5 + np.random.random()

        # this assumes that the grid periodicity is the same for all axes
        mode = "wrap" if grid.periodic[0] else "reflect"
        s = sigma / grid.typical_discretization
        expected = ndimage.gaussian_filter(f1.data, sigma=s, mode=mode)

        out = f1.smooth(sigma)
        np.testing.assert_allclose(out.data, expected)

        out.data = 0  # reset data
        f1.smooth(sigma, out=out)
        np.testing.assert_allclose(out.data, expected)

    # test one simple higher order smoothing
    tf = Tensor2Field.random_uniform(grid)
    assert tf.data.shape == tf.smooth(1).data.shape

    # test in-place smoothing
    g = UnitGrid([8, 8])
    f1 = ScalarField.random_normal(g)
    f2 = f1.smooth(3)
    f1.smooth(3, out=f1)
    np.testing.assert_allclose(f1.data, f2.data)
Example #6
def test_storing_collection(tmp_path):
    """test methods specific to FieldCollections in different storage classes"""
    grid = UnitGrid([2, 2])
    f1 = ScalarField.random_uniform(grid, 0.1, 0.4, label="a")
    f2 = VectorField.random_uniform(grid, 0.1, 0.4, label="b")
    f3 = Tensor2Field.random_uniform(grid, 0.1, 0.4, label="c")
    fc = FieldCollection([f1, f2, f3])

    storage_classes = {"MemoryStorage": MemoryStorage}
    if module_available("h5py"):
        file_path = tmp_path / "test_storage_write.hdf5"
        storage_classes["FileStorage"] = functools.partial(
            FileStorage, file_path)

    for storage_cls in storage_classes.values():
        # store some data
        storage = storage_cls()
        storage.start_writing(fc)
        storage.append(fc, 0)
        storage.append(fc, 1)
        storage.end_writing()

        assert storage.has_collection
        assert storage.extract_field(0)[0] == f1
        assert storage.extract_field(1)[0] == f2
        assert storage.extract_field(2)[0] == f3
        assert storage.extract_field(0)[0].label == "a"
        assert storage.extract_field(0, label="new label")[0].label == "new label"
        assert storage.extract_field(0)[0].label == "a"  # do not alter label
        assert storage.extract_field("a")[0] == f1
        assert storage.extract_field("b")[0] == f2
        assert storage.extract_field("c")[0] == f3
        with pytest.raises(ValueError):
            storage.extract_field("nonsense")
Example #7
def test_shapes_nfields(num, example_grid):
    """test single component field"""
    fields = [ScalarField.random_uniform(example_grid) for _ in range(num)]
    field = FieldCollection(fields)
    data_shape = (num,) + example_grid.shape
    np.testing.assert_equal(field.data.shape, data_shape)
    for pf_single in field:
        np.testing.assert_equal(pf_single.data.shape, example_grid.shape)

    field_c = field.copy()
    np.testing.assert_allclose(field.data, field_c.data)
    assert field.grid == field_c.grid
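Several excerpts take extra arguments (num and example_grid here; backend, dtype, grid, and movie_func further down) that the original test suite supplies through pytest decorators and fixtures stripped from these listings. A minimal sketch of how the example above could be driven; the fixture and the parameter values are hypothetical, not the library's own test configuration:

@pytest.fixture(params=[grids.UnitGrid([3]), grids.UnitGrid([3, 4])])
def example_grid(request):
    # hypothetical fixture: provides one grid per parametrized run
    return request.param


@pytest.mark.parametrize("num", [1, 2, 3])
def test_shapes_nfields(num, example_grid):
    ...  # body as shown above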
Example #8
def test_pde_complex():
    """test complex valued PDE"""
    eq = PDE({"p": "I * laplace(p)"})
    assert not eq.explicit_time_dependence
    assert eq.complex_valued

    field = ScalarField.random_uniform(grids.UnitGrid([4]))
    assert not field.is_complex
    res1 = eq.solve(field, t_range=1, dt=0.1, backend="numpy", tracker=None)
    assert res1.is_complex
    res2 = eq.solve(field, t_range=1, dt=0.1, backend="numpy", tracker=None)
    assert res2.is_complex

    np.testing.assert_allclose(res1.data, res2.data)
Example #9
def test_scalar_field_plot(tmp_path):
    """test ScalarFieldPlot class"""
    path = tmp_path / "test_scalar_field_plot.png"

    # create some data
    state = ScalarField.random_uniform(UnitGrid([16, 16]))
    for scale in [(0, 1), 1, "automatic", "symmetric", "unity"]:
        sfp = plotting.ScalarFieldPlot(state, scale=scale)
        sfp.savefig(path)
        assert path.stat().st_size > 0

    sfp = plotting.ScalarFieldPlot(state, quantities={"source": None})
    sfp.savefig(path)
    assert path.stat().st_size > 0
Example #10
def test_interactive_plotting():
    """ test plot_interactive """

    # create some data
    field = ScalarField.random_uniform(UnitGrid([8]))
    with get_memory_storage(field) as storage:
        for i in range(8):
            storage.append(field.copy(data=i), i)

    plotting.plot_interactive(storage, viewer_args={"show": False, "close": True})
Example #11
def test_writing_images(tmp_path):
    """test writing and reading files"""
    from matplotlib.pyplot import imread

    grid = UnitGrid([4, 4])
    s = ScalarField.random_uniform(grid, label="scalar")
    v = VectorField.random_uniform(grid, label="vector")
    t = Tensor2Field.random_uniform(grid, label="tensor")

    path = tmp_path / "test_writing_images.png"
    for f in [s, v, t]:
        f.to_file(path)
        # try reading the file
        with path.open("br") as fp:
            imread(fp)
Example #12
def test_pde_vector_scalar():
    """test PDE with a vector and a scalar field"""
    eq = PDE({"u": "vector_laplace(u) - u + gradient(v)", "v": "- divergence(u)"})
    assert not eq.explicit_time_dependence
    assert not eq.complex_valued
    grid = grids.UnitGrid([8, 8])
    field = FieldCollection(
        [VectorField.random_uniform(grid), ScalarField.random_uniform(grid)]
    )

    res_a = eq.solve(field, t_range=1, dt=0.01, backend="numpy", tracker=None)
    res_b = eq.solve(field, t_range=1, dt=0.01, backend="numba", tracker=None)

    res_a.assert_field_compatible(res_b)
    np.testing.assert_allclose(res_a.data, res_b.data)
Example #13
def test_pde_integral(backend):
    """test PDE with integral"""
    grid = grids.UnitGrid([16])
    field = ScalarField.random_uniform(grid)
    eq = PDE({"c": "-integral(c)"})

    # test rhs
    rhs = eq.make_pde_rhs(field, backend=backend)
    np.testing.assert_allclose(rhs(field.data, 0), -field.integral)

    # test evolution
    for method in ["scipy", "explicit"]:
        res = eq.solve(field, t_range=1000, method=method, tracker=None)
        assert res.integral == pytest.approx(0, abs=1e-2)
        np.testing.assert_allclose(res.data, field.data - field.magnitude, atol=1e-3)
Example #14
def test_storage_types(dtype, tmp_path):
    """test storing different types"""
    grid = UnitGrid([32])
    field = ScalarField.random_uniform(grid).copy(dtype=dtype)
    if dtype == complex:
        field += 1j * ScalarField.random_uniform(grid)

    storage_classes = {"MemoryStorage": MemoryStorage}
    if module_available("h5py"):
        file_path = tmp_path / "test_storage_apply.hdf5"
        storage_classes["FileStorage"] = functools.partial(
            FileStorage, file_path)

    for storage_cls in storage_classes.values():
        s = storage_cls()
        s.start_writing(field)
        s.append(field, 0)
        s.append(field, 1)
        s.end_writing()

        assert len(s) == 2
        np.testing.assert_allclose(s.times, [0, 1])
        np.testing.assert_equal(s[0].data, field.data)
        np.testing.assert_equal(s[1].data, field.data)
Example #15
def test_hdf_input_output(tmp_path):
    """test writing and reading files"""
    grid = UnitGrid([4, 4])
    s = ScalarField.random_uniform(grid, label="scalar")
    v = VectorField.random_uniform(grid, label="vector")
    t = Tensor2Field.random_uniform(grid, label="tensor")
    col = FieldCollection([s, v, t], label="collection")

    path = tmp_path / "test_hdf_input_output.hdf5"
    for f in [s, v, t, col]:
        f.to_file(path)
        f2 = FieldBase.from_file(path)
        assert f == f2
        assert f.label == f2.label
        assert isinstance(str(f), str)
        assert isinstance(repr(f), str)
Example #16
def test_scalar_plot(tmp_path):
    """test Simple simulation"""
    path = tmp_path / "test_scalar_plot.png"

    # create some data
    state = ScalarField.random_uniform(UnitGrid([16, 16]), label="test")
    with get_memory_storage(state) as storage:
        storage.append(state, 0)
        storage.append(state, 1)

    # check creating an overview image
    plotting.plot_magnitudes(storage, filename=path)
    assert path.stat().st_size > 0

    # check creating a kymograph
    plotting.plot_kymograph(storage, filename=path)
    assert path.stat().st_size > 0
Example #17
def test_compare_swift_hohenberg(grid):
    """compare custom class to swift-Hohenberg"""
    rate, kc2, delta = np.random.uniform(0.5, 2, size=3)
    eq1 = SwiftHohenbergPDE(rate=rate, kc2=kc2, delta=delta)
    eq2 = PDE({
        "u": f"({rate} - {kc2}**2) * u - 2 * {kc2} * laplace(u) "
             f"- laplace(laplace(u)) + {delta} * u**2 - u**3"
    })
    assert eq1.explicit_time_dependence == eq2.explicit_time_dependence
    assert eq1.complex_valued == eq2.complex_valued

    field = ScalarField.random_uniform(grid)
    res1 = eq1.solve(field, t_range=1, dt=0.01, backend="numpy", tracker=None)
    res2 = eq2.solve(field, t_range=1, dt=0.01, backend="numpy", tracker=None)

    res1.assert_field_compatible(res2)
    np.testing.assert_allclose(res1.data, res2.data)
Example #18
def test_movie_scalar(movie_func, tmp_path):
    """test Movie class"""

    # create some data
    state = ScalarField.random_uniform(UnitGrid([4, 4]))
    eq = DiffusionPDE()
    storage = MemoryStorage()
    tracker = storage.tracker(interval=1)
    eq.solve(state, t_range=2, dt=1e-2, backend="numpy", tracker=tracker)

    # check creating the movie
    path = tmp_path / "test_movie.mov"

    try:
        movie_func(storage, filename=path, progress=False)
    except RuntimeError:
        pass  # can happen when ffmpeg is not installed
    else:
        assert path.stat().st_size > 0