def test_smoothing():
    """test smoothing on different grids"""
    for grid in [
            CartesianGrid([[-2, 3]], 4),
            UnitGrid(7, periodic=False),
            UnitGrid(7, periodic=True),
    ]:
        f1 = ScalarField.random_uniform(grid)
        sigma = 0.5 + np.random.random()

        # this assumes that the grid periodicity is the same for all axes
        mode = "wrap" if grid.periodic[0] else "reflect"
        s = sigma / grid.typical_discretization
        expected = ndimage.gaussian_filter(f1.data, sigma=s, mode=mode)

        out = f1.smooth(sigma)
        np.testing.assert_allclose(out.data, expected)

        out.data = 0  # reset data
        f1.smooth(sigma, out=out)
        np.testing.assert_allclose(out.data, expected)

    # test one simple higher-order smoothing (reusing the last grid from the loop above)
    tf = Tensor2Field.random_uniform(grid)
    assert tf.data.shape == tf.smooth(1).data.shape

    # test in-place smoothing
    g = UnitGrid([8, 8])
    f1 = ScalarField.random_normal(g)
    f2 = f1.smooth(3)
    f1.smooth(3, out=f1)
    np.testing.assert_allclose(f1.data, f2.data)
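
The test above pins down what `ScalarField.smooth` computes: a Gaussian filter whose width is rescaled from physical units to grid cells. A minimal standalone sketch of that relation (assuming the usual top-level imports from `pde`, `numpy`, and `scipy`; not part of the original test):

import numpy as np
from scipy import ndimage

from pde import ScalarField, UnitGrid  # assumed top-level imports

grid = UnitGrid(16, periodic=True)       # periodic 1d grid with unit spacing
field = ScalarField.random_uniform(grid)
sigma = 1.0                              # smoothing length in physical units

smoothed = field.smooth(sigma)           # smoothing via the field API

# the same filter applied to the raw data: sigma is converted to grid cells
# via the typical discretization, and "wrap" matches the periodic boundary
expected = ndimage.gaussian_filter(
    field.data, sigma=sigma / grid.typical_discretization, mode="wrap"
)
np.testing.assert_allclose(smoothed.data, expected)
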
Example #2
def test_pde_wrong_input():
    """test some wrong input"""
    with pytest.raises(ValueError):
        PDE({"t": 1})
    with pytest.raises(ValueError):
        PDE({"E": 1})
    with pytest.raises(ValueError):
        PDE({"E": 1, "t": 0})

    grid = grids.UnitGrid([4])
    eq = PDE({"u": 1})
    assert eq.expressions == {"u": "1.0"}
    with pytest.raises(ValueError):
        eq.evolution_rate(FieldCollection.scalar_random_uniform(2, grid))

    eq = PDE({"u": 1, "v": 2})
    assert eq.expressions == {"u": "1.0", "v": "2.0"}
    with pytest.raises(ValueError):
        eq.evolution_rate(ScalarField.random_uniform(grid))

    eq = PDE({"u": "a"})
    with pytest.raises(RuntimeError):
        eq.evolution_rate(ScalarField.random_uniform(grid))

    eq = PDE({"x": "x"})
    with pytest.raises(ValueError):
        eq.evolution_rate(ScalarField(grid))
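
For contrast with the failure cases above, a minimal sketch of pairings the `PDE` class accepts: one scalar field for a single variable, and a field collection with one matching field per variable (assuming the top-level `pde` imports; not part of the original test):

from pde import PDE, FieldCollection, ScalarField, UnitGrid

grid = UnitGrid([4])

# a single-variable PDE acts on a single scalar field
eq1 = PDE({"u": 1})
rhs1 = eq1.evolution_rate(ScalarField(grid))

# a two-variable PDE acts on a collection with one field per variable
eq2 = PDE({"u": 1, "v": 2})
state = FieldCollection(
    [ScalarField(grid, label="u"), ScalarField(grid, label="v")]
)
rhs2 = eq2.evolution_rate(state)
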
Example #3
def test_pde_critical_input():
    """test some wrong input and edge cases"""
    # test whether reserved symbols can be used as variables
    grid = grids.UnitGrid([4])
    eq = PDE({"E": 1})
    res = eq.solve(ScalarField(grid), t_range=2)
    np.testing.assert_allclose(res.data, 2)

    with pytest.raises(ValueError):
        PDE({"t": 1})

    eq = PDE({"u": 1})
    assert eq.expressions == {"u": "1.0"}
    with pytest.raises(ValueError):
        eq.evolution_rate(FieldCollection.scalar_random_uniform(2, grid))

    eq = PDE({"u": 1, "v": 2})
    assert eq.expressions == {"u": "1.0", "v": "2.0"}
    with pytest.raises(ValueError):
        eq.evolution_rate(ScalarField.random_uniform(grid))

    eq = PDE({"u": "a"})
    with pytest.raises(RuntimeError):
        eq.evolution_rate(ScalarField.random_uniform(grid))

    eq = PDE({"x": "x"})
    with pytest.raises(ValueError):
        eq.evolution_rate(ScalarField(grid))
def test_storage_copy(tmp_path):
    """ test the copy function of StorageBase """
    grid = UnitGrid([2])
    field = ScalarField(grid)

    storage_classes = {"None": None, "MemoryStorage": MemoryStorage}
    if module_available("h5py"):
        file_path = tmp_path / "test_storage_apply.hdf5"
        storage_classes["FileStorage"] = functools.partial(
            FileStorage, file_path)

    s1 = MemoryStorage()
    s1.start_writing(field, info={"b": 2})
    s1.append(field.copy(data=np.array([0, 1])), 0)
    s1.append(field.copy(data=np.array([1, 2])), 1)
    s1.end_writing()

    for name, storage_cls in storage_classes.items():
        out = None if storage_cls is None else storage_cls()
        s2 = s1.copy(out=out)
        assert storage_cls is None or s2 is out
        assert len(s2) == 2
        np.testing.assert_allclose(s2.times, s1.times)
        assert s2[0] == s1[0], name
        assert s2[1] == s1[1], name

    # test empty storage
    s1 = MemoryStorage()
    s2 = s1.copy()
    assert len(s2) == 0
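
The same `copy(out=...)` call can be used outside the test to convert an in-memory run into an HDF5 file. A minimal sketch (assuming the top-level `pde` imports and an installed h5py; the file name is arbitrary):

import numpy as np
from pde import FileStorage, MemoryStorage, ScalarField, UnitGrid

field = ScalarField(UnitGrid([2]))

# record two time points in memory
mem = MemoryStorage()
mem.start_writing(field)
mem.append(field.copy(data=np.array([0, 1])), 0)
mem.append(field.copy(data=np.array([1, 2])), 1)
mem.end_writing()

# copy the whole time series into an HDF5-backed storage
mem.copy(out=FileStorage("copied_run.hdf5"))  # hypothetical file name
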
Example #5
def test_kymograph_collection(tmp_path):
    """test making kymographs for field collections"""
    # create some storage
    field = FieldCollection([
        ScalarField(UnitGrid(8), label="a"),
        ScalarField(UnitGrid(8), label="b")
    ])
    with get_memory_storage(field) as storage:
        for i in range(8):
            field.data = i
            storage.append(field, i)

    # create single kymograph
    path = tmp_path / "test1.png"
    plotting.plot_kymograph(storage,
                            field_index=1,
                            colorbar=True,
                            transpose=True,
                            filename=path)
    assert path.stat().st_size > 0

    # create multiple kymographs
    path = tmp_path / "test2.png"
    plotting.plot_kymographs(storage, filename=path)
    assert path.stat().st_size > 0
def test_complex_operator(example_grid):
    """test using a complex operator on grid"""
    r = ScalarField.random_normal(example_grid)
    i = ScalarField.random_normal(example_grid)
    c = r + 1j * i
    assert c.is_complex
    assert np.iscomplexobj(c)

    c_lap = c.laplace("natural").data
    np.testing.assert_allclose(c_lap.real, r.laplace("natural").data)
    np.testing.assert_allclose(c_lap.imag, i.laplace("natural").data)
def test_vector_from_scalars():
    """test how to compile vector fields from scalar fields"""
    g = UnitGrid([1, 2])
    s1 = ScalarField(g, [[0, 1]])
    s2 = ScalarField(g, [[2, 3]])
    v = VectorField.from_scalars([s1, s2], label="test")
    assert v.label == "test"
    np.testing.assert_equal(v.data, [[[0, 1]], [[2, 3]]])

    with pytest.raises(ValueError):
        VectorField.from_scalars([s1, s2, s1])
def test_random_uniform_types():
    """test whether random uniform fields behave correctly for different types"""
    grid = UnitGrid([8])
    for dtype in [bool, int, float, complex]:
        field = VectorField.random_uniform(grid, dtype=dtype)
        assert field.dtype == np.dtype(dtype)
        assert isinstance(field.data.flat[0].item(), dtype)

    assert ScalarField.random_uniform(grid, 0, 1).dtype == np.dtype(float)
    assert ScalarField.random_uniform(grid, vmin=0 + 0j).dtype == np.dtype(complex)
    assert ScalarField.random_uniform(grid, vmax=1 + 0j).dtype == np.dtype(complex)
    assert ScalarField.random_uniform(grid, 0 + 0j, 1 + 0j).dtype == np.dtype(complex)
Example #9
def test_collection_plot(tmp_path):
    """test Simple simulation"""
    # create some data
    field = FieldCollection([
        ScalarField(UnitGrid([8, 8]), label="first"),
        ScalarField(UnitGrid([8, 8]))
    ])
    with get_memory_storage(field) as storage:
        storage.append(field)

    path = tmp_path / "test_collection_plot.png"
    plotting.plot_magnitudes(storage, filename=path)
    assert path.stat().st_size > 0
def test_storage_truncation(tmp_path):
    """ test whether simple trackers can be used """
    file = tmp_path / "test_storage_truncation.hdf5"
    for truncate in [True, False]:
        storages = [MemoryStorage()]
        if module_available("h5py"):
            storages.append(FileStorage(file))
        tracker_list = [s.tracker(interval=0.01) for s in storages]

        grid = UnitGrid([8, 8])
        state = ScalarField.random_uniform(grid, 0.2, 0.3)
        pde = DiffusionPDE()

        pde.solve(state, t_range=0.1, dt=0.001, tracker=tracker_list)
        if truncate:
            for storage in storages:
                storage.clear()
        pde.solve(state, t_range=[0.1, 0.2], dt=0.001, tracker=tracker_list)

        times = np.arange(0.1, 0.201, 0.01)
        if not truncate:
            times = np.r_[np.arange(0, 0.101, 0.01), times]
        for storage in storages:
            msg = f"truncate={truncate}, storage={storage}"
            np.testing.assert_allclose(storage.times, times, err_msg=msg)

        assert not storage.has_collection  # checks the last storage used above
def test_storing_collection(tmp_path):
    """ test methods specific to FieldCollections in memory storage """
    grid = UnitGrid([2, 2])
    f1 = ScalarField.random_uniform(grid, 0.1, 0.4, label="a")
    f2 = VectorField.random_uniform(grid, 0.1, 0.4, label="b")
    f3 = Tensor2Field.random_uniform(grid, 0.1, 0.4, label="c")
    fc = FieldCollection([f1, f2, f3])

    storage_classes = {"MemoryStorage": MemoryStorage}
    if module_available("h5py"):
        file_path = tmp_path / "test_storage_write.hdf5"
        storage_classes["FileStorage"] = functools.partial(
            FileStorage, file_path)

    for storage_cls in storage_classes.values():
        # store some data
        storage = storage_cls()
        storage.start_writing(fc)
        storage.append(fc, 0)
        storage.append(fc, 1)
        storage.end_writing()

        assert storage.has_collection
        assert storage.extract_field(0)[0] == f1
        assert storage.extract_field(1)[0] == f2
        assert storage.extract_field(2)[0] == f3
        assert storage.extract_field(0)[0].label == "a"
        assert storage.extract_field(0,
                                     label="new label")[0].label == "new label"
        assert storage.extract_field(0)[0].label == "a"  # do not alter label
        assert storage.extract_field("a")[0] == f1
        assert storage.extract_field("b")[0] == f2
        assert storage.extract_field("c")[0] == f3
        with pytest.raises(ValueError):
            storage.extract_field("nonsense")
Example #12
    def get_phase_field(self,
                        grid: GridBase,
                        *,
                        vmin: float = 0,
                        vmax: float = 1,
                        label: Optional[str] = None) -> ScalarField:
        """Creates an image of the droplet on the `grid`

        Args:
            grid (:class:`~pde.grids.base.GridBase`):
                The grid used for discretizing the droplet phase field
            vmin (float):
                Minimal value the phase field will attain (far away from droplet)
            vmax (float):
                Maximal value the phase field will attain (inside the droplet)
            label (str):
                The label associated with the returned scalar field

        Returns:
            :class:`~pde.fields.ScalarField`: A scalar field
            representing the droplet
        """
        data = self._get_phase_field(grid)
        data = vmin + (vmax - vmin) * data  # scale data
        return ScalarField(grid, data=data, label=label)
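
A sketch of how this method might be called, assuming the droplet object comes from the companion droplets package (py-droplets) and exposes `get_phase_field` with the signature documented above:

from pde import UnitGrid
from droplets import SphericalDroplet  # assumed companion package

grid = UnitGrid([16, 16])
drop = SphericalDroplet(position=[8, 8], radius=4)  # assumed constructor

# rasterize the droplet onto the grid, scaled to the range [0, 1]
phase_field = drop.get_phase_field(grid, vmin=0, vmax=1, label="droplet")
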
Example #13
def test_scalar_arithmetics():
    """test simple arithmetics involving scalar fields"""
    grid = UnitGrid([3, 4])
    s = ScalarField(grid, data=2)
    v = VectorField.random_uniform(grid)

    for f in [v, FieldCollection([v])]:
        f.data = s
        assert f.data.shape == (2, 3, 4)
        np.testing.assert_allclose(f.data, 2)

        f += s
        np.testing.assert_allclose(f.data, 4)
        np.testing.assert_allclose((f + s).data, 6)
        np.testing.assert_allclose((s + f).data, 6)
        f -= s
        np.testing.assert_allclose((f - s).data, 0)
        np.testing.assert_allclose((s - f).data, 0)

        f *= s
        np.testing.assert_allclose(f.data, 4)
        np.testing.assert_allclose((f * s).data, 8)
        np.testing.assert_allclose((s * f).data, 8)
        f /= s
        np.testing.assert_allclose((f / s).data, 1)
        with pytest.raises(TypeError):
            s / f
        with pytest.raises(TypeError):
            s /= f
        with pytest.raises(TypeError):
            s *= f
Example #14
def test_field_type_guessing():
    """ test the ability to guess the field type """
    for cls in [ScalarField, VectorField, Tensor2Field]:
        grid = UnitGrid([3])
        field = cls.random_normal(grid)
        s = MemoryStorage()
        s.start_writing(field)
        s.append(field, 0)
        s.append(field, 1)

        # delete information
        s._field = None
        s.info = {}

        assert not s.has_collection
        assert len(s) == 2
        assert s[0] == field

    # build a collection on the grid from the last loop iteration
    field = FieldCollection([ScalarField(grid), VectorField(grid)])
    s = MemoryStorage()
    s.start_writing(field)
    s.append(field, 0)

    assert s.has_collection

    # delete information
    s._field = None
    s.info = {}

    with pytest.raises(RuntimeError):
        s[0]
Example #15
    def get_phasefield(self,
                       grid: Optional[GridBase] = None,
                       label: Optional[str] = None) -> ScalarField:
        """create a phase field representing a list of droplets

        Args:
            grid (:class:`pde.grids.base.GridBase`):
                The grid on which the phase field is created. If omitted, the
                grid associated with the emulsion is used.
            label (str):
                Optional label for the returned scalar field

        Returns:
            :class:`~pde.fields.scalar.ScalarField`: the actual phase field
        """
        if grid is None:
            grid = self.grid
        if grid is None:
            raise RuntimeError("Grid needs to be specified")

        if len(self) == 0:
            return ScalarField(grid)

        else:
            result: ScalarField = self[0].get_phase_field(grid, label=label)
            for d in self[1:]:
                result += d.get_phase_field(grid)
            np.clip(result.data, 0, 1, out=result.data)
            return result
Example #16
def test_pde_noise(backend):
    """test noise operator on PDE class"""
    grid = grids.UnitGrid([64, 64])
    state = FieldCollection([ScalarField(grid), ScalarField(grid)])

    eq = PDE({"a": 0, "b": 0}, noise=0.5)
    res = eq.solve(state, t_range=1, backend=backend, dt=1, tracker=None)
    assert res.data.std() == pytest.approx(0.5, rel=0.1)

    eq = PDE({"a": 0, "b": 0}, noise=[0.01, 2.0])
    res = eq.solve(state, t_range=1, backend=backend, dt=1)
    assert res.data[0].std() == pytest.approx(0.01, rel=0.1)
    assert res.data[1].std() == pytest.approx(2.0, rel=0.1)

    with pytest.raises(ValueError):
        eq = PDE({"a": 0}, noise=[0.01, 2.0])
        eq.solve(ScalarField(grid), t_range=1, backend=backend, dt=1, tracker=None)
Example #17
def test_pde_bcs_error(bc):
    """test PDE with wrong boundary conditions"""
    eq = PDE({"u": "laplace(u)"}, bc=bc)
    grid = grids.UnitGrid([8, 8])
    field = ScalarField.random_normal(grid)

    for backend in ["numpy", "numba"]:
        with pytest.raises(BCDataError):
            eq.solve(field, t_range=1, dt=0.01, backend=backend, tracker=None)
Example #18
def test_pde_time_dependent_bcs(backend):
    """test PDE with time-dependent BCs"""
    field = ScalarField(grids.UnitGrid([3]))

    eq = PDE({"c": "laplace(c)"}, bc={"value_expression": "Heaviside(t - 1.5)"})

    storage = MemoryStorage()
    eq.solve(field, t_range=10, dt=1e-2, backend=backend, tracker=storage.tracker(1))

    np.testing.assert_allclose(storage[1].data, 0)
    np.testing.assert_allclose(storage[-1].data, 1, rtol=1e-3)
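
The `value_expression` boundary condition accepts other expressions of `t` as well; a minimal sketch driving the boundary value sinusoidally and recording the run in a storage (assuming the top-level `pde` imports; `sin(t)` is an illustrative choice, not from the original test):

from pde import PDE, MemoryStorage, ScalarField, UnitGrid

field = ScalarField(UnitGrid([16]))

# diffusion with a Dirichlet boundary value that oscillates in time
eq = PDE({"c": "laplace(c)"}, bc={"value_expression": "sin(t)"})

storage = MemoryStorage()
eq.solve(field, t_range=10, dt=1e-3, tracker=storage.tracker(0.5))
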
Example #19
def test_kymograph_single(tmp_path):
    """ test making kymographs for single fields """
    # create some storage
    field = ScalarField(UnitGrid(8))
    with get_memory_storage(field) as storage:
        for i in range(8):
            storage.append(field.copy(data=i), i)

    # create single kymograph
    path = tmp_path / "test1.png"
    plotting.plot_kymograph(storage,
                            colorbar=True,
                            transpose=True,
                            filename=path)
    assert path.stat().st_size > 0

    # create multiple kymographs
    path = tmp_path / "test2.png"
    plotting.plot_kymographs(storage, filename=path)
    assert path.stat().st_size > 0
def test_random_normal_types():
    """test whether random normal fields behave correctly for different types"""
    grid = UnitGrid([8])
    for dtype in [bool, int, float, complex]:
        field = VectorField.random_normal(grid, dtype=dtype)
        assert field.dtype == np.dtype(dtype)
        assert isinstance(field.data.flat[0].item(), dtype)

    assert ScalarField.random_normal(grid, 0, 1).dtype == np.dtype(float)
    assert ScalarField.random_normal(grid, mean=0 + 0j).dtype == np.dtype(complex)
    assert ScalarField.random_normal(grid, std=1 + 0j).dtype == np.dtype(complex)
    assert ScalarField.random_normal(grid, 0 + 0j, 1 + 0j).dtype == np.dtype(complex)

    m = complex(np.random.random(), np.random.random())
    s = complex(1 + np.random.random(), 1 + np.random.random())
    grid = UnitGrid([256, 256])
    field = field.random_normal(grid, m, s)
    assert np.mean(field.average) == pytest.approx(m, rel=0.1, abs=0.1)
    assert np.std(field.data.real) == pytest.approx(s.real, rel=0.1, abs=0.1)
    assert np.std(field.data.imag) == pytest.approx(s.imag, rel=0.1, abs=0.1)
Example #21
def test_storage_write(tmp_path):
    """test simple memory storage"""
    dim = 5
    grid = UnitGrid([dim])
    field = ScalarField(grid)

    storage_classes = {"MemoryStorage": MemoryStorage}
    if module_available("h5py"):
        file_path = tmp_path / "test_storage_write.hdf5"
        storage_classes["FileStorage"] = functools.partial(
            FileStorage, file_path)

    for name, storage_cls in storage_classes.items():
        storage = storage_cls(info={"a": 1})
        storage.start_writing(field, info={"b": 2})
        field.data = np.arange(dim)
        storage.append(field, 0)
        field.data = np.arange(dim)
        storage.append(field, 1)
        storage.end_writing()

        assert not storage.has_collection

        np.testing.assert_allclose(storage.times, np.arange(2))
        for f in storage:
            np.testing.assert_array_equal(f.data, np.arange(dim))
        for i in range(2):
            np.testing.assert_array_equal(storage[i].data, np.arange(dim))
        assert {"a": 1, "b": 2}.items() <= storage.info.items()

        storage = storage_cls()
        storage.clear()
        for i in range(3):
            storage.start_writing(field)
            field.data = np.arange(dim) + i
            storage.append(field, i)
            storage.end_writing()

        np.testing.assert_allclose(storage.times,
                                   np.arange(3),
                                   err_msg="storage class: " + name)
Example #22
def test_shapes_nfields(num, example_grid):
    """test single component field"""
    fields = [ScalarField.random_uniform(example_grid) for _ in range(num)]
    field = FieldCollection(fields)
    data_shape = (num, ) + example_grid.shape
    np.testing.assert_equal(field.data.shape, data_shape)
    for pf_single in field:
        np.testing.assert_equal(pf_single.data.shape, example_grid.shape)

    field_c = field.copy()
    np.testing.assert_allclose(field.data, field_c.data)
    assert field.grid == field_c.grid
Example #23
def test_pde_user_funcs():
    """test user supplied functions"""
    # test a simple case
    eq = PDE({"u": "get_x(gradient(u))"},
             user_funcs={"get_x": lambda arr: arr[0]})
    field = ScalarField.random_colored(grids.UnitGrid([32, 32]))
    rhs = eq.evolution_rate(field)
    np.testing.assert_allclose(rhs.data,
                               field.gradient("auto_periodic_neumann").data[0])
    f = eq._make_pde_rhs_numba(field)
    np.testing.assert_allclose(f(field.data, 0),
                               field.gradient("auto_periodic_neumann").data[0])
Example #24
def test_memory_storage():
    """ test methods specific to memory storage """
    sf = ScalarField(UnitGrid([1]))
    s1 = MemoryStorage()
    s1.start_writing(sf)
    s1.append(sf.copy(data=0), 0)
    s1.append(sf.copy(data=2), 1)
    s2 = MemoryStorage()
    s2.start_writing(sf)
    s2.append(sf.copy(data=1), 0)
    s2.append(sf.copy(data=3), 1)

    # test from_fields
    s3 = MemoryStorage.from_fields(s1.times, [s1[0], s1[1]])
    assert s3.times == s1.times
    np.testing.assert_allclose(s3.data, s1.data)

    # test from_collection
    s3 = MemoryStorage.from_collection([s1, s2])
    assert s3.times == s1.times
    np.testing.assert_allclose(np.ravel(s3.data), np.arange(4))
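
`MemoryStorage.from_fields` is also convenient for wrapping an existing list of snapshots without going through `start_writing`; a minimal sketch (assuming the top-level `pde` imports):

from pde import MemoryStorage, ScalarField, UnitGrid

grid = UnitGrid([4])
snapshots = [ScalarField(grid, data=t) for t in range(3)]  # states at t = 0, 1, 2

storage = MemoryStorage.from_fields([0, 1, 2], snapshots)
assert storage.times == [0, 1, 2]
assert len(storage) == 3
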
Example #25
def test_storing_extract_range(tmp_path):
    """test methods specific to FieldCollections in memory storage"""
    sf = ScalarField(UnitGrid([1]))

    storage_classes = {"MemoryStorage": MemoryStorage}
    if module_available("h5py"):
        file_path = tmp_path / "test_storage_write.hdf5"
        storage_classes["FileStorage"] = functools.partial(
            FileStorage, file_path)

    for storage_cls in storage_classes.values():
        # store some data
        s1 = storage_cls()
        s1.start_writing(sf)
        sf.data = np.array([0])
        s1.append(sf, 0)
        sf.data = np.array([2])
        s1.append(sf, 1)
        s1.end_writing()

        np.testing.assert_equal(s1[0].data, 0)
        np.testing.assert_equal(s1[1].data, 2)
        np.testing.assert_equal(s1[-1].data, 2)
        np.testing.assert_equal(s1[-2].data, 0)

        with pytest.raises(IndexError):
            s1[2]
        with pytest.raises(IndexError):
            s1[-3]

        # test extraction
        s2 = s1.extract_time_range()
        assert s2.times == list(s1.times)
        np.testing.assert_allclose(s2.data, s1.data)
        s3 = s1.extract_time_range(0.5)
        assert s3.times == s1.times[:1]
        np.testing.assert_allclose(s3.data, s1.data[:1])
        s4 = s1.extract_time_range((0.5, 1.5))
        assert s4.times == s1.times[1:]
        np.testing.assert_allclose(s4.data, s1.data[1:])
Example #26
def test_pde_scalar():
    """test PDE with a single scalar field"""
    eq = PDE({"u": "laplace(u) + exp(-t) + sin(t)"})
    assert eq.explicit_time_dependence
    assert not eq.complex_valued
    grid = grids.UnitGrid([8])
    field = ScalarField.random_normal(grid)

    res_a = eq.solve(field, t_range=1, dt=0.01, backend="numpy", tracker=None)
    res_b = eq.solve(field, t_range=1, dt=0.01, backend="numba", tracker=None)

    res_a.assert_field_compatible(res_b)
    np.testing.assert_allclose(res_a.data, res_b.data)
Example #27
def test_pde_bcs(bc):
    """test PDE with boundary conditions"""
    eq = PDE({"u": "laplace(u)"}, bc=bc)
    assert not eq.explicit_time_dependence
    assert not eq.complex_valued
    grid = grids.UnitGrid([8])
    field = ScalarField.random_normal(grid)

    res_a = eq.solve(field, t_range=1, dt=0.01, backend="numpy", tracker=None)
    res_b = eq.solve(field, t_range=1, dt=0.01, backend="numba", tracker=None)

    res_a.assert_field_compatible(res_b)
    np.testing.assert_allclose(res_a.data, res_b.data)
Example #28
def test_interactive_plotting():
    """ test plot_interactive """

    # create some data
    field = ScalarField.random_uniform(UnitGrid([8]))
    with get_memory_storage(field) as storage:
        for i in range(8):
            storage.append(field.copy(data=i), i)

    plotting.plot_interactive(storage,
                              viewer_args={
                                  "show": False,
                                  "close": True
                              })
Example #29
def test_scalar_field_plot(tmp_path):
    """test ScalarFieldPlot class"""
    path = tmp_path / "test_scalar_field_plot.png"

    # create some data
    state = ScalarField.random_uniform(UnitGrid([16, 16]))
    for scale in [(0, 1), 1, "automatic", "symmetric", "unity"]:
        sfp = plotting.ScalarFieldPlot(state, scale=scale)
        sfp.savefig(path)
        assert path.stat().st_size > 0

    sfp = plotting.ScalarFieldPlot(state, quantities={"source": None})
    sfp.savefig(path)
    assert path.stat().st_size > 0
Example #30
def test_pde_complex():
    """test complex valued PDE"""
    eq = PDE({"p": "I * laplace(p)"})
    assert not eq.explicit_time_dependence
    assert eq.complex_valued

    field = ScalarField.random_uniform(grids.UnitGrid([4]))
    assert not field.is_complex
    res1 = eq.solve(field, t_range=1, dt=0.1, backend="numpy", tracker=None)
    assert res1.is_complex
    res2 = eq.solve(field, t_range=1, dt=0.1, backend="numpy", tracker=None)
    assert res2.is_complex

    np.testing.assert_allclose(res1.data, res2.data)