def test_unit_grid_2d():
    """ test 2D grids """
    # test the special case of a fully periodic grid
    grid = UnitGrid([4, 4], periodic=True)
    assert grid.dim == 2
    assert grid.numba_type == "f8[:, :]"
    assert grid.volume == 16
    np.testing.assert_array_equal(grid.discretization, np.ones(2))
    assert grid.get_image_data(np.zeros(grid.shape))["extent"] == [0, 4, 0, 4]
    for _ in range(10):
        p = np.random.randn(2)
        assert np.all(grid.polar_coordinates_real(p) < np.sqrt(8))
    large_enough = grid.polar_coordinates_real((0, 0)) > np.sqrt(4)
    assert np.any(large_enough)

    periodic = random.choices([True, False], k=2)
    grid = UnitGrid([4, 4], periodic=periodic)
    assert grid.dim == 2
    assert grid.volume == 16
    assert grid.polar_coordinates_real((1, 1)).shape == (4, 4)

    grid = UnitGrid([4, 8], periodic=periodic)
    assert grid.dim == 2
    assert grid.volume == 32
    assert grid.polar_coordinates_real((1, 1)).shape == (4, 8)

    # test conversion between polar and Cartesian coordinates
    c1 = grid.cell_coords
    p = np.random.random(2) * grid.shape
    d, a = grid.polar_coordinates_real(p, ret_angle=True)
    c2 = grid.from_polar_coordinates(d, a, p)
    assert np.allclose(grid.distance_real(c1, c2), 0)

    # test boundary points
    np.testing.assert_equal(
        grid._boundary_coordinates(0, False),
        np.c_[np.full(8, 0), np.linspace(0.5, 7.5, 8)],
    )
    np.testing.assert_equal(
        grid._boundary_coordinates(0, True),
        np.c_[np.full(8, 4), np.linspace(0.5, 7.5, 8)],
    )
    np.testing.assert_equal(
        grid._boundary_coordinates(1, False),
        np.c_[np.linspace(0.5, 3.5, 4), np.full(4, 0)],
    )
    np.testing.assert_equal(
        grid._boundary_coordinates(1, True),
        np.c_[np.linspace(0.5, 3.5, 4), np.full(4, 8)],
    )
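
For context, a minimal usage sketch of the grid API exercised above (not part of the original tests). It assumes the imports these snippets use implicitly, e.g. numpy as np, pytest, random, functools, and the relevant py-pde classes (UnitGrid, CartesianGrid, ScalarField, VectorField, ...); names ending in _demo are illustrative only.

grid_demo = UnitGrid([4, 4], periodic=True)                # 4x4 cells with unit spacing
field_demo = ScalarField.random_uniform(grid_demo)         # random data on the grid
lap_demo = field_demo.laplace(bc="auto_periodic_neumann")  # Laplacian with automatic BCs
print(grid_demo.volume, lap_demo.data.shape)               # 16.0 (4, 4)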
@pytest.mark.parametrize("periodic", [True, False])
def test_singular_dimensions_3d(periodic):
    """test grids with singular dimensions"""
    dim = np.random.randint(3, 5)
    g1 = UnitGrid([dim], periodic=periodic)
    g3a = UnitGrid([dim, 1, 1], periodic=periodic)
    g3b = UnitGrid([1, 1, dim], periodic=periodic)

    field = ScalarField.random_uniform(g1)
    expected = field.laplace("auto_periodic_neumann").data
    for g in [g3a, g3b]:
        f = ScalarField(g, data=field.data.reshape(g.shape))
        res = f.laplace("auto_periodic_neumann").data.reshape(g1.shape)
        np.testing.assert_allclose(expected, res)

@pytest.mark.parametrize("transpose", [True, False])
def test_vector_plotting_2d(transpose):
    """test plotting of 2d vector fields"""
    grid = UnitGrid([3, 4])
    field = VectorField.random_uniform(grid, 0.1, 0.9)

    for method in ["quiver", "streamplot"]:
        ref = field.plot(method=method, transpose=transpose)
        field._update_plot(ref)

    # test sub-sampling
    grid = UnitGrid([32, 15])
    field = VectorField.random_uniform(grid, 0.1, 0.9)
    field.get_vector_data(transpose=transpose, max_points=7)
@pytest.mark.parametrize("periodic", [True, False])
def test_unit_grid_1d(periodic):
    """test 1D grids"""
    grid = UnitGrid(4, periodic=periodic)
    assert grid.dim == 1
    assert grid.numba_type == "f8[:]"
    assert grid.volume == 4
    np.testing.assert_array_equal(grid.discretization, np.ones(1))
    dist, angle = grid.polar_coordinates_real(0, ret_angle=True)
    if periodic:
        np.testing.assert_allclose(dist, [0.5, 1.5, 1.5, 0.5])
    else:
        np.testing.assert_allclose(dist, np.arange(4) + 0.5)
    assert angle.shape == (4, )

    grid = UnitGrid(8, periodic=periodic)
    assert grid.dim == 1
    assert grid.volume == 8

    norm_numba = grid.make_normalize_point_compiled(reflect=False)

    def norm_numba_wrap(x):
        y = np.array([x])
        norm_numba(y)
        return y

    for normalize in [
            partial(grid.normalize_point, reflect=False), norm_numba_wrap
    ]:
        if periodic:
            np.testing.assert_allclose(normalize(-1e-10), 8 - 1e-10)
            np.testing.assert_allclose(normalize(1e-10), 1e-10)
            np.testing.assert_allclose(normalize(8 - 1e-10), 8 - 1e-10)
            np.testing.assert_allclose(normalize(8 + 1e-10), 1e-10)
        else:
            for x in [-1e-10, 1e-10, 8 - 1e-10, 8 + 1e-10]:
                np.testing.assert_allclose(normalize(x), x)

    grid = UnitGrid(8, periodic=periodic)

    # test conversion between polar and Cartesian coordinates
    c1 = grid.cell_coords
    p = np.random.random(1) * grid.shape
    d, a = grid.polar_coordinates_real(p, ret_angle=True)
    c2 = grid.from_polar_coordinates(d, a, p)
    assert np.allclose(grid.distance_real(c1, c2), 0)

    # test boundary points
    np.testing.assert_equal(grid._boundary_coordinates(0, False),
                            np.array([0]))
    np.testing.assert_equal(grid._boundary_coordinates(0, True), np.array([8]))
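
# Note on the normalization test above: with periodic=True, normalize_point wraps
# coordinates back into [0, 8), so values just below 0 or just above 8 map to the
# opposite end of the domain; with periodic=False (and reflect=False) out-of-domain
# points are returned unchanged.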
def test_callback_tracker():
    """ test trackers that support a callback """
    data = []

    def store_mean_data(state):
        data.append(state.average)

    def get_mean_data(state):
        return state.average

    grid = UnitGrid([4, 4])
    state = ScalarField.random_uniform(grid, 0.2, 0.3)
    pde = DiffusionPDE()
    data_tracker = trackers.DataTracker(get_mean_data, interval=0.1)
    callback_tracker = trackers.CallbackTracker(store_mean_data, interval=0.1)
    tracker_list = [data_tracker, callback_tracker]
    pde.solve(state,
              t_range=0.5,
              dt=0.005,
              tracker=tracker_list,
              backend="numpy")

    np.testing.assert_array_equal(data, data_tracker.data)

    data = []

    def store_time(state, t):
        data.append(t)

    def get_time(state, t):
        return t

    grid = UnitGrid([4, 4])
    state = ScalarField.random_uniform(grid, 0.2, 0.3)
    pde = DiffusionPDE()
    data_tracker = trackers.DataTracker(get_time, interval=0.1)
    tracker_list = [
        trackers.CallbackTracker(store_time, interval=0.1), data_tracker
    ]
    pde.solve(state,
              t_range=0.5,
              dt=0.005,
              tracker=tracker_list,
              backend="numpy")

    ts = np.arange(0, 0.55, 0.1)
    np.testing.assert_allclose(data, ts, atol=1e-2)
    np.testing.assert_allclose(data_tracker.data, ts, atol=1e-2)
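
# Both trackers fire on the same interval (0.1), so the callback list and the
# DataTracker record values at t = 0, 0.1, ..., 0.5, which is what the reference
# array ts encodes.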
def test_pde_poisson_solver_1d():
    """test the poisson solver on 1d grids"""
    # solve Laplace's equation
    grid = UnitGrid([4])
    res = solve_laplace_equation(grid, bc=[{"value": -1}, {"value": 3}])
    np.testing.assert_allclose(res.data, grid.axes_coords[0] - 1)

    res = solve_laplace_equation(grid, bc=[{"value": -1}, {"derivative": 1}])
    np.testing.assert_allclose(res.data, grid.axes_coords[0] - 1)

    # test Laplace's equation with a second-order ("extrapolate") boundary condition
    res = solve_laplace_equation(grid, bc=[{"value": -1}, "extrapolate"])

    # solve Poisson's equation
    grid = CartesianGrid([[0, 1]], 4)
    field = ScalarField(grid, data=1)

    res = solve_poisson_equation(field, bc=[{"value": 1}, {"derivative": 1}])
    xs = grid.axes_coords[0]
    np.testing.assert_allclose(res.data, 1 + 0.5 * xs**2, rtol=1e-2)

    # test inconsistent problem
    field.data = 1
    with pytest.raises(RuntimeError, match="Neumann"):
        solve_poisson_equation(field, {"derivative": 0})
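
# Note: with pure no-flux (Neumann) boundary conditions, Poisson's equation only
# has a solution if the source term integrates to zero over the domain; the
# constant source used above violates this compatibility condition, hence the
# RuntimeError mentioning "Neumann".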
def main():
    """ main routine testing the performance """
    print("Reports calls-per-second (larger is better)\n")

    # Cartesian grid with different shapes and boundary conditions
    for size in [32, 512]:
        grid = UnitGrid((size, size), periodic=False)
        print(grid)

        field = ScalarField.random_normal(grid)
        bc_value = np.ones(size)
        result = field.laplace(bc={"value": 1}).data

        for bc in ["scalar", "array", "linked"]:
            if bc == "scalar":
                bcs = {"value": 1}
            elif bc == "array":
                bcs = {"value": bc_value}
            elif bc == "linked":
                bcs = Boundaries.from_data(grid, {"value": bc_value}, rank=0)
                for ax, upper in grid._iter_boundaries():
                    bcs[ax][upper].link_value(bc_value)
            # result = field.laplace(bc=bcs).data
            laplace = grid.get_operator("laplace", bc=bcs)
            # call once to pre-compile and test result
            np.testing.assert_allclose(laplace(field.data), result)
            speed = estimate_computation_speed(laplace, field.data)
            print(f"{bc:>6s}:{int(speed):>9d}")

        print()
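
# The benchmark above relies on helpers defined elsewhere in the script, most
# notably `estimate_computation_speed` (e.g. from pde.tools.misc), plus the usual
# imports (UnitGrid, ScalarField, Boundaries, numpy as np).
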
def test_setting_specific_bcs():
    """test the interface of setting specific conditions"""
    grid = UnitGrid([4, 4], periodic=[False, True])
    bcs = grid.get_boundary_conditions("auto_periodic_neumann")

    # test non-periodic axis
    assert str(bcs[0]) == '"derivative"'
    assert str(bcs["x"]) == '"derivative"'
    bcs["x"] = "value"
    assert str(bcs["x"]) == '"value"'
    bcs["left"] = "derivative"
    assert str(bcs["left"]) == '"derivative"'
    assert str(bcs["right"]) == '"value"'
    bcs["right"] = "derivative"
    assert str(bcs["x"]) == '"derivative"'
    with pytest.raises(PeriodicityError):
        bcs["x"] = "periodic"

    # test periodic axis
    assert str(bcs[1]) == '"periodic"'
    assert str(bcs["y"]) == '"periodic"'
    assert str(bcs["top"]) == '"periodic"'
    bcs["y"] = "periodic"
    with pytest.raises(PeriodicityError):
        bcs["y"] = "value"
    with pytest.raises(PeriodicityError):
        bcs["top"] = "value"

    # test wrong input
    with pytest.raises(KeyError):
        bcs["nonsense"]
    with pytest.raises(TypeError):
        bcs[None]
    with pytest.raises(KeyError):
        bcs["nonsense"] = None
def test_simple_progress():
    """ simple test for basic progress bar """
    pbar = trackers.ProgressTracker(interval=1)
    field = ScalarField(UnitGrid([3]))
    pbar.initialize(field)
    pbar.handle(field, 2)
    pbar.finalize()
@pytest.mark.parametrize("periodic", [True, False])
def test_unit_rect_grid(periodic):
    """test whether the rectangular grid behaves like a unit grid in special cases"""
    dim = random.randrange(1, 4)
    shape = np.random.randint(2, 10, size=dim)
    g1 = UnitGrid(shape, periodic=periodic)
    g2 = CartesianGrid(np.c_[np.zeros(dim), shape], shape, periodic=periodic)
    volume = np.prod(shape)
    for g in [g1, g2]:
        assert g.volume == pytest.approx(volume)
        assert g.integrate(1) == pytest.approx(volume)
        assert g.make_integrator()(np.ones(shape)) == pytest.approx(volume)

    assert g1.dim == g2.dim == dim
    np.testing.assert_array_equal(g1.shape, g2.shape)
    assert g1.typical_discretization == pytest.approx(
        g2.typical_discretization)

    for _ in range(10):
        p1, p2 = np.random.normal(scale=10, size=(2, dim))
        assert g1.distance_real(p1,
                                p2) == pytest.approx(g2.distance_real(p1, p2))

    p0 = np.random.normal(scale=10, size=dim)
    np.testing.assert_allclose(g1.polar_coordinates_real(p0),
                               g2.polar_coordinates_real(p0))
def test_interpolate_3d():
    """ test interpolation of 3d grid """
    grid = UnitGrid([2, 2, 2], periodic=False)
    intp = grid.make_interpolator_compiled(bc={"type": "value", "value": 1})

    val = intp(np.zeros((2, 2, 2)), np.array([0, 1, 1]))
    assert val == pytest.approx(1)

@pytest.mark.parametrize("scheme", ["euler", "runge-kutta"])  # assumed parametrization
def test_adaptive_solver_nan(scheme):
    """test whether the adaptive solver can treat nans"""
    frequency = 5 if scheme == "euler" else 20

    class MockPDE(PDEBase):
        """simple PDE which returns NaN every 5 evaluations"""

        evaluations = 0

        def evolution_rate(self, state, t):
            MockPDE.evaluations += 1
            if MockPDE.evaluations % frequency == 1:
                return ScalarField(state.grid, data=np.nan)
            else:
                return state.copy()

    field = ScalarField(UnitGrid([2]))
    eq = MockPDE()
    sol, info = eq.solve(
        field,
        1,
        dt=0.1,
        method="explicit",
        scheme=scheme,
        backend="numpy",
        adaptive=True,
        ret_info=True,
    )

    np.testing.assert_allclose(sol.data, 0)
    assert info["solver"]["dt_last"] > 0.1
    assert info["solver"]["dt_adaptive"]
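
# Note: when the evolution rate contains NaN, the adaptive stepper rejects the
# step and retries with a smaller dt instead of aborting; since the regular rate
# is just the (all-zero) state itself, the solution stays at zero while the
# adaptive time step can still grow past the initial dt=0.1.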
@pytest.mark.parametrize("compression", [True, False])
def test_storage_fixed_size(compression, tmp_path):
    """test setting fixed size of FileStorage objects"""
    c = ScalarField(UnitGrid([2]), data=1)

    for fixed in [True, False]:
        path = tmp_path / f"test_storage_fixed_size_{fixed}.hdf5"
        storage = FileStorage(path,
                              max_length=1 if fixed else None,
                              compression=compression)
        assert len(storage) == 0

        storage.start_writing(c)
        assert len(storage) == 0
        storage.append(c, 0)
        assert len(storage) == 1

        if fixed:
            with pytest.raises((TypeError, ValueError, RuntimeError)):
                storage.append(c, 1)
            assert len(storage) == 1
            np.testing.assert_allclose(storage.times, [0])
        else:
            storage.append(c, 1)
            assert len(storage) == 2
            np.testing.assert_allclose(storage.times, [0, 1])
@pytest.mark.parametrize("compression", [True, False])
def test_simulation_persistence(compression, tmp_path):
    """test whether a tracker can accurately store information about
    simulation"""
    path = tmp_path / "test_simulation_persistence.hdf5"
    storage = FileStorage(path, compression=compression)

    # write some simulation data
    pde = DiffusionPDE()
    grid = UnitGrid([16, 16])  # generate grid
    state = ScalarField.random_uniform(grid, 0.2, 0.3)
    pde.solve(state,
              t_range=0.11,
              dt=0.001,
              tracker=storage.tracker(interval=0.05))
    storage.close()

    # read the data
    storage = FileStorage(path)
    np.testing.assert_almost_equal(storage.times, [0, 0.05, 0.1])
    data = np.array(storage.data)
    assert data.shape == (3, ) + state.data.shape
    grid_res = storage.grid
    assert grid == grid_res
    grid_res = storage.grid  # reading again should give the same grid
    assert grid == grid_res
def test_keep_opened(tmp_path):
    """test the keep opened option"""
    path = tmp_path / "test_keep_opened.hdf5"

    c = ScalarField(UnitGrid([2]), data=1)
    storage = FileStorage(path, keep_opened=False)
    storage.start_writing(c)
    assert len(storage) == 0
    storage.append(c, 0)
    assert storage._file_state == "closed"
    assert len(storage) == 1
    assert storage._file_state == "reading"
    storage.append(c, 1)
    assert len(storage) == 2

    storage2 = FileStorage(path, write_mode="append")
    assert storage.times == storage2.times
    assert storage.data == storage2.data
    storage.close()  # close the old storage to enable writing here
    storage2.start_writing(c)
    storage2.append(c, 2)
    storage2.close()

    assert len(storage2) == 3
    np.testing.assert_allclose(storage2.times, np.arange(3))
def test_memory_storage():
    """test methods specific to memory storage"""
    sf = ScalarField(UnitGrid([1]))
    s1 = MemoryStorage()
    s1.start_writing(sf)
    sf.data = 0
    s1.append(sf, 0)
    sf.data = 2
    s1.append(sf, 1)

    s2 = MemoryStorage()
    s2.start_writing(sf)
    sf.data = 1
    s2.append(sf, 0)
    sf.data = 3
    s2.append(sf, 1)

    # test from_fields
    s3 = MemoryStorage.from_fields(s1.times, [s1[0], s1[1]])
    assert s3.times == s1.times
    np.testing.assert_allclose(s3.data, s1.data)

    # test from_collection
    s3 = MemoryStorage.from_collection([s1, s2])
    assert s3.times == s1.times
    np.testing.assert_allclose(np.ravel(s3.data), np.arange(4))
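
# from_collection merges the storages field-wise at each time point: s1 holds
# [0, 2] and s2 holds [1, 3], so ravelling the combined data yields 0, 1, 2, 3.
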
def test_individual_boundaries():
    """ test setting individual boundaries """
    g = UnitGrid([2])
    for data in [
        "value",
        {"value": 1},
        {"type": "value", "value": 1},
        "derivative",
        {"derivative": 1},
        {"type": "derivative", "value": 1},
        {"mixed": 1},
        {"type": "mixed", "value": 1},
        "extrapolate",
    ]:
        bc = BCBase.from_data(g, 0, upper=True, data=data, rank=0)

        assert isinstance(str(bc), str)
        assert isinstance(repr(bc), str)
        assert bc.rank == 0
        assert bc.homogeneous
        bc.check_value_rank(0)
        with pytest.raises(RuntimeError):
            bc.check_value_rank(1)

        for bc_copy in [BCBase.from_data(g, 0, upper=True, data=bc, rank=0), bc.copy()]:
            assert bc == bc_copy
            assert bc._cache_hash() == bc_copy._cache_hash()

    assert bc.extract_component() == bc
def get_performance_data(periodic=False):
    """ obtain the data used in the performance plot
    
    Args:
        periodic (bool): The boundary conditions of the underlying grid
        
    Returns:
        dict: The durations of calculating the Laplacian on different grids
        using different methods
    """
    sizes = 2**np.arange(3, 13)

    statistics = {}
    for size in display_progress(sizes):
        data = {}
        grid = UnitGrid([size] * 2, periodic=periodic)
        test_data = np.random.randn(*grid.shape)

        for method in ["numba", "scipy"]:
            op = grid.get_operator("laplace", bc="natural", method=method)
            data[method] = time_function(op, test_data)

        if opencv_laplace:
            data["opencv"] = time_function(opencv_laplace, test_data)

        statistics[int(size)] = data

    return statistics
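
# This helper assumes `display_progress` (e.g. pde.tools.output.display_progress),
# a timing routine `time_function`, and an optional `opencv_laplace` wrapper are
# defined elsewhere in the benchmarking script.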
def test_interpolate_1d():
    """ test interpolation of 1d grid """
    grid = UnitGrid(2, periodic=False)
    intp = grid.make_interpolator_compiled(bc={"type": "value", "value": 1})

    assert intp(np.zeros(2), np.zeros(1)) == pytest.approx(1)
    assert intp(np.zeros(2), np.ones(1)) == pytest.approx(0)
    with pytest.raises(DomainError):
        intp(np.zeros(2), np.array([-1]))
    with pytest.raises(DomainError):
        intp(np.zeros(2), np.array([3]))

    grid_per = UnitGrid(2, periodic=True)
    intp = grid_per.make_interpolator_compiled(bc="natural")
    for pos in [-1, 0, 1, 2, 3]:
        assert intp(np.arange(2), np.array([pos])) == pytest.approx(0.5)
def test_interactive_collection_plotting():
    """ test the interactive plotting """
    grid = UnitGrid([3, 3])
    sf = ScalarField.random_uniform(grid, 0.1, 0.9)
    vf = VectorField.random_uniform(grid, 0.1, 0.9)
    field = FieldCollection([sf, vf])
    field.plot_interactive(viewer_args={"show": False, "close": True})
def test_field_type_guessing():
    """ test the ability to guess the field type """
    for cls in [ScalarField, VectorField, Tensor2Field]:
        grid = UnitGrid([3])
        field = cls.random_normal(grid)
        s = MemoryStorage()
        s.start_writing(field)
        s.append(field, 0)
        s.append(field, 1)

        # delete information
        s._field = None
        s.info = {}

        assert not s.has_collection
        assert len(s) == 2
        assert s[0] == field

    field = FieldCollection([ScalarField(grid), VectorField(grid)])
    s = MemoryStorage()
    s.start_writing(field)
    s.append(field, 0)

    assert s.has_collection

    # delete information
    s._field = None
    s.info = {}

    with pytest.raises(RuntimeError):
        s[0]
def test_emulsion_two():
    """test an emulsions with two droplets"""
    grid = UnitGrid([30])
    e = Emulsion([DiffuseDroplet([10], 3, 1)], grid=grid)
    e1 = Emulsion([DiffuseDroplet([20], 5, 1)], grid=grid)
    e.extend(e1)
    assert e
    assert len(e) == 2
    assert e == e.copy()
    assert e is not e.copy()
    assert e.interface_width == pytest.approx(1)
    assert e.total_droplet_volume == pytest.approx(16)

    dists = e.get_pairwise_distances()
    np.testing.assert_array_equal(dists, np.array([[0, 10], [10, 0]]))
    expect = {
        "count": 2,
        "radius_mean": 4,
        "radius_std": 1,
        "volume_mean": 8,
        "volume_std": 2,
    }
    assert e.get_size_statistics() == expect

    np.testing.assert_array_equal(e.get_neighbor_distances(False), np.array([10, 10]))
    np.testing.assert_array_equal(e.get_neighbor_distances(True), np.array([2, 2]))
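
# Size statistics check: the radii are 3 and 5 (mean 4, std 1); in one dimension a
# droplet's "volume" is its diameter 2*r, i.e. 6 and 10 (mean 8, std 2), which also
# explains the total droplet volume of 16.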
def test_generic_cartesian_grid():
    """test generic cartesian grid functions"""
    for dim in (1, 2, 3):
        periodic = random.choices([True, False], k=dim)
        shape = np.random.randint(2, 8, size=dim)
        a = np.random.random(dim)
        b = a + np.random.random(dim)

        cases = [
            UnitGrid(shape, periodic=periodic),
            CartesianGrid(np.c_[a, b], shape, periodic=periodic),
        ]
        for grid in cases:
            assert grid.dim == dim
            dim_axes = len(grid.axes) + len(grid.axes_symmetric)
            assert dim_axes == dim
            vol = np.prod(grid.discretization) * np.prod(shape)
            assert grid.volume == pytest.approx(vol)
            assert grid.uniform_cell_volumes

            # random points
            points = [[np.random.uniform(a[i], b[i]) for i in range(dim)]
                      for _ in range(10)]
            c = grid.point_to_cell(points)
            p = grid.cell_to_point(c)
            np.testing.assert_array_equal(c, grid.point_to_cell(p))

            assert grid.contains_point(grid.get_random_point())
            w = 0.499 * (b - a).min()
            assert grid.contains_point(grid.get_random_point(w))
            assert "laplace" in grid.operators
def test_diffusion_cached():
    """test some caching of rhs of the simple diffusion model"""
    grid = UnitGrid([8])
    c0 = ScalarField.random_uniform(grid)

    # first run without cache
    eq1 = DiffusionPDE(diffusivity=1)
    eq1.cache_rhs = False
    c1a = eq1.solve(c0, t_range=1, dt=0.1, backend="numba", tracker=None)

    eq1.diffusivity = 0.1
    c1b = eq1.solve(c1a, t_range=1, dt=0.1, backend="numba", tracker=None)

    # then run with cache
    eq2 = DiffusionPDE(diffusivity=1)
    eq2.cache_rhs = True
    c2a = eq2.solve(c0, t_range=1, dt=0.1, backend="numba", tracker=None)

    eq2.diffusivity = 0.1
    c2b = eq2.solve(c2a, t_range=1, dt=0.1, backend="numba", tracker=None)

    eq2._cache = {}  # clear cache
    c2c = eq2.solve(c2a, t_range=1, dt=0.1, backend="numba", tracker=None)

    np.testing.assert_allclose(c1a.data, c2a.data)
    assert not np.allclose(c1b.data, c2b.data)
    np.testing.assert_allclose(c1b.data, c2c.data)
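
# With cache_rhs=True the compiled right-hand side is reused, so changing
# eq2.diffusivity after the first solve has no effect (c2b differs from the
# uncached c1b); clearing the cache forces recompilation, which is why c2c
# matches c1b again.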
def test_mixed_boundary_condition():
    """test limiting cases of the mixed boundary condition"""
    g = UnitGrid([2])
    d = np.random.random(2)
    g1 = g.make_operator("gradient", bc=[{"mixed": 0}, {"mixed": np.inf}])
    g2 = g.make_operator("gradient", bc=["derivative", "value"])
    np.testing.assert_allclose(g1(d), g2(d))
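
# The mixed (Robin) condition prescribes d_n u + gamma * u = beta: gamma = 0
# reduces to a derivative (Neumann) condition and gamma -> infinity to a value
# (Dirichlet) condition, which is the equivalence checked above.
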
def test_storing_collection(tmp_path):
    """ test methods specific to FieldCollections in memory storage """
    grid = UnitGrid([2, 2])
    f1 = ScalarField.random_uniform(grid, 0.1, 0.4, label="a")
    f2 = VectorField.random_uniform(grid, 0.1, 0.4, label="b")
    f3 = Tensor2Field.random_uniform(grid, 0.1, 0.4, label="c")
    fc = FieldCollection([f1, f2, f3])

    storage_classes = {"MemoryStorage": MemoryStorage}
    if module_available("h5py"):
        file_path = tmp_path / "test_storage_write.hdf5"
        storage_classes["FileStorage"] = functools.partial(
            FileStorage, file_path)

    for storage_cls in storage_classes.values():
        # store some data
        storage = storage_cls()
        storage.start_writing(fc)
        storage.append(fc, 0)
        storage.append(fc, 1)
        storage.end_writing()

        assert storage.has_collection
        assert storage.extract_field(0)[0] == f1
        assert storage.extract_field(1)[0] == f2
        assert storage.extract_field(2)[0] == f3
        assert storage.extract_field(0)[0].label == "a"
        assert storage.extract_field(0,
                                     label="new label")[0].label == "new label"
        assert storage.extract_field(0)[0].label == "a"  # do not alter label
        assert storage.extract_field("a")[0] == f1
        assert storage.extract_field("b")[0] == f2
        assert storage.extract_field("c")[0] == f3
        with pytest.raises(ValueError):
            storage.extract_field("nonsense")
def test_runtime_tracker():
    """ test the RuntimeTracker """
    s = ScalarField.random_uniform(UnitGrid([128]))
    tracker = trackers.RuntimeTracker("0:01")
    sol = ExplicitSolver(DiffusionPDE())
    con = Controller(sol, t_range=1e4, tracker=["progress", tracker])
    con.run(s, dt=1e-3)
def test_storage_copy(tmp_path):
    """ test the copy function of StorageBase """
    grid = UnitGrid([2])
    field = ScalarField(grid)

    storage_classes = {"None": None, "MemoryStorage": MemoryStorage}
    if module_available("h5py"):
        file_path = tmp_path / "test_storage_apply.hdf5"
        storage_classes["FileStorage"] = functools.partial(
            FileStorage, file_path)

    s1 = MemoryStorage()
    s1.start_writing(field, info={"b": 2})
    s1.append(field.copy(data=np.array([0, 1])), 0)
    s1.append(field.copy(data=np.array([1, 2])), 1)
    s1.end_writing()

    for name, storage_cls in storage_classes.items():
        out = None if storage_cls is None else storage_cls()
        s2 = s1.copy(out=out)
        assert storage_cls is None or s2 is out
        assert len(s2) == 2
        np.testing.assert_allclose(s2.times, s1.times)
        assert s2[0] == s1[0], name
        assert s2[1] == s1[1], name

    # test empty storage
    s1 = MemoryStorage()
    s2 = s1.copy()
    assert len(s2) == 0
def test_trackers():
    """ test whether simple trackers can be used """
    times = []

    def store_time(state, t):
        times.append(t)

    def get_data(state):
        return {"integral": state.integral}

    devnull = open(os.devnull, "w")
    data = trackers.DataTracker(get_data, interval=0.1)
    tracker_list = [
        trackers.PrintTracker(interval=0.1, stream=devnull),
        trackers.CallbackTracker(store_time, interval=0.1),
        None,  # should be ignored
        data,
    ]
    if module_available("matplotlib"):
        tracker_list.append(trackers.PlotTracker(interval=0.1, show=False))

    grid = UnitGrid([16, 16])
    state = ScalarField.random_uniform(grid, 0.2, 0.3)
    pde = DiffusionPDE()
    pde.solve(state, t_range=1, dt=0.005, tracker=tracker_list)

    devnull.close()

    assert times == data.times
    if module_available("pandas"):
        df = data.dataframe
        np.testing.assert_allclose(df["time"], times)
        np.testing.assert_allclose(df["integral"], state.integral)
def test_storage_truncation(tmp_path):
    """ test whether simple trackers can be used """
    file = tmp_path / "test_storage_truncation.hdf5"
    for truncate in [True, False]:
        storages = [MemoryStorage()]
        if module_available("h5py"):
            storages.append(FileStorage(file))
        tracker_list = [s.tracker(interval=0.01) for s in storages]

        grid = UnitGrid([8, 8])
        state = ScalarField.random_uniform(grid, 0.2, 0.3)
        pde = DiffusionPDE()

        pde.solve(state, t_range=0.1, dt=0.001, tracker=tracker_list)
        if truncate:
            for storage in storages:
                storage.clear()
        pde.solve(state, t_range=[0.1, 0.2], dt=0.001, tracker=tracker_list)

        times = np.arange(0.1, 0.201, 0.01)
        if not truncate:
            times = np.r_[np.arange(0, 0.101, 0.01), times]
        for storage in storages:
            msg = f"truncate={truncate}, storage={storage}"
            np.testing.assert_allclose(storage.times, times, err_msg=msg)

        assert not storage.has_collection