def test_gradient(space, method, padding):
    """Discretized spatial gradient operator."""
    # Looser tolerance for single precision.
    places = 2 if space.dtype == np.float32 else 4

    # A gradient on a plain rn space (no discretization) must be rejected.
    with pytest.raises(TypeError):
        Gradient(odl.rn(1), method=method)

    # `padding` is either (mode, constant) or just the mode with constant 0.
    if isinstance(padding, tuple):
        pad_mode, pad_const = padding
    else:
        pad_mode, pad_const = padding, 0

    # Random DiscreteLp element and its raw array view.
    x = noise_element(space)
    x_arr = x.asarray()

    # Build and apply the gradient; one component per spatial axis expected.
    grad_op = Gradient(space, method=method, pad_mode=pad_mode,
                       pad_const=pad_const)
    grad_x = grad_op(x)
    assert len(grad_x) == space.ndim

    # Each gradient component must agree with the finite-difference helper.
    for axis, dx in enumerate(space.cell_sides):
        expected = finite_diff(x_arr, axis=axis, dx=dx, method=method,
                               pad_mode=pad_mode, pad_const=pad_const)
        assert all_almost_equal(grad_x[axis].asarray(), expected)

    # Adjoint check: <A x, y> == <x, A^T y> on the derivative operator.
    deriv_op = grad_op.derivative()
    y = noise_element(deriv_op.range)
    lhs = y.inner(deriv_op(x))
    rhs = x.inner(deriv_op.adjoint(y))

    # Guard against trivially-zero data making the identity vacuous.
    assert lhs != 0
    assert rhs != 0
    assert almost_equal(lhs, rhs, places=places)

    # Smoke test: the operator must also apply on higher-dimensional spaces.
    lin_size = 3
    for ndim in (1, 3, 6):
        hi_space = odl.uniform_discr([0.] * ndim, [1.] * ndim,
                                     [lin_size] * ndim)
        hi_vec = odl.phantom.cuboid(hi_space, [0.2] * ndim, [0.8] * ndim)
        hi_grad = Gradient(hi_space, method=method, pad_mode=pad_mode,
                           pad_const=pad_const)
        hi_grad(hi_vec)
def test_gradient(method, impl, padding):
    """Discretized spatial gradient operator."""
    # A gradient on a plain Rn space (no discretization) must be rejected.
    with pytest.raises(TypeError):
        Gradient(odl.Rn(1), method=method)

    # `padding` is either (mode, value) or just the mode with no value.
    if isinstance(padding, tuple):
        padding_method, padding_value = padding
    else:
        padding_method, padding_value = padding, None

    # 2D DiscreteLp element built from the shared test data.
    discr_space = odl.uniform_discr([0, 0], [1, 1], DATA_2D.shape, impl=impl)
    dom_vec = discr_space.element(DATA_2D)

    # Expected per-axis derivatives from the finite-difference helper.
    dx0, dx1 = discr_space.cell_sides
    diff_0 = finite_diff(DATA_2D, axis=0, dx=dx0, method=method,
                         padding_method=padding_method,
                         padding_value=padding_value)
    diff_1 = finite_diff(DATA_2D, axis=1, dx=dx1, method=method,
                         padding_method=padding_method,
                         padding_value=padding_value)

    # Apply the gradient; one component per array axis expected.
    grad = Gradient(discr_space, method=method,
                    padding_method=padding_method,
                    padding_value=padding_value)
    grad_vec = grad(dom_vec)
    assert len(grad_vec) == DATA_2D.ndim
    assert all_almost_equal(grad_vec[0].asarray(), diff_0)
    assert all_almost_equal(grad_vec[1].asarray(), diff_1)

    # Adjoint check: <A x, y> == <x, A^T y> on the derivative operator.
    derivative = grad.derivative()
    ran_vec = derivative.range.element([DATA_2D, DATA_2D ** 2])
    lhs = ran_vec.inner(derivative(dom_vec))
    rhs = dom_vec.inner(derivative.adjoint(ran_vec))

    # Guard against trivially-zero data making the identity vacuous.
    assert lhs != 0
    assert rhs != 0
    assert almost_equal(lhs, rhs)

    # Smoke test: the operator must also apply on higher-dimensional spaces.
    lin_size = 3
    for ndim in (1, 3, 6):
        hi_space = odl.uniform_discr([0.] * ndim, [1.] * ndim,
                                     [lin_size] * ndim)
        hi_vec = odl.phantom.cuboid(hi_space, [0.2] * ndim, [0.8] * ndim)
        hi_grad = Gradient(hi_space, method=method,
                           padding_method=padding_method,
                           padding_value=padding_value)
        hi_grad(hi_vec)
def test_gradient(space, method, padding):
    """Discretized spatial gradient operator."""
    # Looser tolerance for single precision.
    places = 2 if space.dtype == np.float32 else 4

    # A gradient on a plain rn space (no discretization) must be rejected.
    with pytest.raises(TypeError):
        Gradient(odl.rn(1), method=method)

    # `padding` is either (mode, constant) or just the mode with constant 0.
    if isinstance(padding, tuple):
        pad_mode, pad_const = padding
    else:
        pad_mode, pad_const = padding, 0

    # Random DiscreteLp element and its raw array view.
    elem = noise_element(space)
    elem_arr = elem.asarray()

    # Build and apply the gradient; one component per spatial axis expected.
    grad_op = Gradient(space, method=method, pad_mode=pad_mode,
                       pad_const=pad_const)
    grad_elem = grad_op(elem)
    assert len(grad_elem) == space.ndim

    # Each gradient component must agree with the finite-difference helper.
    for axis, dx in enumerate(space.cell_sides):
        expected = finite_diff(elem_arr, axis=axis, dx=dx, method=method,
                               pad_mode=pad_mode, pad_const=pad_const)
        assert all_almost_equal(grad_elem[axis].asarray(), expected)

    # Adjoint check: <A x, y> == <x, A^T y> on the derivative operator.
    deriv_op = grad_op.derivative()
    ran_elem = noise_element(deriv_op.range)
    lhs = ran_elem.inner(deriv_op(elem))
    rhs = elem.inner(deriv_op.adjoint(ran_elem))

    # Guard against trivially-zero data making the identity vacuous.
    assert lhs != 0
    assert rhs != 0
    assert almost_equal(lhs, rhs, places=places)

    # Smoke test: the operator must also apply on higher-dimensional spaces.
    lin_size = 3
    for ndim in (1, 3, 6):
        # DiscreteLpElement on an ndim-dimensional unit cube.
        nd_space = odl.uniform_discr([0.] * ndim, [1.] * ndim,
                                     [lin_size] * ndim)
        nd_elem = odl.phantom.cuboid(nd_space, [0.2] * ndim, [0.8] * ndim)
        nd_grad = Gradient(nd_space, method=method, pad_mode=pad_mode,
                           pad_const=pad_const)
        nd_grad(nd_elem)