import numpy as np
from numpy.testing import assert_allclose, assert_array_almost_equal

import numdifftools as nd


def rosen(x):
    # rosen is referenced but not defined in this excerpt; this two-dimensional
    # variant reproduces the expected values used below.
    return (1 - x[0]) ** 2 + 105. * (x[1] - x[0] ** 2) ** 2


def test_directional_diff():
    v = np.r_[1, -1]
    v = v / np.linalg.norm(v)
    x0 = [2, 3]
    # Directional derivative of rosen at x0 along the unit vector v.
    directional_diff = np.dot(nd.Gradient(rosen)(x0), v)
    assert_allclose(directional_diff, 743.87633380824832)
    dd, _info = nd.directionaldiff(rosen, x0, v, full_output=True)
    assert_allclose(dd, 743.87633380824832)
def test_issue_39():
    """Regression test for issue #39: float / Bicomplex arithmetic works with
    the multicomplex method.
    """
    fun = nd.Gradient(lambda x: 1.0 / (np.exp(x[0]) + np.cos(x[1]) + 10),
                      method="multicomplex")
    assert_allclose(fun([1.0, 2.0]), [-0.017961123762187736, 0.0060082083648822])
def test_gradient():
    def fun(x):
        return np.sum(x ** 2)

    dtrue = [2., 4., 6.]
    for method in ['complex', 'central', 'backward', 'forward']:
        for order in [2, 4]:
            dfun = nd.Gradient(fun, method=method, order=order)
            d = dfun([1, 2, 3])
            assert_array_almost_equal(d, dtrue)
def test_gradient_fulloutput():
    """Regression test for issue #52: Gradient tried to apply squeeze to the
    output tuple containing both the result and the full_output info object.
    """
    res, info = nd.Gradient(lambda x, y: x + y, full_output=True)(1, 3)
    assert_allclose(res, 1)
    assert info.error_estimate < 1e-13
    assert info.final_step == 0.015625
    assert info.index == 5