def test_homoscedastic_doptimality_criterion(self):
    poly_degree = 3
    num_design_pts = 11
    design_samples = np.linspace(-1, 1, num_design_pts)
    noise_multiplier = None
    design_factors = univariate_monomial_basis_matrix(
        poly_degree, design_samples)
    homog_outer_prods = compute_homoscedastic_outer_products(
        design_factors)
    doptimality_criterion_wrapper = partial(
        doptimality_criterion, homog_outer_prods, design_factors)

    # check the gradient of the criterion against finite differences
    diffs = check_derivative(doptimality_criterion_wrapper, num_design_pts)
    assert diffs.min() < 5e-5, diffs

    # the criterion must equal log det(M(mu)^{-1}) for random design weights mu
    mu = np.random.uniform(0, 1, (num_design_pts))
    mu /= mu.sum()
    M1 = homog_outer_prods.dot(mu)
    assert np.allclose(
        np.log(np.linalg.det(np.linalg.inv(M1))),
        doptimality_criterion_wrapper(mu, return_grad=False))

    # check the Hessian against finite differences of the gradient
    jac = lambda x: doptimality_criterion(
        homog_outer_prods, design_factors, x)[1]
    hess_matvec = lambda x, p: doptimality_criterion(
        homog_outer_prods, design_factors, x, return_hessian=True)[2].dot(p)
    pya.check_hessian(jac, hess_matvec, mu[:, np.newaxis])
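
# Hedged illustrative sketch (not PyApprox's implementation, not collected by
# the test runner): for a homoscedastic linear model the information matrix is
# M(mu) = sum_i mu_i f(x_i) f(x_i)^T and the D-optimality objective is
# log det(M(mu)^{-1}), whose gradient w.r.t. mu_i is -f(x_i)^T M(mu)^{-1} f(x_i).
# All helper names below are hypothetical; the sketch only demonstrates the
# objective/gradient pair the test above exercises, verified by central
# finite differences.
def _doptimality_logdet_sketch(self):
    def objective_and_grad(design_factors, mu):
        # M(mu) = F^T diag(mu) F for design matrix F
        M = (design_factors*mu[:, None]).T.dot(design_factors)
        Minv = np.linalg.inv(M)
        obj = -np.linalg.slogdet(M)[1]  # log det(M^{-1}) = -log det(M)
        # grad_i = -f(x_i)^T M^{-1} f(x_i)
        grad = -np.einsum('ij,jk,ik->i', design_factors, Minv, design_factors)
        return obj, grad

    x = np.linspace(-1, 1, 11)
    design_factors = np.vander(x, 4, increasing=True)  # monomials up to degree 3
    mu = np.random.uniform(0, 1, x.shape[0])
    mu /= mu.sum()
    obj, grad = objective_and_grad(design_factors, mu)

    # central finite differences of the objective in each coordinate
    eps, eye = 1e-7, np.eye(mu.shape[0])
    fd_grad = np.array([
        (objective_and_grad(design_factors, mu+eps*eye[ii])[0] -
         objective_and_grad(design_factors, mu-eps*eye[ii])[0])/(2*eps)
        for ii in range(mu.shape[0])])
    assert np.allclose(grad, fd_grad, rtol=1e-5, atol=1e-6)
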
def test_rosenbrock_function_gradient_and_hessian_prod(self):
    benchmark = setup_benchmark("rosenbrock", nvars=2)
    init_guess = benchmark.variable.get_statistics('mean') + \
        benchmark.variable.get_statistics('std')
    errors = pya.check_gradients(
        benchmark.fun, benchmark.jac, init_guess, disp=False)
    assert errors.min() < 1e-5
    errors = pya.check_hessian(
        benchmark.jac, benchmark.hessp, init_guess, disp=False)
    assert errors.min() < 1e-5
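
# Hedged sketch of what the finite-difference checks above do conceptually,
# using the 2D Rosenbrock function, whose gradient and Hessian are known in
# closed form. The helper names (rosen, rosen_grad, rosen_hessp) are
# illustrative only, not PyApprox's API, and this method is not collected by
# the test runner.
def _rosenbrock_finite_difference_sketch(self):
    def rosen(x):
        return 100.*(x[1]-x[0]**2)**2 + (1.-x[0])**2

    def rosen_grad(x):
        return np.array([-400.*x[0]*(x[1]-x[0]**2) - 2.*(1.-x[0]),
                         200.*(x[1]-x[0]**2)])

    def rosen_hessp(x, p):
        hess = np.array([[1200.*x[0]**2 - 400.*x[1] + 2., -400.*x[0]],
                         [-400.*x[0], 200.]])
        return hess.dot(p)

    x0 = np.array([0.3, -0.7])
    direction = np.array([0.6, -0.8])
    eps = 1e-7
    # directional derivative of the function should match grad . direction
    fd = (rosen(x0+eps*direction)-rosen(x0-eps*direction))/(2*eps)
    assert np.allclose(fd, rosen_grad(x0).dot(direction), atol=1e-5)
    # directional derivative of the gradient should match the Hessian-vector product
    fd = (rosen_grad(x0+eps*direction)-rosen_grad(x0-eps*direction))/(2*eps)
    assert np.allclose(fd, rosen_hessp(x0, direction), atol=1e-4)
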
def test_ishigami_function_gradient_and_hessian(self):
    benchmark = setup_benchmark("ishigami", a=7, b=0.1)
    init_guess = benchmark.variable.get_statistics('mean') + \
        benchmark.variable.get_statistics('std')
    errors = pya.check_gradients(
        benchmark.fun, benchmark.jac, init_guess, disp=False)
    assert errors.min() < 2e-7
    # the benchmark provides a full Hessian, so wrap it as a matrix-vector product
    hess_matvec = lambda x, v: np.dot(benchmark.hess(x), v)
    errors = pya.check_hessian(
        benchmark.jac, hess_matvec, init_guess, disp=False)
    assert errors.min() < 2e-7
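
# Hedged sketch of the Ishigami function f(z) = sin(z1) + a*sin(z2)^2
# + b*z3^4*sin(z1) and its analytic gradient, verified with central finite
# differences; a=7, b=0.1 match the benchmark settings above. Helper names
# are illustrative only, not PyApprox's API, and this method is not collected
# by the test runner.
def _ishigami_gradient_sketch(self):
    a, b = 7., 0.1

    def ishigami(z):
        return np.sin(z[0]) + a*np.sin(z[1])**2 + b*z[2]**4*np.sin(z[0])

    def ishigami_grad(z):
        return np.array([np.cos(z[0])*(1.+b*z[2]**4),
                         a*np.sin(2.*z[1]),
                         4.*b*z[2]**3*np.sin(z[0])])

    z0 = np.array([0.4, -1.1, 0.8])
    eps = 1e-7
    fd_grad = np.array([
        (ishigami(z0+eps*e)-ishigami(z0-eps*e))/(2*eps) for e in np.eye(3)])
    assert np.allclose(fd_grad, ishigami_grad(z0), atol=1e-6)
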