def main(problem_sizes=(4, 8, 16, 32, 64, 96)):
    """Build gradient/Hessian estimator registries and run the benchmarks.

    Registers fixed-step and adaptive-step estimators from numdifftools
    (``nd``), plus statsmodels (``nds``), scipy (``nsc``) and, when
    available, algopy (``nda``) backends, then forwards everything to
    ``run_gradient_and_hessian_benchmarks``.

    Parameters
    ----------
    problem_sizes : tuple of int, optional
        Problem dimensions to benchmark.
    """
    fixed_step = MinStepGenerator(num_steps=1, use_exact_steps=True, offset=0)
    epsilon = MaxStepGenerator(num_steps=14, use_exact_steps=True,
                               step_ratio=1.6, offset=0)
    adaptiv_txt = '_adaptive_{0:d}_{1!s}_{2:d}'.format(
        epsilon.num_steps, str(epsilon.step_ratio), epsilon.offset)
    gradient_funs = OrderedDict()
    hessian_funs = OrderedDict()

    # Removed a dead assignment: the original set hessian_fun = 'Hessdiag'
    # and immediately overwrote it.  Change the string below to 'Hessdiag'
    # to benchmark the diagonal-only Hessian instead.
    hessian_fun = 'Hessian'

    if nda is not None:  # algopy backend is optional
        nda_method = 'forward'
        nda_txt = 'algopy_' + nda_method
        gradient_funs[nda_txt] = nda.Jacobian(1, method=nda_method)
        hessian_funs[nda_txt] = getattr(nda, hessian_fun)(1, method=nda_method)

    ndc_hessian = getattr(nd, hessian_fun)
    order = 2
    methods = ['forward', 'central', 'complex']
    for method in methods:
        # One fixed-step and one adaptive-step variant per method.
        method2 = method + adaptiv_txt
        options = dict(method=method, order=order)
        gradient_funs[method] = nd.Jacobian(1, step=fixed_step, **options)
        gradient_funs[method2] = nd.Jacobian(1, step=epsilon, **options)
        hessian_funs[method] = ndc_hessian(1, step=fixed_step, **options)
        hessian_funs[method2] = ndc_hessian(1, step=epsilon, **options)

    # Reference implementations; loops replace nine copy-pasted lines while
    # preserving the original OrderedDict insertion order.
    for method in methods:
        hessian_funs[method + '_statsmodels'] = nds.Hessian(1, method=method)
    for method in methods:
        gradient_funs[method + '_statsmodels'] = nds.Jacobian(1, method=method)
    for method in methods:
        gradient_funs[method + '_scipy'] = nsc.Jacobian(1, method=method)

    run_gradient_and_hessian_benchmarks(gradient_funs, hessian_funs,
                                        problem_sizes)
def test_scalar_to_vector(val):
    """Check Jacobian of a scalar->vector map against the analytic truth.

    For f(x) = [x, x**2, x**3] the Jacobian is [[1], [2x], [3x**2]]; each
    numdifftools method (and, where supported, the statsmodels backend)
    must reproduce it at ``val`` within rtol=1e-3 / atol=1e-6.
    """
    def fun(x):
        return np.array([x, x**2, x**3])

    # Analytic Jacobian, shaped (3, 1, 1) to match the estimators' output.
    truth = np.array([[[1.]], [[2 * val]], [[3 * val**2]]])
    for method in [
            'multicomplex', 'complex', 'central', 'forward', 'backward'
    ]:
        j0, info = nd.Jacobian(fun, method=method, full_output=True)(val)
        if method != "multicomplex":
            # statsmodels has no multicomplex method, so skip it there.
            j00 = nds.Jacobian(fun, method=method)(val)
            error = np.abs(j00 - truth)
            note('statsmodel: method={}, error={}'.format(method, error))
            assert_allclose(j00, truth, rtol=1e-3, atol=1e-6)
        error = np.abs(j0 - truth)
        # note() records diagnostics without failing (hypothesis-style).
        note('method={}, error={}, error_est={}'.format(
            method, error, info.error_estimate))
        assert_allclose(j0, truth, rtol=1e-3, atol=1e-6)
hessian_funs[nda_txt] = getattr(nda, hessian_fun)(1, method=nda_method) order = 2 for method in ['forward', 'central', 'complex']: method2 = method + adaptiv_txt options = dict(method=method, order=order) gradient_funs[method] = nd.Jacobian(1, step=fixed_step, **options) gradient_funs[method2] = nd.Jacobian(1, step=epsilon, **options) hessian_funs[method] = ndc_hessian(1, step=fixed_step, **options) hessian_funs[method2] = ndc_hessian(1, step=epsilon, **options) hessian_funs['forward_statsmodels'] = nds.Hessian(1, method='forward') hessian_funs['central_statsmodels'] = nds.Hessian(1, method='central') hessian_funs['complex_statsmodels'] = nds.Hessian(1, method='complex') gradient_funs['forward_statsmodels'] = nds.Jacobian(1, method='forward') gradient_funs['central_statsmodels'] = nds.Jacobian(1, method='central') gradient_funs['complex_statsmodels'] = nds.Jacobian(1, method='complex') gradient_funs['forward_scipy'] = nsc.Jacobian(1, method='forward') gradient_funs['central_scipy'] = nsc.Jacobian(1, method='central') gradient_funs['complex_scipy'] = nsc.Jacobian(1, method='complex') def _compute_benchmark(functions, problem_sizes): result_list = [] for n in problem_sizes: print('n=', n) num_methods = len(functions) results = np.zeros((num_methods, 3)) ref_g = None f = BenchmarkFunction(n)
def fun_jacobian(x):
    """Return the Jacobian of ``to_minimize`` at ``x``, flattened to 1-D.

    Suitable as a gradient callback for scipy-style optimizers.
    """
    # The original wrapped to_minimize in a redundant ``lambda x: ...`` that
    # also shadowed the outer ``x``; passing the callable directly is
    # equivalent and clearer.
    return nd.Jacobian(to_minimize)(x).ravel()