Example #1
    def test_cantilever_beam_gradients(self):
        benchmark = setup_benchmark('cantilever_beam')
        from pyapprox.models.wrappers import ActiveSetVariableModel
        fun = ActiveSetVariableModel(
            benchmark.fun,
            benchmark.variable.num_vars() +
            benchmark.design_variable.num_vars(),
            benchmark.variable.get_statistics('mean'),
            benchmark.design_var_indices)
        jac = ActiveSetVariableModel(
            benchmark.jac,
            benchmark.variable.num_vars() +
            benchmark.design_variable.num_vars(),
            benchmark.variable.get_statistics('mean'),
            benchmark.design_var_indices)
        init_guess = 2 * np.ones((2, 1))
        errors = pya.check_gradients(fun, jac, init_guess, disp=True)
        assert errors.min() < 4e-7

        constraint_fun = ActiveSetVariableModel(
            benchmark.constraint_fun,
            benchmark.variable.num_vars() +
            benchmark.design_variable.num_vars(),
            benchmark.variable.get_statistics('mean'),
            benchmark.design_var_indices)
        constraint_jac = ActiveSetVariableModel(
            benchmark.constraint_jac,
            benchmark.variable.num_vars() +
            benchmark.design_variable.num_vars(),
            benchmark.variable.get_statistics('mean'),
            benchmark.design_var_indices)
        init_guess = 2 * np.ones((2, 1))
        errors = pya.check_gradients(constraint_fun,
                                     constraint_jac,
                                     init_guess,
                                     disp=True)
        assert errors.min() < 4e-7

        nsamples = 10
        samples = pya.generate_independent_random_samples(
            benchmark.variable, nsamples)
        constraint_fun = ActiveSetVariableModel(
            benchmark.constraint_fun,
            benchmark.variable.num_vars() +
            benchmark.design_variable.num_vars(), samples,
            benchmark.design_var_indices)
        constraint_jac = ActiveSetVariableModel(
            benchmark.constraint_jac,
            benchmark.variable.num_vars() +
            benchmark.design_variable.num_vars(), samples,
            benchmark.design_var_indices)
        init_guess = 2 * np.ones((2, 1))
        errors = pya.check_gradients(
            lambda x: constraint_fun(x).flatten(order='F'),
            constraint_jac,
            init_guess,
            disp=True)
        assert errors.min() < 4e-7
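For reference, `pya.check_gradients` finite-differences `fun` over a sweep of step sizes and returns the error between each finite-difference estimate and the analytic Jacobian; the assertions above only require that the best error in the sweep is near machine precision. A minimal self-contained sketch of the same contract, using an illustrative quadratic rather than a benchmark model:

    import numpy as np
    import pyapprox as pya

    def quad_fun(x):
        # x is a (nvars, 1) column; return QoI values as a 2D array
        return np.atleast_2d(x[0]**2 + 2.*x[1]**2)

    def quad_jac(x):
        # Jacobian of the scalar QoI as a 1 x nvars row
        return np.array([[2.*x[0, 0], 4.*x[1, 0]]])

    init_guess = np.ones((2, 1))
    errors = pya.check_gradients(quad_fun, quad_jac, init_guess, disp=True)
    assert errors.min() < 1e-7  # illustrative tolerance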
Example #2
 def test_piston_gradient(self):
     benchmark = setup_benchmark("piston")
     sample = pya.generate_independent_random_samples(benchmark.variable, 1)
     errors = pya.check_gradients(benchmark.fun, benchmark.jac, sample)
     errors = errors[np.isfinite(errors)]
     assert errors.max() > 0.1 and errors.min() <= 6e-7
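The two-sided assertion encodes the expected behaviour of the step-size sweep: for the coarsest steps the truncation error dominates (`errors.max() > 0.1`), while at the best step the analytic gradient is reproduced almost exactly (`errors.min() <= 6e-7`). Schematically, each entry of `errors` comes from a comparison like the following sketch (names illustrative):

    import numpy as np

    def directional_fd_error(fun, jac, x, h):
        # compare a finite-difference directional derivative with jac(x) @ d
        d = np.random.normal(0., 1., x.shape)
        d /= np.linalg.norm(d)
        fd = (fun(x + h*d) - fun(x))/h    # finite-difference estimate
        exact = jac(x).dot(d)             # analytic directional derivative
        return np.absolute(fd - exact).max()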
Example #3
    def test_evaluate_gradient_2d(self):
        """
        for the PDE ((1+sum(z^2)*x)*u(x)')' = -2, u(0) = 0, u(1) = 1
        use model.evaluate_gradient to evaluate the gradient of the QoI
        with respect to the random parameter vector z.
        The QoI is the intergral of the solution over the entire domain
        The adjoint rhs is then just 1.
        """
        order = 20
        model = SteadyStateDiffusionEquation2D()
        lims = [0., 1., 0., 1.]
        bndry_cond = [0., 0.]
        model.initialize(order, bndry_cond, lims)

        model.diffusivity_function = \
            lambda x, z: (z[0]**2+z[1]**2)*(x[0]+x[1]) + 1.
        model.forcing_function = lambda x, z: 0*x[0]-2

        sample = np.random.RandomState(2).uniform(-1, 1, (2, 1))
        model.diffusivity_derivs_function = \
            lambda x, z, i: np.array([2.*(x[0]+x[1])*z[i]]).T
        model.forcing_derivs_function = \
            lambda x, z, i: np.array([0.*x[0]]).T
        model(sample)
        # evaluate_gradient must be called before any further calls to
        # model.solve with different parameters, because it accesses
        # self.fwd_solution, which changes with any subsequent solve
        errors = pya.check_gradients(
            model, lambda x: model.evaluate_gradient(x[:, 0]), sample)
        errors = errors[np.isfinite(errors)]
        assert errors.max() > 0.1 and errors.min() <= 4e-6
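Note the adapter lambda: `check_gradients` passes samples as `(nvars, 1)` columns, whereas `model.evaluate_gradient` expects a flat 1D parameter vector, hence the `x[:, 0]`. The same wrapping works for any flat-vector gradient, e.g. this hypothetical pair:

    def flat_grad(z):
        # gradient of z_0^2 + z_1^2 for a flat 1D vector z
        return np.atleast_2d([2.*z[0], 2.*z[1]])

    def column_fun(x):
        return np.atleast_2d(x[0]**2 + x[1]**2)

    errors = pya.check_gradients(
        column_fun, lambda x: flat_grad(x[:, 0]), np.ones((2, 1)))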
Example #4
 def test_wing_weight_gradient(self):
     variable = define_wing_weight_random_variables()
     fun = wing_weight_function
     grad = wing_weight_gradient
     sample = pya.generate_independent_random_samples(variable, 1)
     errors = pya.check_gradients(fun, grad, sample)
     errors = errors[np.isfinite(errors)]
     assert errors.max() > 0.1 and errors.min() <= 6e-7
Example #5
    def test_r_oed_objective_and_constraint_wrappers(self):
        poly_degree = 10
        num_design_pts = 101
        num_pred_pts = 51
        pred_samples = np.random.uniform(-1, 1, num_pred_pts)
        design_samples = np.linspace(-1, 1, num_design_pts)
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        pred_factors = univariate_monomial_basis_matrix(
            poly_degree, pred_samples)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        goptimality_criterion_wrapper = partial(goptimality_criterion,
                                                homog_outer_prods,
                                                design_factors, pred_factors)
        mu = np.random.uniform(0, 1, (num_design_pts))
        mu /= mu.sum()
        obj, jac = goptimality_criterion_wrapper(mu)

        beta = 0.75
        pred_weights = np.ones(num_pred_pts) / num_pred_pts
        r_oed_objective_wrapper = partial(r_oed_objective, beta, pred_weights)
        r_oed_jac_wrapper = partial(r_oed_objective_jacobian, beta,
                                    pred_weights)
        x0 = np.concatenate([np.ones(num_design_pts + 1), mu])[:, np.newaxis]
        diffs = pya.check_gradients(r_oed_objective_wrapper, r_oed_jac_wrapper,
                                    x0)
        assert diffs.min() < 6e-5, diffs

        r_oed_constraint_wrapper = partial(
            r_oed_constraint_objective, num_design_pts,
            lambda x: goptimality_criterion_wrapper(x)[0])
        r_oed_constraint_jac_wrapper = partial(
            r_oed_constraint_jacobian, num_design_pts,
            lambda x: goptimality_criterion_wrapper(x)[1])
        x0 = np.concatenate([np.ones(num_pred_pts + 1), mu])[:, np.newaxis]
        from pyapprox import approx_jacobian
        # smoke test: the finite-difference Jacobian can be formed at x0
        approx_jacobian(r_oed_constraint_wrapper, x0[:, 0])
        diffs = pya.check_gradients(r_oed_constraint_wrapper,
                                    r_oed_constraint_jac_wrapper, x0)
        assert diffs.min() < 6e-5, diffs
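The `partial` calls above bind the fixed problem data (outer products, design and prediction factors, weights) so that the wrapped objective and Jacobian expose the single-argument signature `check_gradients` expects. The pattern in isolation, with an illustrative objective:

    from functools import partial
    import numpy as np
    import pyapprox as pya

    def objective(data, x):
        # full signature: fixed data plus the design variable x
        return np.atleast_2d(((x - data)**2).sum())

    def jacobian(data, x):
        return 2.*(x - data).T

    data = np.full((3, 1), 0.5)
    errors = pya.check_gradients(
        partial(objective, data), partial(jacobian, data), np.ones((3, 1)))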
Example #6
 def test_rosenbrock_function_gradient_and_hessian_prod(self):
     benchmark = setup_benchmark("rosenbrock", nvars=2)
     init_guess = benchmark.variable.get_statistics('mean')+\
         benchmark.variable.get_statistics('std')
     errors = pya.check_gradients(benchmark.fun,
                                  benchmark.jac,
                                  init_guess,
                                  disp=False)
     assert errors.min() < 1e-5
     errors = pya.check_hessian(benchmark.jac,
                                benchmark.hessp,
                                init_guess,
                                disp=False)
     assert errors.min() < 1e-5
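`check_hessian` mirrors `check_gradients` one derivative up: it finite-differences the supplied Jacobian and compares against the Hessian-vector product `hessp(x, v)`. A minimal sketch with a quadratic whose Hessian is the constant SPD matrix `A` (illustrative, not part of the benchmark suite):

    import numpy as np
    import pyapprox as pya

    A = np.array([[3., 1.], [1., 2.]])  # Hessian of f(x) = 0.5 x^T A x

    def quad_jac(x):
        return A.dot(x).T               # gradient as a 1 x nvars row

    def quad_hessp(x, v):
        return A.dot(v)                 # exact Hessian-vector product

    errors = pya.check_hessian(quad_jac, quad_hessp, np.ones((2, 1)))
    assert errors.min() < 1e-6          # illustrative tolerance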
Example #7
 def test_ishigami_function_gradient_and_hessian(self):
     benchmark = setup_benchmark("ishigami", a=7, b=0.1)
     init_guess = benchmark.variable.get_statistics('mean')+\
         benchmark.variable.get_statistics('std')
     errors = pya.check_gradients(benchmark.fun,
                                  benchmark.jac,
                                  init_guess,
                                  disp=False)
     assert errors.min() < 2e-7
     hess_matvec = lambda x, v: np.dot(benchmark.hess(x), v)
     errors = pya.check_hessian(benchmark.jac,
                                hess_matvec,
                                init_guess,
                                disp=False)
     assert errors.min() < 2e-7
Example #8
    def test_ACVMC_objective_jacobian(self):

        cov = np.asarray([[1.00, 0.50, 0.25], [0.50, 1.00, 0.50],
                          [0.25, 0.50, 4.00]])

        costs = [4, 2, 1]

        target_cost = 20

        nhf_samples, nsample_ratios = pya.allocate_samples_mlmc(
            cov, costs, target_cost)[:2]

        estimator = ACVMF(cov, costs)
        errors = pya.check_gradients(
            partial(acv_sample_allocation_objective, estimator),
            partial(acv_sample_allocation_jacobian_torch, estimator),
            nsample_ratios[:, np.newaxis],
            disp=False)
        assert errors.min() < 1e-8
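Here the analytic Jacobian comes from automatic differentiation (the `_torch` suffix), and `check_gradients` validates it against finite differences exactly as it would a hand-coded derivative. A sketch of that pattern, assuming PyTorch is available and using an illustrative objective:

    import numpy as np
    import torch

    def torch_objective(x):
        return (x**3).sum()

    def fun(x):
        # x is a (nvars, 1) numpy column
        return np.atleast_2d(torch_objective(torch.tensor(x)).item())

    def jac(x):
        xt = torch.tensor(x, requires_grad=True)
        torch_objective(xt).backward()
        return xt.grad.numpy().T        # gradient as a 1 x nvars row

    errors = pya.check_gradients(fun, jac, np.full((3, 1), 0.5))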
Example #9
seed = 1
generate_sample_data = partial(generate_monte_carlo_quadrature_data,
                               generate_random_samples,
                               benchmark.variable.num_vars(),
                               benchmark.design_var_indices,
                               seed=seed)

num_vars = benchmark.variable.num_vars() + benchmark.design_variable.num_vars()
objective = StatisticalConstraint(benchmark.fun, benchmark.jac,
                                  expectation_fun, expectation_jac, num_vars,
                                  benchmark.design_var_indices,
                                  generate_sample_data)

init_guess = 2 * np.ones((2, 1))
errors = pya.check_gradients(objective,
                             objective.jacobian,
                             init_guess,
                             disp=False)
assert errors.min() < 1e-7

constraint = StatisticalConstraint(benchmark.constraint_fun,
                                   benchmark.constraint_jac,
                                   expectation_fun,
                                   expectation_jac,
                                   num_vars,
                                   benchmark.design_var_indices,
                                   generate_sample_data,
                                   bound=0.1,
                                   upper_bound=False)

init_guess = 2 * np.ones((2, 1))
# mirror the objective check above for the constraint wrapper
errors = pya.check_gradients(constraint,
                             constraint.jacobian,
                             init_guess,
                             disp=False)
assert errors.min() < 1e-7
Example #10
def check_derivative(function, num_design_pts, rel=True):
    # random (unnormalized) probability weights over the design points
    design_prob_measure = np.random.uniform(0, 1, (num_design_pts, 1))
    # jac=True: `function` is expected to return both value and gradient
    return pya.check_gradients(function, True, design_prob_measure, rel=rel)
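Passing `True` in place of a Jacobian callable tells `check_gradients` that the function itself supplies the derivative; assuming the usual convention that such a callable returns a (values, jacobian) pair, an illustrative function looks like:

    def fun_and_jac(x):
        # value and gradient of f(x) = sum(x^2), computed together
        vals = np.atleast_2d((x**2).sum())
        grad = 2.*x.T
        return vals, grad

    errors = pya.check_gradients(fun_and_jac, True, np.ones((4, 1)))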