Example #1
    def test_cantilever_beam_gradients(self):
        benchmark = setup_benchmark('cantilever_beam')
        from pyapprox.models.wrappers import ActiveSetVariableModel
        fun = ActiveSetVariableModel(
            benchmark.fun,
            benchmark.variable.num_vars() +
            benchmark.design_variable.num_vars(),
            benchmark.variable.get_statistics('mean'),
            benchmark.design_var_indices)
        jac = ActiveSetVariableModel(
            benchmark.jac,
            benchmark.variable.num_vars() +
            benchmark.design_variable.num_vars(),
            benchmark.variable.get_statistics('mean'),
            benchmark.design_var_indices)
        init_guess = 2 * np.ones((2, 1))
        errors = pya.check_gradients(fun, jac, init_guess, disp=True)
        assert errors.min() < 4e-7

        constraint_fun = ActiveSetVariableModel(
            benchmark.constraint_fun,
            benchmark.variable.num_vars() +
            benchmark.design_variable.num_vars(),
            benchmark.variable.get_statistics('mean'),
            benchmark.design_var_indices)
        constraint_jac = ActiveSetVariableModel(
            benchmark.constraint_jac,
            benchmark.variable.num_vars() +
            benchmark.design_variable.num_vars(),
            benchmark.variable.get_statistics('mean'),
            benchmark.design_var_indices)
        init_guess = 2 * np.ones((2, 1))
        errors = pya.check_gradients(constraint_fun,
                                     constraint_jac,
                                     init_guess,
                                     disp=True)
        assert errors.min() < 4e-7

        nsamples = 10
        samples = pya.generate_independent_random_samples(
            benchmark.variable, nsamples)
        constraint_fun = ActiveSetVariableModel(
            benchmark.constraint_fun,
            benchmark.variable.num_vars() +
            benchmark.design_variable.num_vars(), samples,
            benchmark.design_var_indices)
        constraint_jac = ActiveSetVariableModel(
            benchmark.constraint_jac,
            benchmark.variable.num_vars() +
            benchmark.design_variable.num_vars(), samples,
            benchmark.design_var_indices)
        init_guess = 2 * np.ones((2, 1))
        errors = pya.check_gradients(
            lambda x: constraint_fun(x).flatten(order='F'),
            constraint_jac,
            init_guess,
            disp=True)
        assert errors.min() < 4e-7
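
Judging by the errors.min() assertions, pya.check_gradients returns the error
between the analytic Jacobian and finite-difference approximations for a sweep
of step sizes. A minimal self-contained sketch of such a check (illustrative
only, not the pyapprox implementation) is:

import numpy as np

def fd_gradient_errors(fun, jac, x0, step_sizes=np.logspace(-1, -13, 13)):
    # Compare the analytic Jacobian at x0 against forward finite
    # differences for a range of step sizes; return one error per step.
    x0 = np.asarray(x0, dtype=float).flatten()
    analytic = np.atleast_2d(jac(x0))
    f0 = np.atleast_1d(fun(x0)).flatten()
    errors = np.empty(step_sizes.shape[0])
    for kk, h in enumerate(step_sizes):
        fd = np.empty_like(analytic, dtype=float)
        for jj in range(x0.shape[0]):
            xp = x0.copy()
            xp[jj] += h
            fd[:, jj] = (np.atleast_1d(fun(xp)).flatten() - f0) / h
        errors[kk] = np.linalg.norm(fd - analytic)
    return errors

If the Jacobian is correct, the error decreases as the step size shrinks until
round-off error dominates, so the minimum over all step sizes should be small.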
Example #2
    def jacobian(self, design_sample):
        if design_sample.ndim == 1:
            design_sample = design_sample[:, np.newaxis]
        if (np.array_equal(design_sample, self.design_sample) and
                self.jac_values is not None):
            jac_values = self.jac_values
        else:
            jac = ActiveSetVariableModel(
                self.jac, self.num_vars, self.samples,
                self.design_var_indices)
            jac_values = jac(design_sample)
        constraint_jac = self.stats_jac(jac_values, self.weights)
        if self.bound is not None and self.upper_bound:
            constraint_jac *= -1
        return constraint_jac
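
The early-return branch above avoids recomputing the Jacobian when it was
already evaluated at the current design point. A minimal sketch of this
cache-by-input pattern, using a hypothetical expensive_jac callable, is:

import numpy as np

class CachedJacobian:
    # Re-evaluate the wrapped Jacobian only when the input changes.
    def __init__(self, expensive_jac):
        self._jac = expensive_jac
        self._x = None
        self._values = None

    def __call__(self, x):
        # np.array_equal (as above) means a copy of the cached design
        # point still hits the cache, unlike an identity check with `is`.
        if self._x is not None and np.array_equal(x, self._x):
            return self._values
        self._x, self._values = x.copy(), self._jac(x)
        return self._values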
Example #3
    def generate_shared_data(self, design_sample):
        self.design_sample = design_sample.copy()

        fun = ActiveSetVariableModel(self.fun, self.num_vars, design_sample,
                                     self.random_var_indices)
        data = self.generate_sample_data(fun)
        self.samples, self.weights, self.fun_values = data[:3]
        assert (self.samples.shape[0] ==
                self.num_vars - self.design_var_indices.shape[0])
        assert self.samples.shape[1] == self.weights.shape[0]
        # assert self.samples.shape[1] == self.fun_values.shape[0]
        if not callable(self.jac) and self.jac:
            # Consider whether to support self.jac=True. It is appealing when
            # gradients come from an adjoint PDE solve, which needs the same
            # data as the function values and is therefore best done at the
            # time the function values are obtained. The challenge is defining
            # the correct output interface and computing gradients only when
            # the Jacobian is requested, not on every call to self.__call__.
            raise NotImplementedError("self.jac=True is not yet supported")
            # unreachable until the above is implemented:
            # self.jac_values = data[3]
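
As used throughout these examples, ActiveSetVariableModel wraps a function of
all variables so that only one subset (the active variables) varies while the
remaining variables are held at fixed values. A plain-numpy sketch of that
idea for a single fixed sample (illustrative only; the pyapprox class also
handles whole matrices of fixed samples, as in Example #5) is:

import numpy as np

def fix_inactive_variables(fun, num_vars, inactive_values, active_indices):
    # Build a function of the active variables only; the inactive entries
    # of the full sample are filled with the supplied fixed values.
    active_indices = np.asarray(active_indices)
    inactive_indices = np.setdiff1d(np.arange(num_vars), active_indices)

    def wrapped(active_sample):
        full_sample = np.empty(num_vars)
        full_sample[active_indices] = np.asarray(active_sample).flatten()
        full_sample[inactive_indices] = np.asarray(inactive_values).flatten()
        return fun(full_sample)

    return wrapped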
Example #4
    def jacobian(self, design_sample):
        if design_sample.ndim == 1:
            design_sample = design_sample[:, np.newaxis]
        if (np.array_equal(design_sample, self.design_sample) and
                self.jac_values is not None):
            jac_values = self.jac_values
        else:
            jac = ActiveSetVariableModel(
                self.jac, self.num_vars, self.samples,
                self.design_var_indices)
            jac_values = jac(design_sample)
        nsamples = self.weights.shape[0]
        nqoi = self.fun_values.shape[1]
        nvars = jac_values.shape[1]
        constraint_jac = np.empty((nqoi, nvars))
        for ii in range(nqoi):
            constraint_jac[ii] = self.stats_jac(
                jac_values[ii*nsamples:(ii+1)*nsamples, :], self.weights)
        if self.bound is not None and self.upper_bound:
            constraint_jac *= -1
        return constraint_jac.squeeze()
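
The loop relies on the wrapped Jacobian stacking the per-sample blocks for each
quantity of interest (QoI) vertically, so rows ii*nsamples:(ii+1)*nsamples
belong to QoI ii. A small self-contained check of that indexing, assuming
stats_jac computes a weighted mean (one possible choice of statistic), is:

import numpy as np

nqoi, nsamples, nvars = 2, 3, 4
# QoI-major stack: the first nsamples rows belong to the first QoI
jac_values = np.arange(nqoi*nsamples*nvars, dtype=float).reshape(
    nqoi*nsamples, nvars)
weights = np.full(nsamples, 1.0/nsamples)

constraint_jac = np.empty((nqoi, nvars))
for ii in range(nqoi):
    block = jac_values[ii*nsamples:(ii+1)*nsamples, :]
    constraint_jac[ii] = weights @ block  # weighted mean per QoI

# The same result via a single reshape, confirming the block layout
vectorized = np.einsum(
    's,qsv->qv', weights, jac_values.reshape(nqoi, nsamples, nvars))
assert np.allclose(constraint_jac, vectorized)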
Example #5

First we must specify the distribution of the random variables.
"""
import numpy as np
import pyapprox as pya
from pyapprox.benchmarks.benchmarks import setup_benchmark
from functools import partial
from pyapprox.optimization import *
benchmark = setup_benchmark('cantilever_beam')

from pyapprox.models.wrappers import ActiveSetVariableModel
nsamples = 10
samples = pya.generate_independent_random_samples(benchmark.variable, nsamples)
fun = ActiveSetVariableModel(
    benchmark.fun,
    benchmark.variable.num_vars() + benchmark.design_variable.num_vars(),
    samples, benchmark.design_var_indices)
jac = ActiveSetVariableModel(
    benchmark.jac,
    benchmark.variable.num_vars() + benchmark.design_variable.num_vars(),
    samples, benchmark.design_var_indices)

generate_random_samples = partial(pya.generate_independent_random_samples,
                                  benchmark.variable, 100)
# Set the seed so that the finite-difference Jacobian always uses the same
# set of samples for each step size as is used to compute the exact gradient.
seed = 1
generate_sample_data = partial(generate_monte_carlo_quadrature_data,
                               generate_random_samples,
                               benchmark.variable.num_vars(),
                               benchmark.design_var_indices,