def _get_input_values(self):
        '''Generator for the input values at each finite-difference point.'''

        params = self.driver.get_parameters().values()

        baseline = zeros(self.num_parameters, 'd')
        delta = zeros(self.num_parameters, 'd')
        mask = zeros(self.num_parameters, 'd')

        for i, param in enumerate(params):
            baseline[i] = param.evaluate()
            delta[i] = param.fd_step

        # baseline case
        if not self.skip_baseline:
            if not (self.form == "CENTRAL" and self.order % 2 == 1):
                yield baseline

        if self.form == "FORWARD":
            offset = 1
        elif self.form == "BACKWARD":
            offset = -self.order
        elif self.form == "CENTRAL":
            if self.order % 2 == 1:
                offset = (0.5 - self.order)
            else:
                offset = 1 - self.order

        # non-baseline cases for forward and backward
        if self.form in ["BACKWARD", "FORWARD"]:
            for iparam in range(self.num_parameters):
                mask[iparam] = 1.0
                for i in range(self.order):
                    var_val = baseline + (offset + i) * delta * mask
                    yield var_val
                mask[iparam] = 0.0
        else:  # for central form
            for iparam in range(self.num_parameters):
                mask[iparam] = 1.0
                if self.order % 2 == 1:
                    for i in range(self.order + 1):
                        var_val = baseline + (offset + i) * delta * mask
                        yield var_val
                else:
                    for i in range(self.order + 1):
                        if (offset + i) != 0:
                            var_val = baseline + (offset + i) * delta * mask
                            yield var_val
                mask[iparam] = 0.0
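
The offsets computed above define the finite-difference stencil: each parameter is perturbed in turn through `order` points while the others are masked out. A standalone sketch (plain NumPy, no driver needed, sizes made up) of the points a FORWARD, order-2 run would yield:

    from numpy import zeros

    # Illustrative only: reproduces the FORWARD stencil for order = 2.
    baseline = zeros(3, 'd')          # three parameters, all at 0.0
    delta = zeros(3, 'd') + 0.1       # fd_step of 0.1 for each
    mask = zeros(3, 'd')

    points = [baseline.copy()]        # the baseline case comes first
    offset = 1                        # FORWARD
    for iparam in range(3):
        mask[iparam] = 1.0
        for i in range(2):            # order = 2
            points.append(baseline + (offset + i) * delta * mask)
        mask[iparam] = 0.0
    # points[1] == [0.1, 0, 0], points[2] == [0.2, 0, 0], then parameter 1, ...
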
Example 3
    def provideJ(self):
        """Calculate analytical first derivatives."""

        if self.Jsize is None:
            n_in = 0
            n_out = 0
            for varname in self.list_inputs():
                val = self.get(varname)
                width = flattened_size(varname, val, self)
                n_in += width
            for varname in self.list_outputs():
                val = self.get(varname)
                width = flattened_size(varname, val, self)
                n_out += width
            self.Jsize = (n_out, n_in)

        J = zeros(self.Jsize)
        grad = self._srcexpr.evaluate_gradient()

        i = 0
        for varname in self._inputs:
            val = self.get(varname)
            width = flattened_size(varname, val, self)
            J[:, i:i + width] = grad[varname]
            i += width

        return J
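
The column-block assembly in provideJ can be seen in isolation: each input occupies width consecutive columns of J, filled from that variable's gradient. A minimal sketch with made-up sizes (the grad dict mirrors what evaluate_gradient is assumed to return here):

    from numpy import zeros, ones

    # Hypothetical: one scalar output, inputs 'a' (width 1) and 'b' (width 2).
    widths = {'a': 1, 'b': 2}
    grad = {'a': ones((1, 1)), 'b': 2 * ones((1, 2))}   # stand-in gradients

    J = zeros((1, 3))
    i = 0
    for varname in ['a', 'b']:
        width = widths[varname]
        J[:, i:i + width] = grad[varname]
        i += width
    # J is now [[1., 2., 2.]]
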
Example 6
    def execute(self):
        """Calculate the gradient of the workflow."""

        self._check()

        # Calculate gradient of the workflow
        self.calc_derivatives(first=True)
        self.ffd_order = 1
        self.differentiator.calc_gradient()
        self.ffd_order = 0

        inputs = self.get_parameters().keys()
        objs = self.get_objectives().keys()
        constraints = list(self.get_eq_constraints().keys() + \
                           self.get_ineq_constraints().keys())

        self.dF = zeros((len(objs), len(inputs)), 'd')
        self.dG = zeros((len(constraints), len(inputs)), 'd')
        self.F = zeros(len(objs), 'd')
        self.G = zeros(len(constraints), 'd')
        self.x = zeros(len(inputs), 'd')
        # The name lists correspond one-to-one with the array indices, so
        # fill them once instead of re-appending on every parameter pass.
        self.dF_names = list(objs)
        self.dG_names = list(constraints)
        self.dx_names = list(inputs)

        for i, input_name in enumerate(inputs):

            self.x[i] = self.differentiator.base_param[input_name]

            for j, output_name in enumerate(objs):
                self.dF[j][i] = self.differentiator.get_derivative(
                    output_name, wrt=input_name)
                self.F[j] = self.differentiator.base_data[output_name]

            for j, output_name in enumerate(constraints):
                self.dG[j][i] = self.differentiator.get_derivative(
                    output_name, wrt=input_name)
                self.G[j] = self.differentiator.base_data[output_name]
        # Sensitivity is sometimes run sequentially using different submodels,
        # so we need to return the state to the baseline value.
        self.differentiator.reset_state()

        self.record_case()
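
After execute() finishes, the arrays follow a rows-are-outputs, columns-are-parameters layout. A small sketch of reading one sensitivity, assuming a configured driver instance; the names 'comp.f' and 'comp.x' are hypothetical:

    # dF rows follow dF_names (objectives); columns follow dx_names (parameters).
    j = driver.dF_names.index('comp.f')   # objective row
    i = driver.dx_names.index('comp.x')   # parameter column
    print 'dF/dx =', driver.dF[j][i]
    print 'evaluated at x =', driver.x[i], 'where F =', driver.F[j]
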
Example 7
    def test_set_to_unset_default(self):
        comp = MyNoDefComp()
        self.assertEqual(0., comp.f_in)
        comp.f_in = 42.
        comp.arr_in = array([88., 32.])
        comp.list_in = [1, 2, 3]
        self.assertEqual(comp.get_valid(['f_out']), [False])
        comp.run()
        self.assertEqual(comp.get_valid(['f_out']), [True])
        comp.revert_to_defaults()
        # make sure reverting to defaults invalidates our outputs
        self.assertEqual(comp.get_valid(['f_out']), [False])
        self.assertEqual(0., comp.f_in)
        self.assertTrue(all(zeros(0, 'd') == comp.arr_in))
        self.assertEqual([], comp.list_in)
Example 10
    def _get_input_values(self):
        '''Generator for the input values at each finite-difference point.'''

        baseline = self.driver.eval_parameters()
        delta = self.driver.get_fd_steps()
        mask = zeros(baseline.size, 'd')

        # baseline case
        if not self.skip_baseline:
            if not (self.form == "CENTRAL" and self.order % 2 == 1):
                yield baseline

        if self.form == "FORWARD":
            offset = 1
        elif self.form == "BACKWARD":
            offset = -self.order
        elif self.form == "CENTRAL":
            if self.order % 2 == 1:
                offset = (0.5 - self.order)
            else:
                offset = 1 - self.order

        # non-baseline cases for forward and backward
        if self.form in ["BACKWARD", "FORWARD"]:
            for iparam in range(len(mask)):
                mask[iparam] = 1.0
                for i in range(self.order):
                    var_val = baseline + (offset + i) * delta * mask
                    yield var_val
                mask[iparam] = 0.0
        else:  # for central form
            for iparam in range(len(mask)):
                mask[iparam] = 1.0
                if self.order % 2 == 1:
                    for i in range(self.order + 1):
                        var_val = baseline + (offset + i) * delta * mask
                        yield var_val
                else:
                    for i in range(self.order + 1):
                        if (offset + i) != 0:
                            var_val = baseline + (offset + i) * delta * mask
                            yield var_val
                mask[iparam] = 0.0
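
For the even-order central form, the zero offset is skipped because the baseline sample carries zero weight in the central formula. A self-contained sketch (not OpenMDAO code) showing how the two points of an order-2 central stencil recombine into a derivative estimate:

    def central_2nd(f, x, h):
        # For order = 2 the offsets are -1, 0, +1; zero is skipped.
        return (f(x + h) - f(x - h)) / (2.0 * h)

    print central_2nd(lambda x: x**3, 2.0, 1e-4)   # ~12.0, i.e. d(x**3)/dx at x = 2
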
Example 11
class SensitivityDriver(Driver):
    """Driver to calculate the gradient of a workflow and return
    it as a driver output. The gradient is calculated from all
    inputs (Parameters) to all outputs (Objectives and Constraints).

    SensitivityDriver requires OpenMDAO to calculate a gradient.
    Fake Finite Difference is supported.
    """

    implements(IHasParameters, IHasObjectives, IHasConstraints)

    dF = Array(zeros((0, 0), 'd'),
               iotype='out',
               desc='Sensitivity of the '
               'objectives with respect to the parameters. Index 1 is the '
               'objective output, while index 2 is the parameter input.')
    dG = Array(zeros((0, 0), 'd'),
               iotype='out',
               desc='Sensitivity of the '
               'constraints with respect to the parameters. Index 1 is the '
               'constraint output, while index 2 is the parameter input.')

    dF_names = List([],
                    iotype='out',
                    desc='Objective names that '
                    'correspond to our array indices.')
    dG_names = List([],
                    iotype='out',
                    desc='Constraint names that '
                    'correspond to our array indices.')
    dx_names = List([],
                    iotype='out',
                    desc='Parameter names that '
                    'correspond to our array indices.')

    F = Array(zeros(0, 'd'),
              iotype='out',
              desc='Objective baseline values '
              'where sensitivity is evaluated.')
    G = Array(zeros(0, 'd'),
              iotype='out',
              desc='Constraint baseline values '
              'where sensitivity is evaluated.')
    x = Array(zeros(0, 'd'),
              iotype='out',
              desc='Parameter baseline values '
              'where sensitivity is evaluated.')

    def execute(self):
        """Calculate the gradient of the workflow."""

        # Initial run to make sure the workflow executes
        self.run_iteration()

        self._check()

        inputs = self.list_param_group_targets()
        obj = self.list_objective_targets()
        con = self.list_constraint_targets()

        nobj = len(obj)
        ncon = self.total_constraints()

        self.dF_names = self.get_objectives().keys()
        self.dG_names = self.get_constraints().keys()
        self.dx_names = inputs

        self.F = self.eval_objectives()
        self.G = self.eval_constraints(self.parent)
        self.x = self.eval_parameters(self.parent)

        # Finally, calculate gradient
        J = self.workflow.calc_gradient(inputs, obj + con)

        self.dF = J[:nobj, :]
        self.dG = J[nobj:nobj + ncon, :]

        self.record_case()

    def _check(self):
        """Make sure we aren't missing inputs or outputs."""

        if self.total_parameters() < 1:
            msg = "Missing inputs for gradient calculation"
            self.raise_exception(msg, ValueError)

        if len(self.get_objectives()) + self.total_constraints() < 1:
            msg = "Missing outputs for gradient calculation"
            self.raise_exception(msg, ValueError)
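
A minimal usage sketch, assuming the OpenMDAO 0.x Assembly API; the component class Paraboloid and the variable names are illustrative stand-ins, not part of the source above:

    from openmdao.main.api import Assembly, set_as_top

    top = set_as_top(Assembly())
    top.add('comp', Paraboloid())          # hypothetical component with x and f
    top.add('driver', SensitivityDriver())
    top.driver.workflow.add('comp')
    top.driver.add_parameter('comp.x', low=-50., high=50.)
    top.driver.add_objective('comp.f')
    top.run()
    print top.driver.dF   # 1 x 1 array holding d(comp.f)/d(comp.x)
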
Example 12
class SensitivityDriver(DriverUsesDerivatives):
    """Driver to calculate the gradient of a workflow and return
    it as a driver output. The gradient is calculated from all
    inputs (Parameters) to all outputs (Objectives and Constraints).
    
    SensitivityDriver includes a differentiator slot where the differentiation
    method can be plugged. Fake Finite Difference is supported.
    """

    implements(IHasParameters, IHasObjectives, IHasConstraints)

    dF = Array(zeros((0, 0), 'd'),
               iotype='out',
               desc='Sensitivity of the '
               'objectives with respect to the parameters. Index 1 is the '
               'objective output, while index 2 is the parameter input.')
    dG = Array(zeros((0, 0), 'd'),
               iotype='out',
               desc='Sensitivity of the '
               'constraints with respect to the parameters. Index 1 is the '
               'constraint output, while index 2 is the parameter input.')

    dF_names = List([],
                    iotype='out',
                    desc='Objective names that '
                    'correspond to our array indices.')
    dG_names = List([],
                    iotype='out',
                    desc='Constraint names that '
                    'correspond to our array indices.')
    dx_names = List([],
                    iotype='out',
                    desc='Parameter names that '
                    'correspond to our array indices.')

    F = Array(zeros(0, 'd'),
              iotype='out',
              desc='Objective baseline values '
              'where sensitivity is evaluated.')
    G = Array(zeros(0, 'd'),
              iotype='out',
              desc='Constraint baseline values '
              'where sensitivity is evaluated.')
    x = Array(zeros(0, 'd'),
              iotype='out',
              desc='Parameter baseline values '
              'where sensitivity is evaluated.')

    def execute(self):
        """Calculate the gradient of the workflow."""

        self._check()

        # Calculate gradient of the workflow
        self.ffd_order = 1
        self.differentiator.calc_gradient()
        self.ffd_order = 0

        inputs = self.get_parameters().keys()
        objs = self.get_objectives().keys()
        constraints = list(self.get_eq_constraints().keys() + \
                           self.get_ineq_constraints().keys())

        self.dF = zeros((len(objs), len(inputs)), 'd')
        self.dG = zeros((len(constraints), len(inputs)), 'd')
        self.F = zeros(len(objs), 'd')
        self.G = zeros(len(constraints), 'd')
        self.x = zeros(len(inputs), 'd')
        # The name lists correspond one-to-one with the array indices, so
        # fill them once instead of re-appending on every parameter pass.
        self.dF_names = list(objs)
        self.dG_names = list(constraints)
        self.dx_names = list(inputs)

        for i, input_name in enumerate(inputs):

            self.x[i] = self.differentiator.base_param[input_name]

            for j, output_name in enumerate(objs):
                self.dF[j][i] = self.differentiator.get_derivative(
                    output_name, wrt=input_name)
                self.F[j] = self.differentiator.base_data[output_name]

            for j, output_name in enumerate(constraints):
                self.dG[j][i] = self.differentiator.get_derivative(
                    output_name, wrt=input_name)
                self.G[j] = self.differentiator.base_data[output_name]
        # Sensitivity is sometimes run sequentially using different submodels,
        # so we need to return the state to the baseline value.
        self.differentiator.reset_state()

        self.record_case()

    def _check(self):
        """Make sure we aren't missing inputs or outputs."""

        if len(self.get_parameters().values()) < 1:
            msg = "Missing inputs for gradient calculation"
            self.raise_exception(msg, ValueError)

        if len(self.get_objectives().values()) + \
           len(self.get_eq_constraints().values()) + \
           len(self.get_ineq_constraints().values()) < 1:
            msg = "Missing outputs for gradient calculation"
            self.raise_exception(msg, ValueError)
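
The differentiator slot mentioned in the docstring expects an object exposing the handful of attributes and methods that execute() touches. A stub outlining that interface (illustrative only; real implementations such as FiniteDifference ship with openmdao.lib):

    class StubDifferentiator(object):

        def __init__(self):
            self.base_param = {}   # baseline parameter values, keyed by name
            self.base_data = {}    # baseline output values, keyed by name
            self._grads = {}       # (output_name, input_name) -> derivative

        def calc_gradient(self):
            # Populate base_param, base_data and the derivative table.
            pass

        def get_derivative(self, output_name, wrt):
            return self._grads[(output_name, wrt)]

        def reset_state(self):
            # Restore the model to its baseline parameter values.
            pass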