Example #1
 def run_iteration(self):
     """ The CONMIN driver iteration."""
     
     #self._logger.debug('iter_count = %d' % self.iter_count)
     #self._logger.debug('objective = %f' % self.cnmn1.obj)
     #self._logger.debug('design vals = %s' % self.design_vals[:-2])
     
     # TODO: 'step around' ill-behaved cases.
     
     self._load_common_blocks()
     
     #print "Iteration %s: " % self.get_pathname(), self.iter_count
     #print "Before"
     #print self.design_vals
     try:
         (self.design_vals,
          self._scal, self.d_const, self.s,
          self.g1, self.g2, self._b, self._c,
          self._cons_is_linear,
          self.cons_active_or_violated, self._ms1) = \
              conmin.conmin(self.design_vals,
                            self._lower_bounds, self._upper_bounds,
                            self.constraint_vals,
                            self._scal, self.d_obj,
                            self.d_const,
                            self.s, self.g1, self.g2, self._b, self._c,
                            self._cons_is_linear,
                            self.cons_active_or_violated, self._ms1)
     except Exception as err:
         self._logger.error(str(err))
         raise
Example #2
 def run_iteration(self):
     """ The CONMIN driver iteration."""
     
     #self._logger.debug('iter_count = %d' % self.iter_count)
     #self._logger.debug('objective = %f' % self.cnmn1.obj)
     #self._logger.debug('design vals = %s' % self.design_vals[:-2])
     
     # TODO: 'step around' ill-behaved cases.
     
     self._load_common_blocks()
     
     #print "Iteration %s: " % self.get_pathname(), self.iter_count
     #print "Before"
     #print self.design_vals
     try:
         (self.design_vals,
          self._scal, self.d_const, self.s,
          self.g1, self.g2, self._b, self._c,
          self._cons_is_linear,
          self.cons_active_or_violated, self._ms1) = \
              conmin.conmin(self.design_vals,
                            self._lower_bounds, self._upper_bounds,
                            self.constraint_vals,
                            self._scal, self.d_obj,
                            self.d_const,
                            self.s, self.g1, self.g2, self._b, self._c,
                            self._cons_is_linear,
                            self.cons_active_or_violated, self._ms1)
     except Exception as err:
         self._logger.error(str(err))
         raise
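Both snippets above bracket the raw conmin.conmin call with _load_common_blocks() (and, in the fuller examples below, _save_common_blocks() afterwards), which mirror the driver's Python-side settings into CONMIN's Fortran common blocks and copy the updated state back out. Below is a minimal, hypothetical sketch of that load/save pattern; the standalone function names and the field list are illustrative assumptions, not the actual OpenMDAO helpers.

    # Illustrative only: a generic pattern for mirroring attributes between a
    # Python-side state object and a Fortran common-block module. The field
    # names are a hypothetical subset of CONMIN's CNMN1 block.
    FIELDS = ('info', 'igoto', 'nfdg', 'nac', 'ct', 'obj')

    def load_common_block(py_state, fortran_block, fields=FIELDS):
        """Push Python-side values into the common block before calling CONMIN."""
        for name in fields:
            setattr(fortran_block, name, getattr(py_state, name))

    def save_common_block(py_state, fortran_block, fields=FIELDS):
        """Pull CONMIN's updated flags and values back after the call."""
        for name in fields:
            setattr(py_state, name, getattr(fortran_block, name))
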
Example #3
    def run_iteration(self):
        """ The CONMIN driver iteration."""

        #self._logger.debug('iter_count = %d' % self.iter_count)
        #self._logger.debug('objective = %f' % self.cnmn1.obj)
        #self._logger.debug('design vals = %s' % self.design_vals[:-2])

        # TODO: 'step around' ill-behaved cases.

        self._load_common_blocks()

        #print "Iteration %s: " % self.get_pathname(), self.iter_count
        #print "Before"
        #print self.design_vals
        try:
            (self.design_vals,
             self._scal, self.d_const, self.s,
             self.g1, self.g2, self._b, self._c,
             self._cons_is_linear,
             self.cons_active_or_violated, self._ms1) = \
                 conmin.conmin(self.design_vals,
                               self._lower_bounds, self._upper_bounds,
                               self.constraint_vals,
                               self._scal, self.d_obj,
                               self.d_const,
                               self.s, self.g1, self.g2, self._b, self._c,
                               self._cons_is_linear,
                               self.cons_active_or_violated, self._ms1)
        except Exception as err:
            self._logger.error(str(err))
            raise

        self._save_common_blocks()

        # calculate objective and constraints
        if self.cnmn1.info == 1:

            # Note. CONMIN is driving the finite difference estimation of the
            # gradient. However, we still take advantage of a component's
            # user-defined gradients via Fake Finite Difference.
            if self.cnmn1.igoto == 3:

                # update the parameters in the model
                self.set_parameters(self.design_vals[:-2])

                # Run model under Fake Finite Difference
                self.calc_derivatives(first=True, savebase=True)
                self.ffd_order = 1
                super(CONMINdriver, self).run_iteration()
                self.ffd_order = 0
            else:
                # update the parameters in the model
                self.set_parameters(self.design_vals[:-2])

                # Run the model for this step
                super(CONMINdriver, self).run_iteration()

            # calculate objective
            self.cnmn1.obj = self.eval_objective()

            # update constraint value array
            self.constraint_vals[0:self.total_ineq_constraints()] = \
                self.eval_ineq_constraints()

            #self._logger.debug('constraints = %s' % self.constraint_vals)

        # calculate gradient of constraints and gradient of objective
        # We also have to determine which constraints are active/violated, and
        # only return gradients of active/violated constraints.
        elif self.cnmn1.info == 2 and self.cnmn1.nfdg == 1:

            # Sometimes, CONMIN wants the derivatives at a different point.
            self.set_parameters(self.design_vals[:-2])
            super(CONMINdriver, self).run_iteration()

            inputs = self.list_param_group_targets()
            obj = self.list_objective_targets()
            con = self.list_ineq_constraint_targets()

            J = self.workflow.calc_gradient(inputs, obj + con)

            nobj = len(obj)
            self.d_obj[:-2] = J[0:nobj, :].ravel()

            for i in range(len(self.cons_active_or_violated)):
                self.cons_active_or_violated[i] = 0

            self.cnmn1.nac = 0
            for i in range(self.total_ineq_constraints()):
                if self.constraint_vals[i] >= self.cnmn1.ct:
                    self.cons_active_or_violated[self.cnmn1.nac] = i+1
                    self.d_const[:-2, self.cnmn1.nac] = J[nobj+i, :]
                    self.cnmn1.nac += 1
        else:
            self.raise_exception('Unexpected value for flag INFO returned'
                                 ' from CONMIN.', RuntimeError)
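In the gradient branch above (INFO == 2 and NFDG == 1), only active or violated constraints are reported back to CONMIN: a constraint counts as active once its value reaches the threshold CT, its 1-based index is stored in the active set, and its row of the Jacobian is copied into a column of d_const. The standalone sketch below restates that bookkeeping with NumPy; the function name and exact array shapes are illustrative assumptions.

    import numpy as np

    def pack_active_constraint_gradients(constraint_vals, J, nobj, ct):
        """Illustrative restatement of the INFO == 2 bookkeeping above.

        J holds the objective row(s) first, then one row per inequality
        constraint; ct is CONMIN's active-constraint threshold.
        """
        ncon = len(constraint_vals)
        ndv = J.shape[1]
        active = np.zeros(ncon, dtype=int)    # 1-based indices of active/violated constraints
        d_const = np.zeros((ndv + 2, ncon))   # two spare rows, as in the driver's arrays
        nac = 0
        for i in range(ncon):
            if constraint_vals[i] >= ct:      # at or past the threshold: active or violated
                active[nac] = i + 1
                d_const[:-2, nac] = J[nobj + i, :]
                nac += 1
        return active, d_const, nac
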
Example #4
    def run_iteration(self):
        """ The CONMIN driver iteration."""

        #self._logger.debug('iter_count = %d' % self.iter_count)
        #self._logger.debug('objective = %f' % self.cnmn1.obj)
        #self._logger.debug('design vals = %s' % self.design_vals[:-2])

        # TODO: 'step around' ill-behaved cases.

        self._load_common_blocks()

        #print "Iteration %s: " % self.get_pathname(), self.iter_count
        #print "Before"
        #print self.design_vals
        try:
            (self.design_vals,
             self._scal, self.d_const, self.s,
             self.g1, self.g2, self._b, self._c,
             self._cons_is_linear,
             self.cons_active_or_violated, self._ms1) = \
                 conmin.conmin(self.design_vals,
                               self._lower_bounds, self._upper_bounds,
                               self.constraint_vals,
                               self._scal, self.d_obj,
                               self.d_const,
                               self.s, self.g1, self.g2, self._b, self._c,
                               self._cons_is_linear,
                               self.cons_active_or_violated, self._ms1)
        except Exception as err:
            self._logger.error(str(err))
            raise

        self._save_common_blocks()

        # calculate objective and constraints
        if self.cnmn1.info == 1:

            # Note. CONMIN is driving the finite difference estimation of the
            # gradient.
            if self.cnmn1.igoto == 3:

                # update the parameters in the model
                self.set_parameters(self.design_vals[:-2])
                super(CONMINdriver, self).run_iteration()

            else:
                # update the parameters in the model
                self.set_parameters(self.design_vals[:-2])

                # Run the model for this step
                super(CONMINdriver, self).run_iteration()

            # calculate objective
            self.cnmn1.obj = self.eval_objective()

            # update constraint value array
            self.constraint_vals[0:self.total_ineq_constraints()] = \
                self.eval_ineq_constraints()

            #self._logger.debug('constraints = %s' % self.constraint_vals)

        # calculate gradient of constraints and gradient of objective
        # We also have to determine which constraints are active/violated, and
        # only return gradients of active/violated constraints.
        elif self.cnmn1.info == 2 and self.cnmn1.nfdg == 1:

            # Sometimes, CONMIN wants the derivatives at a different point.
            self.set_parameters(self.design_vals[:-2])
            super(CONMINdriver, self).run_iteration()

            inputs = self.list_param_group_targets()
            obj = self.list_objective_targets()
            con = self.list_ineq_constraint_targets()

            J = self.workflow.calc_gradient(inputs, obj + con)

            nobj = len(obj)
            self.d_obj[:-2] = J[0:nobj, :].ravel()

            for i in range(len(self.cons_active_or_violated)):
                self.cons_active_or_violated[i] = 0

            self.cnmn1.nac = 0
            for i in range(self.total_ineq_constraints()):
                if self.constraint_vals[i] >= self.cnmn1.ct:
                    self.cons_active_or_violated[self.cnmn1.nac] = i + 1
                    self.d_const[:-2, self.cnmn1.nac] = J[nobj + i, :]
                    self.cnmn1.nac += 1
        else:
            self.raise_exception(
                'Unexpected value for flag INFO returned'
                ' from CONMIN.', RuntimeError)
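Taken together, the examples implement a simple request/response protocol with CONMIN: after each call, the INFO flag says what the driver must compute next, 1 for fresh objective and constraint values at the current design point, 2 (when NFDG == 1) for user-supplied gradients, and anything else is treated as an error. A minimal sketch of that dispatch, using placeholder callables rather than the real driver methods, might look like this.

    def respond_to_conmin(info, nfdg, evaluate_functions, evaluate_gradients):
        # Placeholder callables, not part of the CONMIN or OpenMDAO APIs:
        # evaluate_functions() returns (objective, constraint_values) and
        # evaluate_gradients() returns the Jacobian of objective and constraints.
        if info == 1:
            # CONMIN wants the objective and constraints at the current point.
            return evaluate_functions()
        elif info == 2 and nfdg == 1:
            # CONMIN wants gradients and the caller agreed to supply them (NFDG = 1).
            return evaluate_gradients()
        raise RuntimeError('Unexpected value for flag INFO returned from CONMIN.')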