コード例 #1
0
ファイル: scipy_driver.py プロジェクト: thomasdick/FADO
    def preprocess(self):
        """
        Prepare the optimization problem for SciPy: run the base-class
        preprocessing, build the list of constraint dictionaries, assemble
        the variable bounds, and allocate gradient/Jacobian storage.
        Must be called after all functions are added to the driver.
        """
        ConstrainedOptimizationDriver.preprocess(self)

        class _IndexedCall:
            """Bind a fixed constraint index to a two-argument callback."""
            def __init__(self, fun, idx):
                self._f = fun
                self._i = idx
            def __call__(self, x):
                return self._f(x, self._i)
        #end

        # One dictionary per constraint; the same callbacks serve every
        # constraint, distinguished only by the bound index. The first
        # len(self._constraintsEQ) constraints are equality constraints.
        numEQ = len(self._constraintsEQ)
        self._constraints = [
            {'type' : 'eq' if con < numEQ else 'ineq',
             'fun'  : _IndexedCall(self._eval_g, con),
             'jac'  : _IndexedCall(self._eval_jac_g, con)}
            for con in range(self._nCon)
        ]

        # Stack lower/upper bounds into an (nVar, 2) array.
        self._bounds = np.array((self.getLowerBound(), self.getUpperBound()), float).transpose()

        # Allocate storage for the objective gradient and the constraint
        # Jacobian, for both the current and the previous design point.
        self._grad_f = np.zeros((self._nVar,))
        self._old_grad_f = np.zeros((self._nVar,))
        self._jac_g = np.zeros((self._nVar, self._nCon))
        self._old_jac_g = np.zeros((self._nVar, self._nCon))
コード例 #2
0
    def getNLP(self):
        """
        Prepares and returns the optimization problem for Ipopt (an instance of ipyopt.Problem).
        For convenience also does other preprocessing, must be called after all functions are set.
        Do not destroy the driver after obtaining the problem.
        """
        ConstrainedOptimizationDriver.preprocess(self)

        # Constraint bounds: everything starts at zero; the "greater-than"
        # constraints (stored after the equality constraints) get an
        # effectively infinite upper bound (1e20).
        conLowerBound = np.zeros([
            self._nCon,
        ])
        conUpperBound = np.zeros([
            self._nCon,
        ])
        numEQ = len(self._constraintsEQ)
        conUpperBound[numEQ:(numEQ + len(self._constraintsGT))] = 1e20

        # Dense, row-major sparsity pattern for the constraint Jacobian:
        # flat entry k maps to row k // nVar and column k % nVar.
        flatIdx = np.arange(self._nVar * self._nCon, dtype=int)
        rowIdx, colIdx = np.divmod(flatIdx, self._nVar)
        self._sparseIndices = (rowIdx, colIdx)

        # create the optimization problem
        self._nlp = opt.Problem(self._nVar, self.getLowerBound(),
                                self.getUpperBound(), self._nCon,
                                conLowerBound, conUpperBound,
                                self._sparseIndices, 0, self._eval_f,
                                self._eval_grad_f, self._eval_g,
                                self._eval_jac_g)
        return self._nlp
コード例 #3
0
    def __init__(self):
        """Initialize the driver with empty Ipopt problem state."""
        ConstrainedOptimizationDriver.__init__(self)

        # the ipyopt.Problem instance, created later by getNLP()
        self._nlp = None

        # sparsity pattern of the constraint gradient (for now assumed dense)
        self._sparseIndices = None
コード例 #4
0
ファイル: scipy_driver.py プロジェクト: thomasdick/FADO
    def __init__(self):
        """Initialize the SciPy driver state."""
        ConstrainedOptimizationDriver.__init__(self)

        # SciPy-style constraint dictionaries and variable bounds,
        # populated later by preprocess()
        self._bounds = []
        self._constraints = []

        # inputs for the second-order (Hessian) approximation
        self._hessianFilename = []
        self._hessianDir = ""
コード例 #5
0
ファイル: scipy_driver.py プロジェクト: su2code/FADO
    def __init__(self):
        """Initialize the SciPy driver with empty constraints and bounds."""
        ConstrainedOptimizationDriver.__init__(self)

        # constraint dictionaries and variable bounds, filled by preprocess()
        self._bounds = []
        self._constraints = []