Beispiel #1
0
    def __init__(self, n=0, m=0, name='Algopy-Generic', **kwargs):
        """Initialize a model whose derivatives are computed via Algopy tracing.

        :parameters:
            :n: number of variables (default: 0)
            :m: number of constraints (default: 0)
            :name: model name (default: 'Algopy-Generic').
        """
        NLPModel.__init__(self, n, m, name, **kwargs)

        # Tracing at x0 is best-effort: some models cannot be traced at the
        # starting point.  Catch Exception rather than using a bare
        # ``except:`` so KeyboardInterrupt/SystemExit still propagate.
        try:
            self._trace_obj(self.x0)
        except Exception:
            pass

        try:
            self._trace_cons(self.x0)
        except Exception:
            pass
Beispiel #2
0
    def __init__(self, n=0, m=0, name="Algopy-Generic", **kwargs):
        """Initialize a model whose derivatives are computed via Algopy tracing.

        :parameters:
            :n: number of variables (default: 0)
            :m: number of constraints (default: 0)
            :name: model name (default: "Algopy-Generic").
        """
        NLPModel.__init__(self, n, m, name, **kwargs)

        # Tracing at x0 is best-effort: some models cannot be traced at the
        # starting point.  Catch Exception rather than using a bare
        # ``except:`` so KeyboardInterrupt/SystemExit still propagate.
        try:
            self._trace_obj(self.x0)
        except Exception:
            pass

        try:
            self._trace_cons(self.x0)
        except Exception:
            pass
Beispiel #3
0
    def __init__(self, n=0, m=0, name="Adolc-Generic", **kwargs):
        """Set up the model and record traces at the starting point ``x0``.

        :parameters:
            :n: number of variables (default: 0)
            :m: number of constraints (default: 0)
            :name: model name (default: "Adolc-Generic").
        """
        NLPModel.__init__(self, n, m, name, **kwargs)
        # Bump the class-wide instance counter (name-mangled class attribute).
        self.__class__.__NUM_INSTANCES[0] += 1

        # Record the objective trace at x0.
        self._obj_trace_id = None
        self._trace_obj(self.x0)

        # Record the constraint trace only when the model has constraints.
        self._con_trace_id = None
        if self.m > 0:
            self._trace_con(self.x0)

        # Flags consumed on the first sparse Hessian/Jacobian evaluation —
        # presumably to trigger one-time sparsity setup; confirm in the
        # sparse evaluation methods.
        self.first_sparse_hess_eval = self.first_sparse_jac_eval = True
Beispiel #4
0
    def __init__(self, n=0, m=0, name='CppAD-Generic', **kwargs):
        """Initialize the model and record CppAD traces at ``x0``.

        The objective is always traced; constraints are traced only when
        the model has any (``m > 0``).
        """
        NLPModel.__init__(self, n, m, name, **kwargs)

        self._trace_obj(self.x0)
        if self.m > 0:
            self._trace_cons(self.x0)
Beispiel #5
0
    def __init__(self, pyopt_model, sens_type, **kwargs):
        """Wrap a pyOpt problem so it behaves like an ``NLPModel``.

        :parameters:

            :pyopt_model: nonlinear problem in pyOpt format
            :sens_type:   sensitivity type:
                'FD' : estimation of gradients using finite differences
                'CS' : estimation of gradients using complex step
                grad_func : user provided gradients
        """
        self.pyopt_model = pyopt_model

        # Problem dimensions.
        nbVar = len(pyopt_model._variables)
        nbCons = len(pyopt_model._constraints)

        # Bounds on variables, defaulting to unbounded.
        LVar = numpy.full(nbVar, -numpy.inf)
        UVar = numpy.full(nbVar, numpy.inf)
        X0 = numpy.zeros(nbVar)

        for i in range(nbVar):
            var = pyopt_model.getVar(i)
            LVar[i] = var.lower
            UVar[i] = var.upper
            X0[i] = var.value

        # Constraint left- and right-hand sides.  Inequality ('i')
        # constraints carry distinct lower/upper bounds; equality ('e')
        # constraints use the same value for both sides.  Any other
        # constraint type is left unbounded.
        LCon = numpy.full(nbCons, -numpy.inf)
        UCon = numpy.full(nbCons, numpy.inf)

        for j in range(nbCons):
            cons = pyopt_model.getCon(j)
            if cons.type == 'i':
                LCon[j] = cons.lower
                UCon[j] = cons.upper
            elif cons.type == 'e':
                LCon[j] = cons.equal
                UCon[j] = cons.equal

        # Differentiation step: finite differences need a moderate step;
        # the complex step method tolerates a tiny one.
        self.sens_type = sens_type
        if sens_type == 'FD':
            self.sens_step = 1e-6
        else:
            self.sens_step = 1e-20

        self.gradient_method = Gradient(pyopt_model, sens_type, '',
                                        self.sens_step)

        # Saved values (private).  NOTE(review): these appear to cache the
        # most recent evaluation point and results — confirm in the
        # evaluation methods that consume them.
        self._last_x = None
        self._last_obj = None
        self._last_grad_obj = None
        self._last_cons = None
        self._last_grad_con = None

        # Initialize model.
        NLPModel.__init__(self, name=pyopt_model.name, n=nbVar, m=nbCons,
                          Lvar=LVar, Uvar=UVar, Lcon=LCon, Ucon=UCon,
                          x0=X0, **kwargs)
Beispiel #6
0
    def __init__(self, n=0, m=0, name='CppAD-Generic', **kwargs):
        """Build the base model, then trace the functions at ``x0``.

        Constraints are only traced for constrained models (``m > 0``);
        the objective is traced unconditionally.
        """
        NLPModel.__init__(self, n, m, name, **kwargs)

        # Trace objective and, when present, constraint functions.
        self._trace_obj(self.x0)
        if self.m > 0:
            self._trace_cons(self.x0)