Example #1

import numpy

# Imports assumed for this snippet: Gradient is pyOpt's sensitivity helper,
# and NLPModel is the NLPy base class this wrapper extends.
from pyOpt.pyOpt_gradient import Gradient
from nlpy.model import NLPModel
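This example wraps a pyOpt Optimization problem as an NLPy NLPModel. The constructor copies variable bounds, constraint bounds, and the starting point from the pyOpt problem, and the evaluation methods cache the last point so that a single call to pyOpt's obj_fun serves the objective, the constraints, and their gradients.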
class NLPModel_From_PyOpt(NLPModel):

    def __init__(self, pyopt_model, sens_type, **kwargs):
        """
        :parameters:

           :nlp:       nonlinear problem pyOpt
            :sens_type:  sensitivity type
                'FD' : estimation of gradients using finite differences
                'CS' : estimation of gradients using complex step
                grad_func : user provided gradients
        """

        self.pyopt_model = pyopt_model

        # Problem dimensions.
        nbVar = len(pyopt_model._variables)
        nbCons = len(pyopt_model._constraints)

        # Bounds on variables.
        LVar = -numpy.inf * numpy.ones(nbVar)
        UVar = numpy.inf * numpy.ones(nbVar)
        X0 = numpy.zeros(nbVar)

        for i in range(nbVar):
            var = pyopt_model.getVar(i)
            LVar[i] = var.lower
            UVar[i] = var.upper
            X0[i] = var.value

        # Constraint left- and right-hand sides.
        LCon = -numpy.inf * numpy.ones(nbCons)
        UCon = numpy.inf * numpy.ones(nbCons)

        for j in range(nbCons):
            cons = pyopt_model.getCon(j)
            if cons.type == 'i':    # inequality: lower <= c(x) <= upper
                LCon[j] = cons.lower
                UCon[j] = cons.upper
            elif cons.type == 'e':  # equality: c(x) = equal
                LCon[j] = cons.equal
                UCon[j] = cons.equal

        # Differentiation method. A step of 1e-6 is a common default for
        # forward differences; the complex step method tolerates a tiny
        # step (1e-20) because it avoids subtractive cancellation.
        self.sens_type = sens_type
        if sens_type == 'FD':
            self.sens_step = 1e-6
        else:
            self.sens_step = 1e-20

        # pyOpt's Gradient helper evaluates objective and constraint
        # gradients with the chosen scheme; '' leaves pyOpt's sens_mode
        # at its default.
        self.gradient_method = Gradient(pyopt_model, sens_type, '',
                                        self.sens_step)

        # Cached values from the last evaluation point (private). pyOpt's
        # obj_fun returns the objective and constraints together, so a single
        # call can serve obj(), cons(), grad() and igrad().
        self._last_x = None
        self._last_obj = None
        self._last_grad_obj = None
        self._last_cons = None
        self._last_grad_con = None

        # Initialize the NLPy base model.
        NLPModel.__init__(self, name=pyopt_model.name, n=nbVar, m=nbCons,
                          Lvar=LVar, Uvar=UVar, Lcon=LCon, Ucon=UCon,
                          x0=X0, **kwargs)

    def obj(self, x):
        # Return the cached objective value if x has not changed.
        if self._last_obj is not None and (self._last_x == x).all():
            return self._last_obj
        f, c, fail = self.pyopt_model.obj_fun(x)
        self._last_x = numpy.array(x)  # copy; x[:] would only alias a NumPy array
        self._last_obj = f
        self._last_cons = c
        self._last_grad_obj = None  # Gradient out of date.
        self._last_grad_con = None  # Gradient out of date.
        return f

    def cons(self, x):
        # Return the cached constraint values if x has not changed.
        if self._last_cons is not None and (self._last_x == x).all():
            return self._last_cons
        f, c, fail = self.pyopt_model.obj_fun(x)
        self._last_x = numpy.array(x)  # copy; see obj()
        self._last_obj = f
        self._last_cons = c
        self._last_grad_obj = None  # Gradient out of date.
        self._last_grad_con = None  # Gradient out of date.
        return c

    def grad(self, x):
        # Return the cached objective gradient if x has not changed.
        if self._last_grad_obj is not None and (self._last_x == x).all():
            return self._last_grad_obj
        f, c, fail = self.pyopt_model.obj_fun(x)
        grad = self.gradient_method.getGrad(x, {}, [f], c)
        self._last_x = numpy.array(x)  # copy; see obj()
        self._last_obj = f
        self._last_cons = c
        self._last_grad_obj = grad[0].flatten()
        self._last_grad_con = grad[1]
        return self._last_grad_obj

    def igrad(self, x, i):
        # Return row i of the constraint Jacobian, using the cache when valid.
        if self._last_grad_con is not None and (self._last_x == x).all():
            return self._last_grad_con[i]
        f, c, fail = self.pyopt_model.obj_fun(x)
        grad = self.gradient_method.getGrad(x, {}, [f], c)
        self._last_x = numpy.array(x)  # copy; see obj()
        self._last_obj = f
        self._last_cons = c
        self._last_grad_obj = grad[0].flatten()
        self._last_grad_con = grad[1]
        return self._last_grad_con[i]
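
A minimal usage sketch, assuming pyOpt's standard Optimization API and the imports above; the toy objective, constraint, and names (toy_obj, g1) are illustrative, not from the original source:

from pyOpt import Optimization

def toy_obj(x):
    # pyOpt expects obj_fun to return (f, g, fail).
    f = x[0]**2 + x[1]**2
    g = [x[0] + x[1] - 1.0]  # single inequality constraint, g[0] <= 0
    fail = 0
    return f, g, fail

opt_prob = Optimization('toy', toy_obj)
opt_prob.addVar('x1', 'c', lower=-10.0, upper=10.0, value=2.0)
opt_prob.addVar('x2', 'c', lower=-10.0, upper=10.0, value=2.0)
opt_prob.addCon('g1', 'i')  # inequality; pyOpt's default bounds give g1(x) <= 0

model = NLPModel_From_PyOpt(opt_prob, 'FD')  # or 'CS' for complex step
x0 = model.x0
print(model.obj(x0))   # objective value at the starting point
print(model.grad(x0))  # finite-difference gradient
print(model.cons(x0))  # constraint values, reused from the cached call

Because obj(), cons(), grad(), and igrad() share the cache, calling them in sequence at the same point triggers only one obj_fun evaluation plus one gradient evaluation.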