def __init__(self, n=0, m=0, name='Algopy-Generic', **kwargs):
    """Initialize an Algopy-traced model.

    :parameters:
        :n: number of variables
        :m: number of constraints
        :name: model name
    Remaining keyword arguments are forwarded to ``NLPModel``.
    """
    NLPModel.__init__(self, n, m, name, **kwargs)
    # Tracing is best-effort: a model may lack an objective or constraints
    # at x0.  Catch Exception rather than using a bare except so that
    # KeyboardInterrupt / SystemExit still propagate.
    try:
        self._trace_obj(self.x0)
    except Exception:
        pass
    try:
        self._trace_cons(self.x0)
    except Exception:
        pass
def __init__(self, n=0, m=0, name="Algopy-Generic", **kwargs):
    """Initialize an Algopy-traced model.

    :parameters:
        :n: number of variables
        :m: number of constraints
        :name: model name
    Remaining keyword arguments are forwarded to ``NLPModel``.
    """
    NLPModel.__init__(self, n, m, name, **kwargs)
    # Tracing is best-effort: a model may lack an objective or constraints
    # at x0.  Catch Exception rather than using a bare except so that
    # KeyboardInterrupt / SystemExit still propagate.
    try:
        self._trace_obj(self.x0)
    except Exception:
        pass
    try:
        self._trace_cons(self.x0)
    except Exception:
        pass
def __init__(self, n=0, m=0, name="Adolc-Generic", **kwargs):
    """Set up an ADOL-C backed model and trace it at the starting point.

    :parameters:
        :n: number of variables
        :m: number of constraints
        :name: model name
    Remaining keyword arguments are forwarded to ``NLPModel``.
    """
    NLPModel.__init__(self, n, m, name, **kwargs)

    # Bump the class-wide instance counter (used for unique trace ids).
    self.__class__.__NUM_INSTANCES[0] += 1

    # Trace objective and constraint functions.
    self._obj_trace_id = None
    self._trace_obj(self.x0)

    self._con_trace_id = None
    if self.m > 0:
        self._trace_con(self.x0)

    # Sparsity patterns are computed lazily on first evaluation.
    self.first_sparse_hess_eval = True
    self.first_sparse_jac_eval = True
def gradient_constraint(x, i):
    """Return the gradient of constraint *i* at *x* as a numpy array.

    NOTE(review): the original source was truncated here — the function
    header and the initialization of ``grad`` were not visible.  They have
    been reconstructed from context (``nlp.igrad = gradient_constraint``
    below grounds the name; the body writes two components) — confirm
    against the original file.
    """
    grad = numpy.zeros(2)
    if i == 1:
        grad[0] = 0.
        grad[1] = 2.
    elif i == 2:
        grad[0] = 3.
        grad[1] = 0.
    return grad


Infinity = numpy.inf

# Create an instance of an NLPModel.
nlp = NLPModel(n=2, m=2, name='Rosenbrock',
               Lvar=numpy.array([-1000, -Infinity]),
               Uvar=numpy.array([Infinity, 1000]),
               Lcon=numpy.array([-2000, -1000]),
               Ucon=numpy.array([2000, 1000]),
               x0=numpy.array([-1.2, 1]))

# Assign an objective function and its gradient.
nlp.obj = rosenbrock
nlp.grad = gradient_rosenbrock

# Assign constraints and their gradients.
nlp.cons = constraint
nlp.igrad = gradient_constraint

# Translate this NLPy problem into a pyOpt problem.
opt_prob = PyOpt_From_NLPModel(nlp)
def __init__(self, n=0, m=0, name='CppAD-Generic', **kwargs):
    """Build a CppAD-backed model and record tapes at the starting point.

    :parameters:
        :n: number of variables
        :m: number of constraints
        :name: model name
    Remaining keyword arguments are forwarded to ``NLPModel``.
    """
    NLPModel.__init__(self, n, m, name, **kwargs)
    # Record the objective tape and, when constraints exist, their tape.
    self._trace_obj(self.x0)
    if self.m > 0:
        self._trace_cons(self.x0)
def __init__(self, pyopt_model, sens_type, **kwargs):
    """
    :parameters:
        :nlp: nonlinear problem pyOpt
        :sens_type: sensitivity type
                    'FD' : estimation of gradients using finite differences
                    'CS' : estimation of gradients using complex step
                    grad_func : user provided gradients
    """
    self.pyopt_model = pyopt_model

    # Problem dimensions.
    num_vars = len(pyopt_model._variables)
    num_cons = len(pyopt_model._constraints)

    # Variable bounds and starting point, pulled from the pyOpt problem.
    lower = -numpy.inf * numpy.ones(num_vars)
    upper = numpy.inf * numpy.ones(num_vars)
    start = numpy.zeros(num_vars)
    for k in range(num_vars):
        v = pyopt_model.getVar(k)
        lower[k] = v.lower
        upper[k] = v.upper
        start[k] = v.value

    # Constraint sides: inequalities ('i') carry their own lower/upper
    # values; equalities ('e') pin both sides to the same value.  Any
    # other type is left unbounded.
    clower = -numpy.inf * numpy.ones(num_cons)
    cupper = numpy.inf * numpy.ones(num_cons)
    for k in range(num_cons):
        c = pyopt_model.getCon(k)
        if c.type == 'i':
            clower[k] = c.lower
            cupper[k] = c.upper
        elif c.type == 'e':
            clower[k] = c.equal
            cupper[k] = c.equal

    # Differentiation method: a coarse step for finite differences, a
    # tiny one otherwise (complex step has no subtractive cancellation).
    self.sens_type = sens_type
    self.sens_step = 1e-6 if sens_type == 'FD' else 1e-20
    self.gradient_method = Gradient(pyopt_model, sens_type, '',
                                    self.sens_step)

    # Saved values (private).
    self._last_x = None
    self._last_obj = None
    self._last_grad_obj = None
    self._last_cons = None
    self._last_grad_con = None

    # Initialize model.
    NLPModel.__init__(self, name=pyopt_model.name, n=num_vars,
                      m=num_cons, Lvar=lower, Uvar=upper,
                      Lcon=clower, Ucon=cupper, x0=start, **kwargs)