Example #1
0
    def __init__(self, Q, B, d, c, Lcon, Ucon, Lvar, Uvar, **kwargs):
        """Set up a least-squares model with p appended residual variables.

        Q is (p, n).  The n original variables keep the bounds
        Lvar/Uvar while the p appended residual variables are free.
        The p residual-defining constraints (with bounds d on both
        sides, i.e. equalities) are prepended to the m general
        constraints (bounds Lcon/Ucon).
        """
        p, n = Q.shape
        m = B.shape[0]

        # Residual-defining constraints come first, then the general ones.
        _Lcon = np.concatenate((d, Lcon))
        _Ucon = np.concatenate((d, Ucon))

        # Original variable bounds, followed by unbounded residuals.
        _Lvar = np.empty(n+p)
        _Lvar[:n] = Lvar
        _Lvar[n:] = -np.inf
        _Uvar = np.empty(n+p)
        _Uvar[:n] = Uvar
        _Uvar[n:] = np.inf

        # Bug fix: the zero starting point used to be computed but never
        # used.  Forward it to the base class unless the caller already
        # supplied an x0 of their own.
        kwargs.setdefault('x0', np.zeros(n+p))

        NLPModel.__init__(self, n=n+p, m=m+p, Lcon=_Lcon, \
                          Ucon=_Ucon, Lvar=_Lvar, Uvar=_Uvar, **kwargs)

        # Initialize the parameters of least squares.
        self.Q = Q
        self.B = B
        self.nx = n   # number of original variables
        self.nr = p   # number of residual variables
        self.mx = m   # number of general constraints
        # Store d and c as contiguous float arrays (C order).
        self.d = np.ascontiguousarray(d, dtype=float)
        self.c = np.ascontiguousarray(c, dtype=float)
Example #2
0
	def __init__(self, n=0, m=0, name='Generic Matrix-Free', **kwargs):
		"""Set up a matrix-free NLP model and its product counter."""
		# Delegate the standard problem setup to the base class.
		NLPModel.__init__(self, n=n, m=m, name=name, **kwargs)

		# Number of Jacobian-transpose products performed so far.
		self.JTprod = 0
Example #3
0
    def __init__(self, Q, B, d, c, Lcon, Ucon, Lvar, Uvar, **kwargs):
        """Least-squares model: variables taken from the columns of Q,
        general constraints from the rows of B."""
        n_vars = Q.shape[1]
        n_cons = B.shape[0]
        NLPModel.__init__(self, n=n_vars, m=n_cons, Lcon=Lcon,
                          Ucon=Ucon, Lvar=Lvar, Uvar=Uvar, **kwargs)

        # Keep the least-squares data on the instance.
        self.Q = Q
        self.B = B
        # Store d and c as contiguous float arrays (C order).
        self.d = np.ascontiguousarray(d, dtype=float)
        self.c = np.ascontiguousarray(c, dtype=float)
Example #4
0
    def __init__(self, nlp, **kwargs):
        """Reformulate `nlp` so that every inequality becomes an equality.

        One slack variable is appended for each lower-bounded,
        upper-bounded and range constraint of the original problem;
        each range constraint also contributes one extra constraint row.
        All transformed constraints end up with both bounds equal to 0.
        The index ranges of the three slack groups are recorded in
        `self.sL`, `self.sU` and `self.sR`.
        """
        self.nlp = nlp

        # Save number of variables and constraints prior to transformation
        self.original_n = nlp.n
        self.original_m = nlp.m

        # Number of slacks for the constraints
        nSlacks = nlp.nlowerC + nlp.nupperC + nlp.nrangeC
        self.nSlacks = nSlacks

        # Update effective number of variables and constraints
        n = self.original_n + nSlacks
        m = self.original_m + nlp.nrangeC

        # Start with fully unbounded variables, then restore the
        # original bounds on the first original_n entries.
        Lvar = -np.infty * np.ones(n)
        Uvar = +np.infty * np.ones(n)
        # Copy original bounds
        Lvar[:self.original_n] = nlp.Lvar
        Uvar[:self.original_n] = nlp.Uvar

        # Add bounds corresponding to lower constraints.
        # sL = indices of the slacks attached to lower-bounded constraints.
        bot = self.original_n
        self.sL = range(bot, bot + nlp.nlowerC)
        Lvar[bot:bot+nlp.nlowerC] = nlp.Lcon[nlp.lowerC]

        # Add bounds corresponding to upper constraints.
        bot += nlp.nlowerC
        self.sU = range(bot, bot + nlp.nupperC)
        Uvar[bot:bot+nlp.nupperC] = nlp.Ucon[nlp.upperC]

        # Add bounds corresponding to range constraints: those slacks
        # receive both a lower and an upper bound.
        bot += nlp.nupperC
        self.sR = range(bot, bot + nlp.nrangeC)
        Lvar[bot:bot+nlp.nrangeC] = nlp.Lcon[nlp.rangeC]
        Uvar[bot:bot+nlp.nrangeC] = nlp.Ucon[nlp.rangeC]

        # No more inequalities. All constraints are now equal to 0.
        # NOTE(review): Lcon and Ucon alias the SAME zero array here;
        # harmless only as long as neither is mutated later — confirm.
        Lcon = Ucon = np.zeros(m)

        NLPModel.__init__(self, n=n, m=m, name='Slack-'+nlp.name, Lvar=Lvar, \
                          Uvar=Uvar, Lcon=Lcon, Ucon=Ucon)

        # Redefine primal and dual initial guesses.
        # NOTE(review): if nlp.x0 is a NumPy array, `[:]` yields a VIEW,
        # not a copy — confirm a shared buffer is intended here.
        self.original_x0 = nlp.x0[:]
        # Slack components of the new starting point are initialized to 0.
        self.x0 = np.zeros(self.n)
        self.x0[:self.original_n] = self.original_x0[:]

        # Same treatment for the dual (multiplier) starting point.
        self.original_pi0 = nlp.pi0[:]
        self.pi0 = np.zeros(self.m)
        self.pi0[:self.original_m] = self.original_pi0[:]

        return
Example #5
0
    def __init__(self, model, **kwargs):
        """Wrap `model`, adding slack variables for its inequality
        constraints AND for its bounded variables.

        The base class is first initialized with the ORIGINAL sizes;
        `self.n` and `self.m` are then overwritten below to reflect the
        enlarged problem.
        """
        NLPModel.__init__(self, n=model.n, m=model.m, Lcon=model.Lcon, \
                          Ucon=model.Ucon, Lvar=model.Lvar, Uvar=model.Uvar,
                          name= model.name, **kwargs)

        self.model = model
        # Save number of variables and constraints prior to transformation
        self.original_n = self.n
        self.original_m = self.m
        self.original_nbounds = self.nbounds


        # Number of slacks for inequality constraints with a lower bound
        n_con_low = self.nlowerC + self.nrangeC ; self.n_con_low = n_con_low

        # Number of slacks for inequality constraints with an upper bound
        n_con_up = self.nupperC + self.nrangeC ; self.n_con_up = n_con_up

        # Number of slacks for variables with a lower bound
        n_var_low = self.nlowerB + self.nrangeB ; self.n_var_low = n_var_low

        # Number of slacks for variables with an upper bound
        n_var_up = self.nupperB + self.nrangeB ; self.n_var_up = n_var_up

        # Update effective number of variables and constraints
        # (deliberately overrides the values set by NLPModel.__init__).
        self.n  = self.original_n + n_con_low + n_con_up + n_var_low + n_var_up
        self.m  = self.original_m + self.nrangeC + n_var_low + n_var_up

        # Redefine primal and dual initial guesses; slack components
        # start at 0.
        # NOTE(review): if x0 is a NumPy array, `[:]` yields a VIEW, not
        # a copy — confirm a shared buffer is intended here.
        self.original_x0 = self.x0[:]
        self.x0 = numpy.zeros(self.n)
        self.x0[:self.original_n] = self.original_x0[:]

        self.original_pi0 = self.pi0[:]
        self.pi0 = numpy.zeros(self.m)
        self.pi0[:self.original_m] = self.original_pi0[:]
        return
Example #6
0
	# J is square and symmetric for this problem, so ...
	return jprod(x,w)

# end def 

# =============================================================================
# Main program
# =============================================================================

# Problem size: n variables and n constraints.
n = 5
# Primal starting point: every component equal to 3 (float64).
x0 = 3*numpy.ones(n,'d')
# Dual (multiplier) starting point: all zeros.
pi0 = numpy.zeros(n,'d')
# Lower bounds on the variables; upper constraint bounds are all 1.
Lvar = 1.e-4*numpy.ones(n,'d')
Ucon = numpy.ones(n,'d')

# Dense test model: obj/cons/grad/jac are defined earlier in this file
# and attached to the generic NLPModel instance by hand.
testmodel = NLPModel(n=n,m=n,name='Scalable Test 1',x0=x0,Lvar=Lvar,Ucon=Ucon)
testmodel.obj = obj
testmodel.cons = cons
testmodel.grad = grad
testmodel.jac = jac

# Matrix-free counterpart: supplies Jacobian-vector products (jprod /
# jtprod) instead of an explicit Jacobian.
testMFmodel = MFModel(n=n,m=n,name='Matrix-Free Scalable Test 1',x0=x0,
	Lvar=Lvar,Ucon=Ucon)
testMFmodel.obj = obj
testMFmodel.cons = cons
testMFmodel.grad = grad
testMFmodel.jprod = jprod
testMFmodel.jtprod = jtprod

# Test vectors for checking Jacobian vector product
vec1 = numpy.ones(n)