Example #1
File: derivatives.py  Project: CanLi1/pyomo
# Imports used by this snippet (added here; not shown in the original excerpt).
# The module paths follow the older PyNumero API and may differ in newer Pyomo releases.
import pyomo.environ as aml
import pyomo.dae as dae
import matplotlib.pyplot as plt
from pyomo.contrib.pynumero.interfaces.nlp import PyomoNLP
from pyomo.contrib.pynumero.sparse import BlockSymMatrix

# create_problem(...) builds the DAE model; it is defined earlier in derivatives.py
# and is not shown in this excerpt.
instance = create_problem(0.0, 10.0)
# Discretize model using Orthogonal Collocation
discretizer = aml.TransformationFactory('dae.collocation')
discretizer.apply_to(instance, nfe=100, ncp=3, scheme='LAGRANGE-RADAU')
discretizer.reduce_collocation_points(instance,
                                      var=instance.u,
                                      ncp=1,
                                      contset=instance.t)

# Interface pyomo model with nlp
nlp = PyomoNLP(instance)
x = nlp.create_vector_x()
lam = nlp.create_vector_y()

# Evaluate jacobian
jac_c = nlp.jacobian_g(x)
plt.spy(jac_c)
plt.title('Jacobian of the constraints\n')
plt.show()

# Evaluate hessian of the lagrangian
hess_lag = nlp.hessian_lag(x, lam)
plt.spy(hess_lag)
plt.title('Hessian of the Lagrangian function\n')
plt.show()

# Build KKT matrix
kkt = BlockSymMatrix(2)
kkt[0, 0] = hess_lag
kkt[1, 0] = jac_c
plt.spy(kkt.tocoo())
plt.show()  # show() added so the final sparsity plot is displayed, like the plots above
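A short follow-up sketch (an assumption, not part of the original example): the assembled block matrix can be flattened with tocoo() and its size and sparsity reported.

# Hedged sketch: report the size and number of nonzeros of the KKT matrix.
kkt_coo = kkt.tocoo()
print("KKT shape:", kkt_coo.shape)
print("KKT nonzeros:", kkt_coo.nnz)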
Example #2
    # Tail of the create_model(...) definition; the beginning of the function is not
    # shown in this excerpt.
    model.consteta1 = aml.Constraint(expr=model.eta1 == model.nominal_eta1)
    model.consteta2 = aml.Constraint(expr=model.eta2 == model.nominal_eta2)

    return model

#################################################################
# numpy import added for np.linalg.solve below; the remaining imports
# (pyomo.environ as aml, PyomoNLP, BlockMatrix, BlockSymMatrix) appear at the top
# of the source file and are not shown in this excerpt.
import numpy as np

m = create_model(4.5, 1.0)
opt = aml.SolverFactory('ipopt')
results = opt.solve(m, tee=True)

#################################################################
nlp = PyomoNLP(m)
x = nlp.x_init()
y = compute_init_lam(nlp, x=x)  # helper defined elsewhere in the source file (not shown here)

J = nlp.jacobian_g(x)
H = nlp.hessian_lag(x, y)

M = BlockSymMatrix(2)
M[0, 0] = H
M[1, 0] = J

Np = BlockMatrix(2, 1)
Np[0, 0] = nlp.hessian_lag(x, y, variables_cols=[m.eta1, m.eta2])
Np[1, 0] = nlp.jacobian_g(x, variables=[m.eta1, m.eta2])

M_array = M.toarray()
Np_array = Np.toarray()

ds = np.linalg.solve(M_array, Np_array)
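A brief interpretation with a hedged sketch (not part of the original snippet): each column of ds approximates the derivative of the primal-dual vector with respect to one of the parameters eta1 and eta2 (up to the sign convention of the KKT system), so it has one row per variable and constraint and one column per parameter.

# Hedged sketch: inspect the sensitivity matrix computed above.
print("sensitivity matrix shape:", ds.shape)  # expected: (n_variables + n_constraints, 2)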
Example #3
File: sensitivity.py  Project: CanLi1/pyomo
    # Tail of the create_model(...) definition; the beginning of the function is not
    # shown in this excerpt.
    model.consteta2 = aml.Constraint(expr=model.eta2 == model.nominal_eta2)

    return model


#################################################################
m = create_model(4.5, 1.0)
opt = aml.SolverFactory('ipopt')
results = opt.solve(m, tee=True)

#################################################################
nlp = PyomoNLP(m)
x = nlp.x_init()
y = compute_init_lam(nlp, x=x)

J = nlp.jacobian_g(x)
H = nlp.hessian_lag(x, y)

M = BlockSymMatrix(2)
M[0, 0] = H
M[1, 0] = J

Np = BlockMatrix(2, 1)
Np[0, 0] = nlp.hessian_lag(x, y, subset_variables_col=[m.eta1, m.eta2])
Np[1, 0] = nlp.jacobian_g(x, subset_variables=[m.eta1, m.eta2])

from scipy.sparse.linalg import spsolve  # import added; not shown in the original excerpt

ds = spsolve(M.tocsc(), Np.tocsc())
print(nlp.variable_order())

#################################################################
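A hedged follow-up sketch (an assumption, not part of the original excerpt): nlp.variable_order() reports the ordering of the primal variables in the NLP, so its entries can be paired with the leading rows of ds to see which variable each sensitivity row belongs to.

# Hedged sketch: pair each primal variable name with its row of the sensitivity matrix.
ds_dense = ds.toarray() if hasattr(ds, "toarray") else ds
for name, row in zip(nlp.variable_order(), ds_dense):
    print(name, row)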
Example #4
File: derivatives.py  Project: Pyomo/pyomo
    # Tail of the create_problem(...) definition; the beginning of the function is not
    # shown in this excerpt.
    m.init_condition_names = ['init_conditions']
    return m

# The imports (pyomo.environ as aml, matplotlib pyplot as plt, PyomoNLP,
# BlockSymMatrix) appear at the top of derivatives.py and are not shown in this
# excerpt; see Example #1 for an import sketch.
instance = create_problem(0.0, 10.0)
# Discretize model using Orthogonal Collocation
discretizer = aml.TransformationFactory('dae.collocation')
discretizer.apply_to(instance, nfe=100, ncp=3, scheme='LAGRANGE-RADAU')
discretizer.reduce_collocation_points(instance, var=instance.u, ncp=1, contset=instance.t)

# Interface pyomo model with nlp
nlp = PyomoNLP(instance)
x = nlp.create_vector_x()
lam = nlp.create_vector_y()

# Evaluate jacobian
jac_c = nlp.jacobian_g(x)
plt.spy(jac_c)
plt.title('Jacobian of the constraints\n')
plt.show()

# Evaluate hessian of the lagrangian
hess_lag = nlp.hessian_lag(x, lam)
plt.spy(hess_lag)
plt.title('Hessian of the Lagrangian function\n')
plt.show()

# Build KKT matrix
kkt = BlockSymMatrix(2)
kkt[0, 0] = hess_lag
kkt[1, 0] = jac_c
plt.spy(kkt.tocoo())
plt.show()  # show() added so the final sparsity plot is displayed, like the plots above
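A hedged consistency check (an assumption, not part of the original example): the assembled KKT system should be square, with one row per primal variable plus one per constraint.

# Hedged sketch: compare the KKT dimensions with the variable and constraint counts.
n_vars = x.size           # primal variables in the NLP vector
n_cons = jac_c.shape[0]   # constraints (rows of the Jacobian)
print("KKT shape:", kkt.tocoo().shape, "expected:", (n_vars + n_cons, n_vars + n_cons))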