Example #1
# evaluate residual of inequality constraints
res_ineq = nlp.evaluate_ineq_constraints()

# demonstrate the use of compression from full set of
# lower and upper bounds on the inequality constraints
# to only the finite values using masks
ineqlb_mask = build_bounds_mask(nlp.ineq_lb())
inequb_mask = build_bounds_mask(nlp.ineq_ub())
# get the compressed vector
compressed_ineq_lb = full_to_compressed(nlp.ineq_lb(), ineqlb_mask)
compressed_ineq_ub = full_to_compressed(nlp.ineq_ub(), inequb_mask)
# we can also build compression matrices
Cineq_ineqlb = build_compression_matrix(ineqlb_mask)
Cineq_inequb = build_compression_matrix(inequb_mask)

# lower and upper inequalities residual
res_ineq_lb = Cineq_ineqlb * res_ineq - compressed_ineq_lb
res_ineq_ub = compressed_ineq_ub - Cineq_inequb * res_ineq
print("Residuals of inequality constraints lower bounds:", res_ineq_lb)
print("Residuals of inequality constraints upper bounds:", res_ineq_ub)

feasible = False
if (np.all(res_xl >= 0) and np.all(res_xu >= 0)
        and np.all(res_ineq_lb >= 0) and np.all(res_ineq_ub >= 0)
        and np.allclose(res_eq, np.zeros(nlp.n_eq_constraints()), atol=1e-5)):
    feasible = True

print("Is x0 feasible:", feasible)

Example #2
# imports needed to run this example (not shown in the original listing; the
# PyNumero helper paths below are the usual ones and are assumed here)
import numpy as np
import pyomo.environ as pyo
from pyomo.contrib.pynumero.interfaces.pyomo_nlp import PyomoNLP
from pyomo.contrib.pynumero.interfaces.utils import (
    build_bounds_mask, build_compression_matrix, full_to_compressed)
# create_basic_model() is defined elsewhere in the source file of this example


def main():
    model = create_basic_model()
    solver = pyo.SolverFactory('ipopt')
    solver.solve(model, tee=True)

    # build nlp initialized at the solution
    nlp = PyomoNLP(model)

    # get initial point
    print(nlp.primals_names())
    x0 = nlp.get_primals()

    # vectors of lower and upper bounds
    xl = nlp.primals_lb()
    xu = nlp.primals_ub()

    # demonstrate use of compression from full set of bounds
    # to only finite bounds using masks
    xlb_mask = build_bounds_mask(xl)
    xub_mask = build_bounds_mask(xu)
    # get the compressed vector
    compressed_xl = full_to_compressed(xl, xlb_mask)
    compressed_xu = full_to_compressed(xu, xub_mask)
    # we can also build compression matrices
    Cx_xl = build_compression_matrix(xlb_mask)
    Cx_xu = build_compression_matrix(xub_mask)

    # lower and upper bounds residual
    res_xl = Cx_xl * x0 - compressed_xl
    res_xu = compressed_xu - Cx_xu * x0
    print("Residuals lower bounds x-xl:", res_xl)
    print("Residuals upper bounds xu-x:", res_xu)

    # set the value of the primals (we can skip the duals)
    # here we set them to the initial values, but we could
    # set them to anything
    nlp.set_primals(x0)

    # evaluate residual of equality constraints
    print(nlp.constraint_names())
    res_eq = nlp.evaluate_eq_constraints()
    print("Residuals of equality constraints:", res_eq)

    # evaluate residual of inequality constraints
    res_ineq = nlp.evaluate_ineq_constraints()

    # demonstrate the use of compression from full set of
    # lower and upper bounds on the inequality constraints
    # to only the finite values using masks
    ineqlb_mask = build_bounds_mask(nlp.ineq_lb())
    inequb_mask = build_bounds_mask(nlp.ineq_ub())
    # get the compressed vector
    compressed_ineq_lb = full_to_compressed(nlp.ineq_lb(), ineqlb_mask)
    compressed_ineq_ub = full_to_compressed(nlp.ineq_ub(), inequb_mask)
    # we can also build compression matrices
    Cineq_ineqlb = build_compression_matrix(ineqlb_mask)
    Cineq_inequb = build_compression_matrix(inequb_mask)

    # lower and upper inequalities residual
    res_ineq_lb = Cineq_ineqlb * res_ineq - compressed_ineq_lb
    res_ineq_ub = compressed_ineq_ub - Cineq_inequb * res_ineq
    print("Residuals of inequality constraints lower bounds:", res_ineq_lb)
    print("Residuals of inequality constraints upper bounds:", res_ineq_ub)

    feasible = False
    if (np.all(res_xl >= 0) and np.all(res_xu >= 0)
            and np.all(res_ineq_lb >= 0) and np.all(res_ineq_ub >= 0)
            and np.allclose(res_eq, np.zeros(nlp.n_eq_constraints()), atol=1e-5)):
        feasible = True

    print("Is x0 feasible:", feasible)

    return feasible
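
A typical entry point if this function lives in its own script (an assumption; the original listing does not show it):

if __name__ == '__main__':
    main()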
Example #3
# NOTE: the beginning of this example (its imports, the model construction, and
# the start of the pyomo.dae discretization call whose trailing arguments appear
# below) is missing from the listing.
                                      ncp=1,
                                      contset=instance.t)

# Wrap the Pyomo model in PyNumero's NLP interface
nlp = PyomoNLP(instance)
print(nlp.variable_names())

x = nlp.create_new_vector('primals')
x.fill(1.0)
nlp.set_primals(x)

lam = nlp.create_new_vector('duals')
lam.fill(1.0)
nlp.set_duals(lam)

# print the total, equality, and inequality constraint counts
print(nlp.n_constraints(), nlp.n_eq_constraints(), nlp.n_ineq_constraints())

# Evaluate jacobian
jac = nlp.evaluate_jacobian()
plt.spy(jac)
plt.title('Jacobian of the constraints\n')
plt.show()

# Evaluate hessian of the lagrangian
hess_lag = nlp.evaluate_hessian_lag()
plt.spy(hess_lag)
plt.title('Hessian of the Lagrangian function\n')
plt.show()

# Build KKT matrix
kkt = BlockSymMatrix(2)
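
The listing is cut off right after the 2x2 symmetric block structure is created. As a hedged sketch of the usual continuation (the exact block placement is an assumption, not shown in the source), the Hessian and Jacobian evaluated above would be placed into the lower triangle:

# assumed continuation: fill the lower triangle of the symmetric KKT structure
kkt[0, 0] = hess_lag   # Hessian of the Lagrangian block
kkt[1, 0] = jac        # constraint Jacobian block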
Example #4
instance_factory = ScenarioTreeInstanceFactory(pysp_instance_creation_callback,
                                               nx_scenario_tree)
options = ScenarioTreeManagerFactory.register_options()
options.scenario_tree_manager = 'serial'
sp = ScenarioTreeManagerFactory(options, factory=instance_factory)
sp.initialize()

instance = create_ef_instance(sp.scenario_tree)

#instance = create_model(1.0)
nlp = PyomoNLP(instance)
print("\n----------------------")
print("Problem statistics:")
print("----------------------")
print("Number of variables: {:>25d}".format(nlp.n_primals()))
print("Number of equality constraints: {:>14d}".format(nlp.n_eq_constraints()))
print("Number of inequality constraints: {:>11d}".format(
    nlp.n_ineq_constraints()))
print("Total number of constraints: {:>17d}".format(nlp.n_constraints()))
print("Number of nnz in Jacobian: {:>20d}".format(nlp.nnz_jacobian()))
print("Number of nnz in hessian of Lagrange: {:>8d}".format(
    nlp.nnz_hessian_lag()))

x = nlp.init_primals().copy()
y = nlp.create_new_vector('duals')
y.fill(1.0)
nlp.set_primals(x)
nlp.set_duals(y)

# Evaluate jacobian of all constraints
jac_full = nlp.evaluate_jacobian()
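
The listing ends right after the Jacobian is evaluated. A possible follow-up, mirroring the sparsity plots in the previous example (the matplotlib import is added here as an assumption), is to inspect its structure:

import matplotlib.pyplot as plt

plt.spy(jac_full)
plt.title('Jacobian of all constraints\n')
plt.show()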