Code Example #1
File: sensitivity.py  Project: jialuw96/pyomo
def main():
    m = create_model(4.5, 1.0)
    opt = pyo.SolverFactory('ipopt')
    results = opt.solve(m, tee=True)

    nlp = PyomoNLP(m)
    x = nlp.init_primals()
    y = compute_init_lam(nlp, x=x)
    nlp.set_primals(x)
    nlp.set_duals(y)

    J = nlp.extract_submatrix_jacobian(pyomo_variables=[m.x1, m.x2, m.x3],
                                       pyomo_constraints=[m.const1, m.const2])
    H = nlp.extract_submatrix_hessian_lag(
        pyomo_variables_rows=[m.x1, m.x2, m.x3],
        pyomo_variables_cols=[m.x1, m.x2, m.x3])

    M = BlockMatrix(2, 2)
    M.set_block(0, 0, H)
    M.set_block(1, 0, J)
    M.set_block(0, 1, J.transpose())

    Np = BlockMatrix(2, 1)
    Np.set_block(
        0, 0,
        nlp.extract_submatrix_hessian_lag(
            pyomo_variables_rows=[m.x1, m.x2, m.x3],
            pyomo_variables_cols=[m.eta1, m.eta2]))
    Np.set_block(
        1, 0,
        nlp.extract_submatrix_jacobian(pyomo_variables=[m.eta1, m.eta2],
                                       pyomo_constraints=[m.const1, m.const2]))

    ds = spsolve(M.tocsc(), -Np.tocsc())

    print("ds:\n", ds.todense())
    #################################################################

    p0 = np.array([pyo.value(m.nominal_eta1), pyo.value(m.nominal_eta2)])
    p = np.array([4.45, 1.05])
    dp = p - p0
    dx = ds.dot(dp)[0:3]
    x_indices = nlp.get_primal_indices([m.x1, m.x2, m.x3])
    x_names = np.array(nlp.primals_names())
    new_x_sens = x[x_indices] + dx
    print("dp:", dp)
    print("dx:", dx)
    print("Variable names: \n", x_names[x_indices])
    print("Sensitivity based x:\n", new_x_sens)

    #################################################################
    m = create_model(4.45, 1.05)
    opt = pyo.SolverFactory('ipopt')
    results = opt.solve(m, tee=False)
    nlp = PyomoNLP(m)
    new_x = nlp.init_primals()[nlp.get_primal_indices([m.x1, m.x2, m.x3])]
    print("NLP based x:\n", new_x)

    return new_x_sens, new_x
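Example #1 above and Examples #6 and #9 below are variants of the same Pyomo sensitivity script and rely on module-level imports plus two helpers, create_model and compute_init_lam, that the excerpts do not show. The following is a minimal sketch of the imports these snippets appear to assume and of one possible (hypothetical, not the original) compute_init_lam that estimates the equality-constraint multipliers by least squares; create_model is omitted because its constraint expressions cannot be recovered from the excerpt.

import numpy as np
import pyomo.environ as pyo
from scipy.sparse.linalg import spsolve
from pyomo.contrib.pynumero.interfaces.pyomo_nlp import PyomoNLP
from pyomo.contrib.pynumero.sparse import BlockMatrix


def compute_init_lam(nlp, x=None):
    """Estimate duals y that minimize ||grad f(x) + J(x)^T y|| at the point x."""
    if x is None:
        x = nlp.init_primals()
    nlp.set_primals(x)
    grad_f = nlp.evaluate_grad_objective()
    jac = nlp.evaluate_jacobian().toarray()  # dense is fine for a tiny model
    # Stationarity of the Lagrangian (L = f + y^T c): grad f + J^T y = 0,
    # solved here in the least-squares sense.
    y, *_ = np.linalg.lstsq(jac.T, -grad_f, rcond=None)
    return y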
Code Example #2
def get_hessian_of_constraint(constraint, wrt1=None, wrt2=None, nlp=None):
    constraints = [constraint]
    if wrt1 is None and wrt2 is None:
        variables = list(
            identify_variables(constraint.expr, include_fixed=False))
        wrt1 = variables
        wrt2 = variables
    elif wrt1 is not None and wrt2 is not None:
        variables = wrt1 + wrt2
    elif wrt1 is not None:  # but wrt2 is None
        wrt2 = wrt1
        variables = wrt1
    else:
        # wrt2 is not None and wrt1 is None
        wrt1 = wrt2
        variables = wrt1

    if nlp is None:
        block = create_subsystem_block(constraints, variables=variables)
        # Could fix input_vars so I don't evaluate the Hessian with respect
        # to variables I don't care about...

        # HUGE HACK: Variables not included in a constraint are not written
        # to the nl file, so we cannot take the derivative with respect to
        # them, even though we know this derivative is zero. To work around,
        # we make sure all variables appear on the block in the form of a
        # dummy constraint. Then we can take derivatives of any constraint
        # with respect to them. Conveniently, the extract_submatrix_
        # call deals with extracting the variables and constraint we care
        # about, in the proper order.
        block._dummy_var = Var()
        block._dummy_con = Constraint(expr=sum(variables) == block._dummy_var)
        block._obj = Objective(expr=0.0)
        nlp = PyomoNLP(block)

    saved_duals = nlp.get_duals()
    saved_obj_factor = nlp.get_obj_factor()
    temp_duals = np.zeros(len(saved_duals))

    # NOTE: This makes some assumption about how the Lagrangian is constructed.
    # TODO: Define the convention we assume and convert if necessary.
    idx = nlp.get_constraint_indices(constraints)[0]
    temp_duals[idx] = 1.0
    nlp.set_duals(temp_duals)
    nlp.set_obj_factor(0.0)

    # NOTE: The returned matrix preserves explicit zeros. I.e. it contains
    # coordinates for every entry that could possibly be nonzero.
    submatrix = nlp.extract_submatrix_hessian_lag(wrt1, wrt2)

    nlp.set_obj_factor(saved_obj_factor)
    nlp.set_duals(saved_duals)
    return submatrix
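The get_hessian_of_constraint excerpt above assumes module-level imports roughly like numpy as np, Var, Constraint and Objective from pyomo.environ, identify_variables from pyomo.core.expr, create_subsystem_block from pyomo.util.subsystems, and PyomoNLP. A hypothetical usage sketch follows; the toy model and names are illustrative and not from the original project.

import pyomo.environ as pyo

m = pyo.ConcreteModel()
m.x = pyo.Var(initialize=1.0)
m.y = pyo.Var(initialize=2.0)
m.con = pyo.Constraint(expr=m.x**2 * m.y + m.y**3 == 1.0)

# Hessian of x**2*y + y**3 at (x, y) = (1, 2): [[2y, 2x], [2x, 6y]] = [[4, 2], [2, 12]]
H = get_hessian_of_constraint(m.con, wrt1=[m.x, m.y])
print(H.toarray())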
Code Example #3
    def calculate_duals(self):
        """Get duals of the current model

        :return: self.duals
        :rtype: dict
        """

        # For testing - very slow and should not be used!
        if self.kkt_method == 'pynumero':

            nlp = PyomoNLP(self.model_object)
            varList = nlp.get_pyomo_variables()
            conList = nlp.get_pyomo_constraints()
            duals = nlp.get_duals()

            J = nlp.extract_submatrix_jacobian(pyomo_variables=varList,
                                               pyomo_constraints=conList)
            H = nlp.extract_submatrix_hessian_lag(pyomo_variables_rows=varList,
                                                  pyomo_variables_cols=varList)
            J = csc_matrix(J)

            var_index_names = [v.name for v in varList]
            con_index_names = [v.name for v in conList]

            dummy_constraints = [
                f'{self.global_constraint_name}[{k}]'
                for k in self.parameter_set
            ]
            jac_row_ind = [con_index_names.index(d) for d in dummy_constraints]
            duals_imp = [duals[i] for i in jac_row_ind]

            self.duals = dict(zip(self.parameter_set, duals_imp))
            if self.verbose:
                print('The pynumero results are:')
                print(self.duals)

        else:

            self.duals = {
                key: self.model_object.dual[getattr(
                    self.model_object, self.global_constraint_name)[key]]
                for key, val in getattr(self.model_object,
                                        self.global_param_name).items()
            }

            if self.verbose:
                print('The duals are:')
                print(self.duals)

        self.delete_sol_files()

        return self.duals
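For the non-pynumero branch of calculate_duals to work, the model has to carry a dual Suffix that the solver fills in during the solve. As a reminder of the standard Pyomo pattern (shown as a sketch, not as code from the original project):

import pyomo.environ as pyo


def add_dual_suffix(model):
    # Declare an IMPORT suffix before solving so that the solver's constraint
    # duals are loaded into model.dual.
    model.dual = pyo.Suffix(direction=pyo.Suffix.IMPORT)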
Code Example #4
File: EstimationPotential.py  Project: Ishanki/kipet
    def _get_kkt_info(self, model):
        """Takes the model and uses PyNumero to get the jacobian and Hessian
        information as dataframes
        
        Args:
            model (pyomo ConcreteModel): A pyomo model instance of the current
            problem (used in calculating the reduced Hessian)
    
        Returns:
            
            KKT (pd.DataFrame): the KKT matrix as a dataframe
            
            H_df (pd.DataFrame): the Hessian as a dataframe
            
            J_df (pd.DataFrame): the jacobian as a dataframe
            
            var_index_names (pd.DataFrame): the variable names (one per row)
            
            con_index_names (list): the constraint names
            
        """
        nlp = PyomoNLP(model)
        varList = nlp.get_pyomo_variables()
        conList = nlp.get_pyomo_constraints()

        J = nlp.extract_submatrix_jacobian(pyomo_variables=varList,
                                           pyomo_constraints=conList)
        H = nlp.extract_submatrix_hessian_lag(pyomo_variables_rows=varList,
                                              pyomo_variables_cols=varList)

        var_index_names = [v.name for v in varList]
        con_index_names = [v.name for v in conList]

        J_df = pd.DataFrame(J.todense(),
                            columns=var_index_names,
                            index=con_index_names)
        H_df = pd.DataFrame(H.todense(),
                            columns=var_index_names,
                            index=var_index_names)

        var_index_names = pd.DataFrame(var_index_names)

        KKT_up = pd.merge(H_df,
                          J_df.transpose(),
                          left_index=True,
                          right_index=True)
        KKT = pd.concat((KKT_up, J_df))
        KKT = KKT.fillna(0)

        return KKT, H_df, J_df, var_index_names, con_index_names
Code Example #5
def _get_kkt_matrix(model):
    """This uses pynumero to get the Hessian and Jacobian in order to build the
    KKT matrix
    
    Args:
        model (pyomo ConcreteModel): the current model used in the inner 
            problem optimization
            
    Returns:
        KKT (pandas.DataFrame): KKT matrix for the current iteration
        
        var_index_names (pandas.DataFrame): the variable names (one per row)
        
        con_index_names (list): list of constraint names
    
    """
    nlp = PyomoNLP(model)
    varList = nlp.get_pyomo_variables()
    conList = nlp.get_pyomo_constraints()
    duals = nlp.get_duals()
    
    J = nlp.extract_submatrix_jacobian(pyomo_variables=varList, pyomo_constraints=conList)
    H = nlp.extract_submatrix_hessian_lag(pyomo_variables_rows=varList, pyomo_variables_cols=varList)
    
    var_index_names = [v.name for v in varList]
    con_index_names = [v.name for v in conList]

    J_df = pd.DataFrame(J.todense(), columns=var_index_names, index=con_index_names)
    H_df = pd.DataFrame(H.todense(), columns=var_index_names, index=var_index_names)
    
    var_index_names = pd.DataFrame(var_index_names)
    
    KKT_up = pd.merge(H_df, J_df.transpose(), left_index=True, right_index=True)
    KKT = pd.concat((KKT_up, J_df))
    KKT = KKT.fillna(0)
    
    return KKT, var_index_names, con_index_names
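Both _get_kkt_info and _get_kkt_matrix assemble the same block layout through pandas: the merge produces [H, J^T], the concat appends the rows of J, and fillna(0) zeroes the missing lower-right block. For comparison, here is a minimal sketch (an assumption, not code from either project) of the same assembly done directly with scipy sparse blocks:

from scipy.sparse import bmat


def assemble_kkt(H, J):
    # [[H, J^T],
    #  [J, 0  ]]  -- a None block tells bmat to fill it with zeros.
    return bmat([[H, J.transpose()], [J, None]])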
Code Example #6
nlp = PyomoNLP(m)
x = nlp.init_primals()
y = compute_init_lam(nlp, x=x)
nlp.set_primals(x)
nlp.set_duals(y)

J = nlp.evaluate_jacobian()
H = nlp.evaluate_hessian_lag()

M = BlockSymMatrix(2)
M[0, 0] = H
M[1, 0] = J

Np = BlockMatrix(2, 1)
Np[0, 0] = nlp.extract_submatrix_hessian_lag(
    pyomo_variables_rows=nlp.get_pyomo_variables(),
    pyomo_variables_cols=[m.eta1, m.eta2])
Np[1, 0] = nlp.extract_submatrix_jacobian(
    pyomo_variables=[m.eta1, m.eta2],
    pyomo_constraints=nlp.get_pyomo_constraints())

ds = spsolve(M.tocsc(), Np.tocsc())
print(nlp.variable_names())

#################################################################

p0 = np.array([pyo.value(m.nominal_eta1), pyo.value(m.nominal_eta2)])
p = np.array([4.45, 1.05])
dp = p - p0
dx = ds.dot(dp)[0:nlp.n_primals()]
new_x = x + dx
Code Example #7
    def get_kkt_info(self):
        """Takes the model and uses PyNumero to get the jacobian and Hessian
        information as dataframes
        
        Args:
            model_object (pyomo ConcreteModel): A pyomo model instance of the current
                problem (used in calculating the reduced Hessian)
    
            method (str): defaults to k_aug, method by which to obtain optimization
                results
    
        Returns:
            
            kkt_data (dict): dictionary with the following structure:
                
                    {
                    'J': J,   # Jacobian
                    'H': H,   # Hessian
                    'var_ind': var_index_names, # Variable index
                    'con_ind': con_index_names, # Constraint index
                    'duals': duals, # Duals
                    }
            
        """
        self.get_file_info()

        if self.kkt_method == 'pynumero':

            nlp = PyomoNLP(self.model_object)
            varList = nlp.get_pyomo_variables()
            conList = nlp.get_pyomo_constraints()
            duals = nlp.get_duals()

            J = nlp.extract_submatrix_jacobian(pyomo_variables=varList,
                                               pyomo_constraints=conList)
            H = nlp.extract_submatrix_hessian_lag(pyomo_variables_rows=varList,
                                                  pyomo_variables_cols=varList)
            J = csc_matrix(J)

            var_index_names = [v.name for v in varList]
            con_index_names = [v.name for v in conList]

        elif self.kkt_method == 'k_aug':

            kaug = SolverFactory('k_aug')

            kaug.options["deb_kkt"] = ""
            kaug.solve(self.model_object, tee=False)

            hess = pd.read_csv('hess_debug.in',
                               delim_whitespace=True,
                               header=None,
                               skipinitialspace=True)
            hess.columns = ['irow', 'jcol', 'vals']
            hess.irow -= 1
            hess.jcol -= 1
            os.unlink('hess_debug.in')

            jac = pd.read_csv('jacobi_debug.in',
                              delim_whitespace=True,
                              header=None,
                              skipinitialspace=True)
            m = jac.iloc[0, 0]
            n = jac.iloc[0, 1]
            jac.drop(index=[0], inplace=True)
            jac.columns = ['irow', 'jcol', 'vals']
            jac.irow -= 1
            jac.jcol -= 1
            os.unlink('jacobi_debug.in')

            #try:
            #    duals = read_duals(stub + '.sol')
            #except:
            duals = None

            J = coo_matrix((jac.vals, (jac.irow, jac.jcol)), shape=(m, n))
            Hess_coo = coo_matrix((hess.vals, (hess.irow, hess.jcol)),
                                  shape=(n, n))
            H = Hess_coo + triu(Hess_coo, 1).T

            var_index_names = pd.read_csv(self.sol_files['col'],
                                          sep=';',
                                          header=None)  # dummy sep
            con_index_names = pd.read_csv(self.sol_files['row'],
                                          sep=';',
                                          header=None)  # dummy sep

            var_index_names = [var_name for var_name in var_index_names[0]]
            con_index_names = [
                con_name for con_name in con_index_names[0].iloc[:-1]
            ]
            con_index_number = {v: k for k, v in enumerate(con_index_names)}

        self.delete_sol_files()

        self.kkt_data = {
            'J': J,
            'H': H,
            'var_ind': var_index_names,
            'con_ind': con_index_names,
            'duals': duals,
        }

        return None
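In the k_aug branch above, the Hessian file apparently stores only one triangle of the matrix, and H = Hess_coo + triu(Hess_coo, 1).T mirrors the strictly upper part onto the lower part (which assumes the file holds the upper triangle). A tiny standalone check of that pattern:

import numpy as np
from scipy.sparse import coo_matrix, triu

upper = coo_matrix(np.array([[1.0, 2.0],
                             [0.0, 3.0]]))  # upper triangle only
full = upper + triu(upper, 1).T
print(full.toarray())
# [[1. 2.]
#  [2. 3.]]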
Code Example #8
    def test_indices_methods(self):
        nlp = PyomoNLP(self.pm)

        # get_pyomo_variables
        variables = nlp.get_pyomo_variables()
        expected_ids = [id(self.pm.x[i]) for i in range(1, 10)]
        ids = [id(variables[i]) for i in range(9)]
        self.assertTrue(expected_ids == ids)

        variable_names = nlp.variable_names()
        expected_names = [self.pm.x[i].getname() for i in range(1, 10)]
        self.assertTrue(variable_names == expected_names)

        # get_pyomo_constraints
        constraints = nlp.get_pyomo_constraints()
        expected_ids = [id(self.pm.c[i]) for i in range(1, 10)]
        ids = [id(constraints[i]) for i in range(9)]
        self.assertTrue(expected_ids == ids)

        constraint_names = nlp.constraint_names()
        expected_names = [c.getname() for c in nlp.get_pyomo_constraints()]
        self.assertTrue(constraint_names == expected_names)

        # get_pyomo_equality_constraints
        eq_constraints = nlp.get_pyomo_equality_constraints()
        # 2 and 6 are the equality constraints
        eq_indices = [2, 6]  # "indices" here is a bit overloaded
        expected_eq_ids = [id(self.pm.c[i]) for i in eq_indices]
        eq_ids = [id(con) for con in eq_constraints]
        self.assertEqual(eq_ids, expected_eq_ids)

        eq_constraint_names = nlp.equality_constraint_names()
        expected_eq_names = [
            c.getname(fully_qualified=True)
            for c in nlp.get_pyomo_equality_constraints()
        ]
        self.assertEqual(eq_constraint_names, expected_eq_names)

        # get_pyomo_inequality_constraints
        ineq_constraints = nlp.get_pyomo_inequality_constraints()
        # 1, 3, 4, 5, 7, 8, and 9 are the inequality constraints
        ineq_indices = [1, 3, 4, 5, 7, 8, 9]
        expected_ineq_ids = [id(self.pm.c[i]) for i in ineq_indices]
        ineq_ids = [id(con) for con in ineq_constraints]
        self.assertEqual(ineq_ids, expected_ineq_ids)

        # get_primal_indices
        expected_primal_indices = [i for i in range(9)]
        self.assertTrue(
            expected_primal_indices == nlp.get_primal_indices([self.pm.x]))
        expected_primal_indices = [0, 3, 8, 4]
        variables = [self.pm.x[1], self.pm.x[4], self.pm.x[9], self.pm.x[5]]
        self.assertTrue(
            expected_primal_indices == nlp.get_primal_indices(variables))

        # get_constraint_indices
        expected_constraint_indices = [i for i in range(9)]
        self.assertTrue(expected_constraint_indices ==
                        nlp.get_constraint_indices([self.pm.c]))
        expected_constraint_indices = [0, 3, 8, 4]
        constraints = [self.pm.c[1], self.pm.c[4], self.pm.c[9], self.pm.c[5]]
        self.assertTrue(expected_constraint_indices ==
                        nlp.get_constraint_indices(constraints))

        # get_equality_constraint_indices
        pyomo_eq_indices = [2, 6]
        with self.assertRaises(KeyError):
            # At least one data object in container is not an equality
            nlp.get_equality_constraint_indices([self.pm.c])
        eq_constraints = [self.pm.c[i] for i in pyomo_eq_indices]
        expected_eq_indices = [0, 1]
        # ^indices in the list of equality constraints
        eq_constraint_indices = nlp.get_equality_constraint_indices(
            eq_constraints)
        self.assertEqual(expected_eq_indices, eq_constraint_indices)

        # get_inequality_constraint_indices
        pyomo_ineq_indices = [1, 3, 4, 5, 7, 9]
        with self.assertRaises(KeyError):
            # At least one data object in container is not an inequality
            nlp.get_inequality_constraint_indices([self.pm.c])
        ineq_constraints = [self.pm.c[i] for i in pyomo_ineq_indices]
        expected_ineq_indices = [0, 1, 2, 3, 4, 6]
        # ^indices in the list of inequality constraints; c[8] is not included
        ineq_constraint_indices = nlp.get_inequality_constraint_indices(
            ineq_constraints)
        self.assertEqual(expected_ineq_indices, ineq_constraint_indices)

        # extract_subvector_grad_objective
        expected_gradient = np.asarray(
            [2 * sum((i + 1) * (j + 1) for j in range(9)) for i in range(9)],
            dtype=np.float64)
        grad_obj = nlp.extract_subvector_grad_objective([self.pm.x])
        self.assertTrue(np.array_equal(expected_gradient, grad_obj))

        expected_gradient = np.asarray([
            2 * sum((i + 1) * (j + 1) for j in range(9)) for i in [0, 3, 8, 4]
        ],
                                       dtype=np.float64)
        variables = [self.pm.x[1], self.pm.x[4], self.pm.x[9], self.pm.x[5]]
        grad_obj = nlp.extract_subvector_grad_objective(variables)
        self.assertTrue(np.array_equal(expected_gradient, grad_obj))

        # extract_subvector_constraints
        expected_con = np.asarray(
            [45, 88, 3 * 45, 4 * 45, 5 * 45, 276, 7 * 45, 8 * 45, 9 * 45],
            dtype=np.float64)
        con = nlp.extract_subvector_constraints([self.pm.c])
        self.assertTrue(np.array_equal(expected_con, con))

        expected_con = np.asarray([45, 4 * 45, 9 * 45, 5 * 45],
                                  dtype=np.float64)
        constraints = [self.pm.c[1], self.pm.c[4], self.pm.c[9], self.pm.c[5]]
        con = nlp.extract_subvector_constraints(constraints)
        self.assertTrue(np.array_equal(expected_con, con))

        # extract_submatrix_jacobian
        expected_jac = [[(i) * (j) for j in range(1, 10)]
                        for i in range(1, 10)]
        expected_jac = np.asarray(expected_jac, dtype=np.float64)
        jac = nlp.extract_submatrix_jacobian(pyomo_variables=[self.pm.x],
                                             pyomo_constraints=[self.pm.c])
        dense_jac = jac.todense()
        self.assertTrue(np.array_equal(dense_jac, expected_jac))

        expected_jac = [[(i) * (j) for j in [1, 4, 9, 5]] for i in [2, 6, 4]]
        expected_jac = np.asarray(expected_jac, dtype=np.float64)
        variables = [self.pm.x[1], self.pm.x[4], self.pm.x[9], self.pm.x[5]]
        constraints = [self.pm.c[2], self.pm.c[6], self.pm.c[4]]
        jac = nlp.extract_submatrix_jacobian(pyomo_variables=variables,
                                             pyomo_constraints=constraints)
        dense_jac = jac.todense()
        self.assertTrue(np.array_equal(dense_jac, expected_jac))

        # extract_submatrix_hessian_lag
        expected_hess = [[2.0 * i * j for j in range(1, 10)]
                         for i in range(1, 10)]
        expected_hess = np.asarray(expected_hess, dtype=np.float64)
        hess = nlp.extract_submatrix_hessian_lag(
            pyomo_variables_rows=[self.pm.x], pyomo_variables_cols=[self.pm.x])
        dense_hess = hess.todense()
        self.assertTrue(np.array_equal(dense_hess, expected_hess))

        expected_hess = [[2.0 * i * j for j in [1, 4, 9, 5]]
                         for i in [1, 4, 9, 5]]
        expected_hess = np.asarray(expected_hess, dtype=np.float64)
        variables = [self.pm.x[1], self.pm.x[4], self.pm.x[9], self.pm.x[5]]
        hess = nlp.extract_submatrix_hessian_lag(
            pyomo_variables_rows=variables, pyomo_variables_cols=variables)
        dense_hess = hess.todense()
        self.assertTrue(np.array_equal(dense_hess, expected_hess))
Code Example #9
File: sensitivity.py  Project: vova292/pyomo
#################################################################
m = create_model(4.5, 1.0)
opt = pyo.SolverFactory('ipopt')
results = opt.solve(m, tee=True)

#################################################################
nlp = PyomoNLP(m)
x = nlp.init_primals()
y = compute_init_lam(nlp, x=x)
nlp.set_primals(x)
nlp.set_duals(y)

J = nlp.extract_submatrix_jacobian(pyomo_variables=[m.x1, m.x2, m.x3],
                                   pyomo_constraints=[m.const1, m.const2])
H = nlp.extract_submatrix_hessian_lag(pyomo_variables_rows=[m.x1, m.x2, m.x3],
                                      pyomo_variables_cols=[m.x1, m.x2, m.x3])

M = BlockMatrix(2, 2)
M.set_block(0, 0, H)
M.set_block(1, 0, J)
M.set_block(0, 1, J.transpose())

Np = BlockMatrix(2, 1)
Np.set_block(
    0, 0,
    nlp.extract_submatrix_hessian_lag(pyomo_variables_rows=[m.x1, m.x2, m.x3],
                                      pyomo_variables_cols=[m.eta1, m.eta2]))
Np.set_block(
    1, 0,
    nlp.extract_submatrix_jacobian(pyomo_variables=[m.eta1, m.eta2],
                                   pyomo_constraints=[m.const1, m.const2]))
Code Example #10
    def test_indices_methods(self):
        nlp = PyomoNLP(self.pm)

        # get_pyomo_variables
        variables = nlp.get_pyomo_variables()
        expected_ids = [id(self.pm.x[i]) for i in range(1, 10)]
        ids = [id(variables[i]) for i in range(9)]
        self.assertTrue(expected_ids == ids)

        variable_names = nlp.variable_names()
        expected_names = [self.pm.x[i].getname() for i in range(1, 10)]
        self.assertTrue(variable_names == expected_names)

        # get_pyomo_constraints
        constraints = nlp.get_pyomo_constraints()
        expected_ids = [id(self.pm.c[i]) for i in range(1, 10)]
        ids = [id(constraints[i]) for i in range(9)]
        self.assertTrue(expected_ids == ids)

        constraint_names = nlp.constraint_names()
        expected_names = [c.getname() for c in nlp.get_pyomo_constraints()]
        self.assertTrue(constraint_names == expected_names)

        # get_primal_indices
        expected_primal_indices = [i for i in range(9)]
        self.assertTrue(
            expected_primal_indices == nlp.get_primal_indices([self.pm.x]))
        expected_primal_indices = [0, 3, 8, 4]
        variables = [self.pm.x[1], self.pm.x[4], self.pm.x[9], self.pm.x[5]]
        self.assertTrue(
            expected_primal_indices == nlp.get_primal_indices(variables))

        # get_constraint_indices
        expected_constraint_indices = [i for i in range(9)]
        self.assertTrue(expected_constraint_indices ==
                        nlp.get_constraint_indices([self.pm.c]))
        expected_constraint_indices = [0, 3, 8, 4]
        constraints = [self.pm.c[1], self.pm.c[4], self.pm.c[9], self.pm.c[5]]
        self.assertTrue(expected_constraint_indices ==
                        nlp.get_constraint_indices(constraints))

        # extract_subvector_grad_objective
        expected_gradient = np.asarray(
            [2 * sum((i + 1) * (j + 1) for j in range(9)) for i in range(9)],
            dtype=np.float64)
        grad_obj = nlp.extract_subvector_grad_objective([self.pm.x])
        self.assertTrue(np.array_equal(expected_gradient, grad_obj))

        expected_gradient = np.asarray([
            2 * sum((i + 1) * (j + 1) for j in range(9)) for i in [0, 3, 8, 4]
        ],
                                       dtype=np.float64)
        variables = [self.pm.x[1], self.pm.x[4], self.pm.x[9], self.pm.x[5]]
        grad_obj = nlp.extract_subvector_grad_objective(variables)
        self.assertTrue(np.array_equal(expected_gradient, grad_obj))

        # extract_subvector_constraints
        expected_con = np.asarray(
            [45, 88, 3 * 45, 4 * 45, 5 * 45, 276, 7 * 45, 8 * 45, 9 * 45],
            dtype=np.float64)
        con = nlp.extract_subvector_constraints([self.pm.c])
        self.assertTrue(np.array_equal(expected_con, con))

        expected_con = np.asarray([45, 4 * 45, 9 * 45, 5 * 45],
                                  dtype=np.float64)
        constraints = [self.pm.c[1], self.pm.c[4], self.pm.c[9], self.pm.c[5]]
        con = nlp.extract_subvector_constraints(constraints)
        self.assertTrue(np.array_equal(expected_con, con))

        # extract_submatrix_jacobian
        expected_jac = [[(i) * (j) for j in range(1, 10)]
                        for i in range(1, 10)]
        expected_jac = np.asarray(expected_jac, dtype=np.float64)
        jac = nlp.extract_submatrix_jacobian(pyomo_variables=[self.pm.x],
                                             pyomo_constraints=[self.pm.c])
        dense_jac = jac.todense()
        self.assertTrue(np.array_equal(dense_jac, expected_jac))

        expected_jac = [[(i) * (j) for j in [1, 4, 9, 5]] for i in [2, 6, 4]]
        expected_jac = np.asarray(expected_jac, dtype=np.float64)
        variables = [self.pm.x[1], self.pm.x[4], self.pm.x[9], self.pm.x[5]]
        constraints = [self.pm.c[2], self.pm.c[6], self.pm.c[4]]
        jac = nlp.extract_submatrix_jacobian(pyomo_variables=variables,
                                             pyomo_constraints=constraints)
        dense_jac = jac.todense()
        self.assertTrue(np.array_equal(dense_jac, expected_jac))

        # extract_submatrix_hessian_lag
        expected_hess = [[2.0 * i * j for j in range(1, 10)]
                         for i in range(1, 10)]
        expected_hess = np.asarray(expected_hess, dtype=np.float64)
        hess = nlp.extract_submatrix_hessian_lag(
            pyomo_variables_rows=[self.pm.x], pyomo_variables_cols=[self.pm.x])
        dense_hess = hess.todense()
        self.assertTrue(np.array_equal(dense_hess, expected_hess))

        expected_hess = [[2.0 * i * j for j in [1, 4, 9, 5]]
                         for i in [1, 4, 9, 5]]
        expected_hess = np.asarray(expected_hess, dtype=np.float64)
        variables = [self.pm.x[1], self.pm.x[4], self.pm.x[9], self.pm.x[5]]
        hess = nlp.extract_submatrix_hessian_lag(
            pyomo_variables_rows=variables, pyomo_variables_cols=variables)
        dense_hess = hess.todense()
        self.assertTrue(np.array_equal(dense_hess, expected_hess))
Code Example #11
    def get_kkt_info(self):
        """Takes the model and uses PyNumero or k_aug to get the jacobian and Hessian
        information as dataframes. This is done in place and does not return anything.

        kkt_data (dict): dictionary with the following structure:

                {
                'J': J,   # Jacobian
                'H': H,   # Hessian
                'var_ind': var_index_names, # Variable index
                'con_ind': con_index_names, # Constraint index
                'duals': duals, # Duals
                }

        :return: None

        """
        self.get_file_info()

        if self.kkt_method == 'pynumero':

            nlp = PyomoNLP(self.model_object)
            varList = nlp.get_pyomo_variables()
            conList = nlp.get_pyomo_constraints()
            duals = nlp.get_duals()

            J = nlp.extract_submatrix_jacobian(pyomo_variables=varList,
                                               pyomo_constraints=conList)
            H = nlp.extract_submatrix_hessian_lag(pyomo_variables_rows=varList,
                                                  pyomo_variables_cols=varList)
            J = csc_matrix(J)

            var_index_names = [v.name for v in varList]
            con_index_names = [v.name for v in conList]

        elif self.kkt_method == 'k_aug':

            kaug = SolverFactory('k_aug')

            kaug.options["print_kkt"] = ""
            kaug.solve(self.model_object, tee=True)

            kaug_files = Path('GJH')
            var_index_names = pd.read_csv(self.sol_files['col'],
                                          sep=';',
                                          header=None)  # dummy sep
            con_index_names = pd.read_csv(self.sol_files['row'],
                                          sep=';',
                                          header=None)  # dummy sep

            var_index_names = [var_name for var_name in var_index_names[0]]
            con_index_names = [
                con_name for con_name in con_index_names[0].iloc[:-1]
            ]
            # con_index_number = {v: k for k, v in enumerate(con_index_names)}

            n = len(var_index_names)
            m = len(con_index_names)

            print(f'size: vars: {n}, cons {m}')

            hess_file = kaug_files.joinpath('H_print.txt')
            hess = pd.read_csv(hess_file,
                               delim_whitespace=True,
                               header=None,
                               skipinitialspace=True)

            hess.columns = ['irow', 'jcol', 'vals']
            hess.irow -= 1
            hess.jcol -= 1
            # os.unlink(f'{kaug_files}hess_debug.in')

            jac_file = kaug_files.joinpath('A_print.txt')
            jac = pd.read_csv(jac_file,
                              delim_whitespace=True,
                              header=None,
                              skipinitialspace=True)

            jac.columns = ['irow', 'jcol', 'vals']
            jac.irow -= 1
            jac.jcol -= 1
            # os.unlink(f'{kaug_files}jacobi_debug.in')

            # try:
            #    duals = read_duals(stub + '.sol')
            # except:
            duals = None

            J = coo_matrix((jac.vals, (jac.jcol, jac.irow)), shape=(m, n))
            Hess_coo = coo_matrix((hess.vals, (hess.irow, hess.jcol)),
                                  shape=(n, n))
            H = Hess_coo + triu(Hess_coo, 1).T

            print('The sizes of H and J:')
            print(H.shape)
            print(J.shape)

        self.delete_sol_files()

        self.kkt_data = {
            'J': J,
            'H': H,
            'var_ind': var_index_names,
            'con_ind': con_index_names,
            'duals': duals,
        }

        return None