Example #1
    def test_is_feasible_function(self):
        m = ConcreteModel()
        m.x = Var(bounds=(0, 3), initialize=2)
        m.c = Constraint(expr=m.x == 2)
        self.assertTrue(is_feasible(m, GDPoptSolver.CONFIG()))

        m.c2 = Constraint(expr=m.x <= 1)
        self.assertFalse(is_feasible(m, GDPoptSolver.CONFIG()))

        m = ConcreteModel()
        m.x = Var(bounds=(0, 3), initialize=2)
        m.c = Constraint(expr=m.x >= 5)
        self.assertFalse(is_feasible(m, GDPoptSolver.CONFIG()))

        m = ConcreteModel()
        m.x = Var(bounds=(3, 3), initialize=2)
        self.assertFalse(is_feasible(m, GDPoptSolver.CONFIG()))

        m = ConcreteModel()
        m.x = Var(bounds=(0, 1), initialize=2)
        self.assertFalse(is_feasible(m, GDPoptSolver.CONFIG()))

        m = ConcreteModel()
        m.x = Var(bounds=(0, 1), initialize=2)
        m.d = Disjunct()
        with self.assertRaisesRegex(NotImplementedError, "Found active disjunct"):
            is_feasible(m, GDPoptSolver.CONFIG())
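
The assertions above characterize the behavior expected of is_feasible: it returns True only when every active constraint holds within a tolerance and every variable value lies within its bounds, and it raises NotImplementedError if the model still contains an active Disjunct. A minimal sketch of such a check is given below; the function name and the tolerance attributes (constraint_tolerance, variable_tolerance) are illustrative assumptions, not the actual GDPopt implementation.

# A hypothetical, simplified feasibility check mirroring the assertions above.
# The tolerance attribute names (constraint_tolerance, variable_tolerance) are
# assumptions for illustration; the real GDPopt utility may differ in detail.
from pyomo.environ import Constraint, Var, value
from pyomo.gdp import Disjunct


def is_feasible_sketch(model, config):
    # Models that still contain active Disjuncts are not supported.
    for disjunct in model.component_data_objects(ctype=Disjunct, active=True):
        raise NotImplementedError("Found active disjunct %s" % disjunct.name)
    # Every active constraint must hold within the constraint tolerance.
    for constr in model.component_data_objects(ctype=Constraint, active=True):
        body = value(constr.body)
        lower = value(constr.lower) if constr.lower is not None else None
        upper = value(constr.upper) if constr.upper is not None else None
        if lower is not None and body < lower - config.constraint_tolerance:
            return False
        if upper is not None and body > upper + config.constraint_tolerance:
            return False
    # Every variable value must lie within its bounds.
    for var in model.component_data_objects(ctype=Var):
        if var.value is None:
            continue
        if var.lb is not None and var.value < var.lb - config.variable_tolerance:
            return False
        if var.ub is not None and var.value > var.ub + config.variable_tolerance:
            return False
    return True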
Example #2
    def test_FP(self):
        """Test the feasibility pump algorithm."""
        with SolverFactory('mindtpy') as opt:
            for model in model_list:
                results = opt.solve(model,
                                    strategy='FP',
                                    mip_solver=required_solvers[1],
                                    nlp_solver=required_solvers[0],
                                    bound_tolerance=1E-5)
                log_infeasible_constraints(model)
                self.assertTrue(is_feasible(model, self.get_config(opt)))
Example #3
    def test_FP_8PP(self):
        """Test the feasibility pump algorithm."""
        with SolverFactory('mindtpy') as opt:
            model = EightProcessFlowsheet(convex=True)
            print('\n Solving 8PP problem using feasibility pump')
            results = opt.solve(model,
                                strategy='FP',
                                mip_solver=required_solvers[1],
                                nlp_solver=required_solvers[0],
                                bound_tolerance=1E-5)
            log_infeasible_constraints(model)
            self.assertTrue(is_feasible(model, self.get_config(opt)))
Example #4
    def test_fp_OnlineDocExample(self):
        """Test the feasibility pump algorithm."""
        # TODO: bug fix
        with SolverFactory('mindtpy') as opt:
            model = OnlineDocExample()
            print('\n Solving OnlineDocExample using feasibility pump')
            results = opt.solve(model,
                                strategy='FP',
                                mip_solver=required_solvers[1],
                                nlp_solver=required_solvers[0],
                                iteration_limit=0)
            self.assertTrue(is_feasible(model, self.get_config(opt)))
Example #5
    def test_fp_Proposal(self):
        """Test the feasibility pump algorithm."""
        with SolverFactory('mindtpy') as opt:
            model = ProposalModel()
            print('\n Solving ProposalModel using feasibility pump')
            results = opt.solve(model,
                                strategy='FP',
                                mip_solver=required_solvers[1],
                                nlp_solver=required_solvers[0],
                                iteration_limit=30)

            self.assertTrue(is_feasible(model, self.get_config(opt)))
Example #6
    def test_FP_Feasibility_Pump1(self):
        """Test the feasibility pump algorithm."""
        with SolverFactory('mindtpy') as opt:
            model = Feasibility_Pump1()
            print('\n Solving Feasibility_Pump1 with feasibility pump')
            results = opt.solve(model,
                                strategy='FP',
                                mip_solver=required_solvers[1],
                                nlp_solver=required_solvers[0],
                                bound_tolerance=1E-5)
            log_infeasible_constraints(model)
            self.assertTrue(is_feasible(model, self.get_config(opt)))
Example #7
    def test_FP_OA_Feasibility_Pump1(self):
        """Test the FP-OA algorithm."""
        with SolverFactory('mindtpy') as opt:
            model = Feasibility_Pump1()
            print('\n Solving Feasibility_Pump1 with FP-OA')
            results = opt.solve(model,
                                strategy='OA',
                                init_strategy='FP',
                                mip_solver=required_solvers[1],
                                nlp_solver=required_solvers[0],
                                bound_tolerance=1E-5)
            self.assertIs(results.solver.termination_condition,
                          TerminationCondition.optimal)
            self.assertTrue(is_feasible(model, self.get_config(opt)))
Example #8
    def test_FP_8PP_Norm_infinity_with_norm_constraint(self):
        """Test the feasibility pump algorithm."""
        with SolverFactory('mindtpy') as opt:
            model = EightProcessFlowsheet(convex=True)
            print(
                '\n Solving 8PP problem using feasibility pump with Norm infinity in mip regularization problem'
            )
            results = opt.solve(model,
                                strategy='FP',
                                mip_solver=required_solvers[1],
                                nlp_solver=required_solvers[0],
                                bound_tolerance=1E-5,
                                fp_main_norm='L_infinity',
                                fp_norm_constraint=False)
            log_infeasible_constraints(model)
            self.assertTrue(is_feasible(model, self.get_config(opt)))
Example #9
def solve_NLP(nlp_model, solve_data, config):
    """Solve the NLP subproblem."""
    config.logger.info('Solving nonlinear subproblem for '
                       'fixed binaries and logical realizations.')

    # Error checking for unfixed discrete variables
    unfixed_discrete_vars = detect_unfixed_discrete_vars(nlp_model)
    assert len(unfixed_discrete_vars) == 0, \
        "Unfixed discrete variables exist on the NLP subproblem: {0}".format(
        list(v.name for v in unfixed_discrete_vars))

    GDPopt = nlp_model.GDPopt_utils

    initialize_subproblem(nlp_model, solve_data)

    # Callback immediately before solving NLP subproblem
    config.call_before_subproblem_solve(nlp_model, solve_data)

    nlp_solver = SolverFactory(config.nlp_solver)
    if not nlp_solver.available():
        raise RuntimeError("NLP solver %s is not available." %
                           config.nlp_solver)
    with SuppressInfeasibleWarning():
        try:
            results = nlp_solver.solve(nlp_model, **config.nlp_solver_args)
        except ValueError as err:
            if 'Cannot load SolverResults object with bad status: error' in str(
                    err):
                results = SolverResults()
                results.solver.termination_condition = tc.error
                results.solver.message = str(err)
            else:
                raise

    nlp_result = SubproblemResult()
    nlp_result.feasible = True
    nlp_result.var_values = list(v.value for v in GDPopt.variable_list)
    nlp_result.pyomo_results = results
    nlp_result.dual_values = list(
        nlp_model.dual.get(c, None) for c in GDPopt.constraint_list)

    term_cond = results.solver.termination_condition
    if any(term_cond == cond
           for cond in (tc.optimal, tc.locallyOptimal, tc.feasible)):
        pass
    elif term_cond == tc.infeasible:
        config.logger.info('NLP subproblem was infeasible.')
        nlp_result.feasible = False
    elif term_cond == tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'NLP subproblem failed to converge within iteration limit.')
        if is_feasible(nlp_model, config):
            config.logger.info(
                'NLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            nlp_result.feasible = False
    elif term_cond == tc.internalSolverError:
        # Possible that IPOPT had a restoration failure
        config.logger.info("NLP solver had an internal failure: %s" %
                           results.solver.message)
        nlp_result.feasible = False
    elif (term_cond == tc.other
          and "Too few degrees of freedom" in str(results.solver.message)):
        # Possible IPOPT degrees of freedom error
        config.logger.info("IPOPT has too few degrees of freedom: %s" %
                           results.solver.message)
        nlp_result.feasible = False
    elif term_cond == tc.other:
        config.logger.info(
            "NLP solver had a termination condition of 'other': %s" %
            results.solver.message)
        nlp_result.feasible = False
    elif term_cond == tc.error:
        config.logger.info(
            "NLP solver had a termination condition of 'error': %s" %
            results.solver.message)
        nlp_result.feasible = False
    elif term_cond == tc.maxTimeLimit:
        config.logger.info(
            "NLP solver ran out of time. Assuming infeasible for now.")
        nlp_result.feasible = False
    else:
        raise ValueError('GDPopt unable to handle NLP subproblem termination '
                         'condition of %s. Results: %s' % (term_cond, results))

    # Call the NLP post-solve callback
    config.call_after_subproblem_solve(nlp_model, solve_data)

    # if feasible, call the NLP post-feasible callback
    if nlp_result.feasible:
        config.call_after_subproblem_feasible(nlp_model, solve_data)

    return nlp_result
Example #10
def solve_MINLP(model, solve_data, config):
    """Solve the MINLP subproblem."""
    config.logger.info(
        "Solving MINLP subproblem for fixed logical realizations.")

    GDPopt = model.GDPopt_utils

    initialize_subproblem(model, solve_data)

    # Callback immediately before solving MINLP subproblem
    config.call_before_subproblem_solve(model, solve_data)

    minlp_solver = SolverFactory(config.minlp_solver)
    if not minlp_solver.available():
        raise RuntimeError("MINLP solver %s is not available." %
                           config.minlp_solver)
    with SuppressInfeasibleWarning():
        results = minlp_solver.solve(model, **config.minlp_solver_args)

    subprob_result = SubproblemResult()
    subprob_result.feasible = True
    subprob_result.var_values = list(v.value for v in GDPopt.variable_list)
    subprob_result.pyomo_results = results
    subprob_result.dual_values = list(
        model.dual.get(c, None) for c in GDPopt.constraint_list)

    term_cond = results.solver.termination_condition
    if any(term_cond == cond
           for cond in (tc.optimal, tc.locallyOptimal, tc.feasible)):
        pass
    elif term_cond == tc.infeasible:
        config.logger.info('MINLP subproblem was infeasible.')
        subprob_result.feasible = False
    elif term_cond == tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'MINLP subproblem failed to converge within iteration limit.')
        if is_feasible(model, config):
            config.logger.info(
                'MINLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            subprob_result.feasible = False
    elif term_cond == tc.intermediateNonInteger:
        config.logger.info(
            "MINLP solver could not find feasible integer solution: %s" %
            results.solver.message)
        subprob_result.feasible = False
    else:
        raise ValueError(
            'GDPopt unable to handle MINLP subproblem termination '
            'condition of %s. Results: %s' % (term_cond, results))

    # Call the subproblem post-solve callback
    config.call_after_subproblem_solve(model, solve_data)

    # if feasible, call the subproblem post-feasible callback
    if subprob_result.feasible:
        config.call_after_subproblem_feasible(model, solve_data)

    return subprob_result
Example #11
def solve_NLP(nlp_model, solve_data, config):
    """Solve the NLP subproblem."""
    config.logger.info('Solving nonlinear subproblem for '
                       'fixed binaries and logical realizations.')
    unfixed_discrete_vars = detect_unfixed_discrete_vars(nlp_model)
    if unfixed_discrete_vars:
        discrete_var_names = list(v.name for v in unfixed_discrete_vars)
        config.logger.warning(
            "Unfixed discrete variables exist on the NLP subproblem: %s" %
            (discrete_var_names, ))

    GDPopt = nlp_model.GDPopt_utils

    preprocessing_transformations = [
        # Propagate variable bounds
        'contrib.propagate_eq_var_bounds',
        # Detect fixed variables
        'contrib.detect_fixed_vars',
        # Propagate fixed variables
        'contrib.propagate_fixed_vars',
        # Remove zero terms in linear expressions
        'contrib.remove_zero_terms',
        # Remove terms in equal to zero summations
        'contrib.propagate_zero_sum',
        # Transform bound constraints
        'contrib.constraints_to_var_bounds',
        # Detect fixed variables
        'contrib.detect_fixed_vars',
        # Remove terms in equal to zero summations
        'contrib.propagate_zero_sum',
        # Remove trivial constraints
        'contrib.deactivate_trivial_constraints'
    ]
    for xfrm in preprocessing_transformations:
        TransformationFactory(xfrm).apply_to(nlp_model)

    initialize_NLP(nlp_model, solve_data)

    # Callback immediately before solving NLP subproblem
    config.call_before_subproblem_solve(nlp_model, solve_data)

    nlp_solver = SolverFactory(config.nlp_solver)
    if not nlp_solver.available():
        raise RuntimeError("NLP solver %s is not available." %
                           config.nlp_solver)
    with SuppressInfeasibleWarning():
        results = nlp_solver.solve(nlp_model, **config.nlp_solver_args)

    nlp_result = SubproblemResult()
    nlp_result.feasible = True
    nlp_result.var_values = list(v.value for v in GDPopt.working_var_list)
    nlp_result.pyomo_results = results
    nlp_result.dual_values = list(
        nlp_model.dual.get(c, None) for c in GDPopt.working_constraints_list)

    subprob_terminate_cond = results.solver.termination_condition
    if subprob_terminate_cond is tc.optimal:
        pass
    elif subprob_terminate_cond is tc.infeasible:
        config.logger.info('NLP subproblem was locally infeasible.')
        nlp_result.feasible = False
    elif subprob_terminate_cond is tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'NLP subproblem failed to converge within iteration limit.')
        if is_feasible(nlp_model, config):
            config.logger.info(
                'NLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            nlp_result.feasible = False
    elif subprob_terminate_cond is tc.internalSolverError:
        # Possible that IPOPT had a restoration failure
        config.logger.info("NLP solver had an internal failure: %s" %
                           results.solver.message)
        nlp_result.feasible = False
    else:
        raise ValueError('GDPopt unable to handle NLP subproblem termination '
                         'condition of %s. Results: %s' %
                         (subprob_terminate_cond, results))

    # Call the NLP post-solve callback
    config.call_after_subproblem_solve(nlp_model, solve_data)

    # if feasible, call the NLP post-feasible callback
    if nlp_result.feasible:
        config.call_after_subproblem_feasible(nlp_model, solve_data)

    return nlp_result
Example #12
def solve_NLP(nlp_model, solve_data, config):
    """Solve the NLP subproblem."""
    config.logger.info('Solving nonlinear subproblem for '
                       'fixed binaries and logical realizations.')

    # Error checking for unfixed discrete variables
    unfixed_discrete_vars = detect_unfixed_discrete_vars(nlp_model)
    assert len(unfixed_discrete_vars) == 0, \
        "Unfixed discrete variables exist on the NLP subproblem: {0}".format(
        list(v.name for v in unfixed_discrete_vars))

    GDPopt = nlp_model.GDPopt_utils

    if config.subproblem_presolve:
        preprocess_subproblem(nlp_model, config)

    initialize_subproblem(nlp_model, solve_data)

    # Callback immediately before solving NLP subproblem
    config.call_before_subproblem_solve(nlp_model, solve_data)

    nlp_solver = SolverFactory(config.nlp_solver)
    if not nlp_solver.available():
        raise RuntimeError("NLP solver %s is not available." %
                           config.nlp_solver)
    with SuppressInfeasibleWarning():
        results = nlp_solver.solve(nlp_model, **config.nlp_solver_args)

    nlp_result = SubproblemResult()
    nlp_result.feasible = True
    nlp_result.var_values = list(v.value for v in GDPopt.variable_list)
    nlp_result.pyomo_results = results
    nlp_result.dual_values = list(
        nlp_model.dual.get(c, None) for c in GDPopt.constraint_list)

    subprob_terminate_cond = results.solver.termination_condition
    if (subprob_terminate_cond is tc.optimal
            or subprob_terminate_cond is tc.locallyOptimal
            or subprob_terminate_cond is tc.feasible):
        pass
    elif subprob_terminate_cond is tc.infeasible:
        config.logger.info('NLP subproblem was infeasible.')
        nlp_result.feasible = False
    elif subprob_terminate_cond is tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'NLP subproblem failed to converge within iteration limit.')
        if is_feasible(nlp_model, config):
            config.logger.info(
                'NLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            nlp_result.feasible = False
    elif subprob_terminate_cond is tc.internalSolverError:
        # Possible that IPOPT had a restoration failure
        config.logger.info("NLP solver had an internal failure: %s" %
                           results.solver.message)
        nlp_result.feasible = False
    else:
        raise ValueError('GDPopt unable to handle NLP subproblem termination '
                         'condition of %s. Results: %s' %
                         (subprob_terminate_cond, results))

    # Call the NLP post-solve callback
    config.call_after_subproblem_solve(nlp_model, solve_data)

    # if feasible, call the NLP post-feasible callback
    if nlp_result.feasible:
        config.call_after_subproblem_feasible(nlp_model, solve_data)

    return nlp_result
Example #13
def solve_NLP(nlp_model, solve_data, config):
    """Solve the NLP subproblem."""
    config.logger.info(
        'Solving nonlinear subproblem for '
        'fixed binaries and logical realizations.')

    # Error checking for unfixed discrete variables
    unfixed_discrete_vars = detect_unfixed_discrete_vars(nlp_model)
    assert len(unfixed_discrete_vars) == 0, \
        "Unfixed discrete variables exist on the NLP subproblem: {0}".format(
        list(v.name for v in unfixed_discrete_vars))

    GDPopt = nlp_model.GDPopt_utils

    if config.subproblem_presolve:
        preprocess_subproblem(nlp_model, config)

    initialize_subproblem(nlp_model, solve_data)

    # Callback immediately before solving NLP subproblem
    config.call_before_subproblem_solve(nlp_model, solve_data)

    nlp_solver = SolverFactory(config.nlp_solver)
    if not nlp_solver.available():
        raise RuntimeError("NLP solver %s is not available." %
                           config.nlp_solver)
    with SuppressInfeasibleWarning():
        results = nlp_solver.solve(nlp_model, **config.nlp_solver_args)

    nlp_result = SubproblemResult()
    nlp_result.feasible = True
    nlp_result.var_values = list(v.value for v in GDPopt.variable_list)
    nlp_result.pyomo_results = results
    nlp_result.dual_values = list(
        nlp_model.dual.get(c, None)
        for c in GDPopt.constraint_list)

    subprob_terminate_cond = results.solver.termination_condition
    if (subprob_terminate_cond is tc.optimal or
            subprob_terminate_cond is tc.locallyOptimal or
            subprob_terminate_cond is tc.feasible):
        pass
    elif subprob_terminate_cond is tc.infeasible:
        config.logger.info('NLP subproblem was infeasible.')
        nlp_result.feasible = False
    elif subprob_terminate_cond is tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'NLP subproblem failed to converge within iteration limit.')
        if is_feasible(nlp_model, config):
            config.logger.info(
                'NLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            nlp_result.feasible = False
    elif subprob_terminate_cond is tc.internalSolverError:
        # Possible that IPOPT had a restoration failure
        config.logger.info(
            "NLP solver had an internal failure: %s" % results.solver.message)
        nlp_result.feasible = False
    else:
        raise ValueError(
            'GDPopt unable to handle NLP subproblem termination '
            'condition of %s. Results: %s'
            % (subprob_terminate_cond, results))

    # Call the NLP post-solve callback
    config.call_after_subproblem_solve(nlp_model, solve_data)

    # if feasible, call the NLP post-feasible callback
    if nlp_result.feasible:
        config.call_after_subproblem_feasible(nlp_model, solve_data)

    return nlp_result
Example #14
def solve_MINLP(model, solve_data, config):
    """Solve the MINLP subproblem."""
    config.logger.info(
        "Solving MINLP subproblem for fixed logical realizations."
    )

    GDPopt = model.GDPopt_utils

    if config.subproblem_presolve:
        preprocess_subproblem(model, config)

    initialize_subproblem(model, solve_data)

    # Callback immediately before solving MINLP subproblem
    config.call_before_subproblem_solve(model, solve_data)

    minlp_solver = SolverFactory(config.minlp_solver)
    if not minlp_solver.available():
        raise RuntimeError("MINLP solver %s is not available." %
                           config.minlp_solver)
    with SuppressInfeasibleWarning():
        results = minlp_solver.solve(model, **config.minlp_solver_args)

    subprob_result = SubproblemResult()
    subprob_result.feasible = True
    subprob_result.var_values = list(v.value for v in GDPopt.variable_list)
    subprob_result.pyomo_results = results
    subprob_result.dual_values = list(
        model.dual.get(c, None)
        for c in GDPopt.constraint_list)

    subprob_terminate_cond = results.solver.termination_condition
    if (subprob_terminate_cond is tc.optimal or
            subprob_terminate_cond is tc.locallyOptimal or
            subprob_terminate_cond is tc.feasible):
        pass
    elif subprob_terminate_cond is tc.infeasible:
        config.logger.info('MINLP subproblem was infeasible.')
        subprob_result.feasible = False
    elif subprob_terminate_cond is tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'MINLP subproblem failed to converge within iteration limit.')
        if is_feasible(model, config):
            config.logger.info(
                'MINLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            subprob_result.feasible = False
    elif subprob_terminate_cond is tc.intermediateNonInteger:
        config.logger.info(
            "MINLP solver could not find feasible integer solution: %s" % results.solver.message)
        subprob_result.feasible = False
    else:
        raise ValueError(
            'GDPopt unable to handle MINLP subproblem termination '
            'condition of %s. Results: %s'
            % (subprob_terminate_cond, results))

    # Call the subproblem post-solve callback
    config.call_after_subproblem_solve(model, solve_data)

    # if feasible, call the subproblem post-feasible callback
    if subprob_result.feasible:
        config.call_after_subproblem_feasible(model, solve_data)

    return subprob_result