def solve_NLP_feas(solve_data, config): """Solves feasibility NLP and copies result to working model Returns: Result values and dual values """ fixed_nlp = solve_data.working_model.clone() add_feas_slacks(fixed_nlp, config) MindtPy = fixed_nlp.MindtPy_utils next(fixed_nlp.component_data_objects(Objective, active=True)).deactivate() for constr in fixed_nlp.component_data_objects(ctype=Constraint, active=True, descend_into=True): if constr.body.polynomial_degree() not in [0, 1]: constr.deactivate() MindtPy.MindtPy_feas.activate() if config.feasibility_norm == 'L1': MindtPy.MindtPy_feas_obj = Objective(expr=sum( s for s in MindtPy.MindtPy_feas.slack_var[...]), sense=minimize) elif config.feasibility_norm == 'L2': MindtPy.MindtPy_feas_obj = Objective(expr=sum( s * s for s in MindtPy.MindtPy_feas.slack_var[...]), sense=minimize) else: MindtPy.MindtPy_feas_obj = Objective( expr=MindtPy.MindtPy_feas.slack_var, sense=minimize) TransformationFactory('core.fix_integer_vars').apply_to(fixed_nlp) with SuppressInfeasibleWarning(): feas_soln = SolverFactory(config.nlp_solver).solve( fixed_nlp, **config.nlp_solver_args) subprob_terminate_cond = feas_soln.solver.termination_condition if subprob_terminate_cond is tc.optimal or subprob_terminate_cond is tc.locallyOptimal: copy_var_list_values( MindtPy.variable_list, solve_data.working_model.MindtPy_utils.variable_list, config) elif subprob_terminate_cond is tc.infeasible: raise ValueError('Feasibility NLP infeasible. ' 'This should never happen.') else: raise ValueError( 'MindtPy unable to handle feasibility NLP termination condition ' 'of {}'.format(subprob_terminate_cond)) var_values = [v.value for v in MindtPy.variable_list] duals = [0 for _ in MindtPy.constraint_list] for i, c in enumerate(MindtPy.constraint_list): rhs = c.upper if c.has_ub() else c.lower c_geq = -1 if c.has_ub() else 1 duals[i] = c_geq * max(0, c_geq * (rhs - value(c.body))) if value(MindtPy.MindtPy_feas_obj.expr) == 0: raise ValueError('Problem is not feasible, check NLP solver') return fixed_nlp, feas_soln
def solve_feasibility_subproblem(solve_data, config):
    """Solves a feasibility NLP if the fixed_nlp problem is infeasible.

    Args:
        solve_data (MindtPySolveData): data container that holds solve-instance data.
        config (ConfigBlock): the specific configurations for MindtPy.

    Returns:
        feas_subproblem (Pyomo model): feasibility NLP from the model.
        feas_soln (SolverResults): results from solving the feasibility NLP.
    """
    feas_subproblem = solve_data.working_model.clone()
    add_feas_slacks(feas_subproblem, config)
    MindtPy = feas_subproblem.MindtPy_utils
    if MindtPy.find_component('objective_value') is not None:
        MindtPy.objective_value.value = 0
    next(feas_subproblem.component_data_objects(
        Objective, active=True)).deactivate()
    for constr in feas_subproblem.MindtPy_utils.nonlinear_constraint_list:
        constr.deactivate()

    MindtPy.feas_opt.activate()
    if config.feasibility_norm == 'L1':
        MindtPy.feas_obj = Objective(
            expr=sum(s for s in MindtPy.feas_opt.slack_var[...]),
            sense=minimize)
    elif config.feasibility_norm == 'L2':
        MindtPy.feas_obj = Objective(
            expr=sum(s * s for s in MindtPy.feas_opt.slack_var[...]),
            sense=minimize)
    else:
        MindtPy.feas_obj = Objective(
            expr=MindtPy.feas_opt.slack_var, sense=minimize)

    TransformationFactory('core.fix_integer_vars').apply_to(feas_subproblem)
    nlpopt = SolverFactory(config.nlp_solver)
    nlp_args = dict(config.nlp_solver_args)
    set_solver_options(nlpopt, solve_data, config, solver_type='nlp')
    with SuppressInfeasibleWarning():
        try:
            with time_code(solve_data.timing, 'feasibility subproblem'):
                feas_soln = nlpopt.solve(
                    feas_subproblem, tee=config.nlp_solver_tee, **nlp_args)
        except (ValueError, OverflowError) as error:
            for nlp_var, orig_val in zip(MindtPy.variable_list,
                                         solve_data.initial_var_values):
                if not nlp_var.fixed and not nlp_var.is_binary():
                    nlp_var.set_value(orig_val, skip_validation=True)
            with time_code(solve_data.timing, 'feasibility subproblem'):
                feas_soln = nlpopt.solve(
                    feas_subproblem, tee=config.nlp_solver_tee, **nlp_args)
    handle_feasibility_subproblem_tc(
        feas_soln.solver.termination_condition, MindtPy, solve_data, config)
    return feas_subproblem, feas_soln

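# ---------------------------------------------------------------------------
# A minimal, self-contained sketch (not MindtPy's own code) of the idea behind
# the feasibility subproblem above: each relaxed constraint g_i(x) <= 0 becomes
# g_i(x) <= s_i with s_i >= 0, and the objective minimizes the slacks (the L1
# norm is shown here). The model, constraints, and bounds below are
# illustrative assumptions, not taken from MindtPy.
from pyomo.environ import (ConcreteModel, Var, Constraint, Objective,
                           NonNegativeReals, minimize)

m = ConcreteModel()
m.x = Var(bounds=(0, 5), initialize=1.0)
m.y = Var(bounds=(0, 5), initialize=1.0)
m.s = Var(range(2), domain=NonNegativeReals, initialize=1.0)  # one slack per relaxed constraint

# Relaxed (originally hard) nonlinear constraints: g_i(x, y) <= s_i
m.g1 = Constraint(expr=m.x ** 2 + m.y ** 2 - 1 <= m.s[0])
m.g2 = Constraint(expr=m.x * m.y - 0.1 <= m.s[1])

# L1 feasibility objective: total constraint violation
m.feas_obj = Objective(expr=sum(m.s[i] for i in range(2)), sense=minimize)

# SolverFactory('ipopt').solve(m)  # requires an installed NLP solver
# A (near-)zero objective means the point can be repaired to feasibility;
# a strictly positive value measures how infeasible the fixed-integer point is.
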
def solve_NLP_feas(solve_data, config): """Solves feasibility NLP and copies result to working model Returns: Result values and dual values """ fix_nlp = solve_data.working_model.clone() add_feas_slacks(fix_nlp) MindtPy = fix_nlp.MindtPy_utils next(fix_nlp.component_data_objects(Objective, active=True)).deactivate() for constr in fix_nlp.component_data_objects(ctype=Constraint, active=True, descend_into=True): if constr.body.polynomial_degree() not in [0, 1]: constr.deactivate() MindtPy.MindtPy_feas.activate() MindtPy.MindtPy_feas_obj = Objective(expr=sum( s for s in MindtPy.MindtPy_feas.slack_var[...]), sense=minimize) TransformationFactory('core.fix_discrete').apply_to(fix_nlp) with SuppressInfeasibleWarning(): feas_soln = SolverFactory(config.nlp_solver).solve( fix_nlp, **config.nlp_solver_args) subprob_terminate_cond = feas_soln.solver.termination_condition if subprob_terminate_cond is tc.optimal: copy_var_list_values( MindtPy.variable_list, solve_data.working_model.MindtPy_utils.variable_list, config) elif subprob_terminate_cond is tc.infeasible: raise ValueError('Feasibility NLP infeasible. ' 'This should never happen.') else: raise ValueError( 'MindtPy unable to handle feasibility NLP termination condition ' 'of {}'.format(subprob_terminate_cond)) var_values = [v.value for v in MindtPy.variable_list] duals = [0 for _ in MindtPy.constraint_list] for i, constr in enumerate(MindtPy.constraint_list): # TODO rhs only works if constr.upper and constr.lower do not both have values. # Sometimes you might have 1 <= expr <= 1. This would give an incorrect rhs of 2. rhs = ((0 if constr.upper is None else constr.upper) + (0 if constr.lower is None else constr.lower)) sign_adjust = 1 if value(constr.upper) is None else -1 duals[i] = sign_adjust * max(0, sign_adjust * (rhs - value(constr.body))) if value(MindtPy.MindtPy_feas_obj.expr) == 0: raise ValueError('Problem is not feasible, check NLP solver') return fix_nlp, feas_soln
def solve_NLP_feas(solve_data, config):
    """Solves a feasibility NLP and returns the resulting variable values
    and dual estimates."""
    m = solve_data.working_model.clone()
    add_feas_slacks(m)
    MindtPy = m.MindtPy_utils
    next(m.component_data_objects(Objective, active=True)).deactivate()
    for constr in m.component_data_objects(ctype=Constraint,
                                           active=True,
                                           descend_into=True):
        constr.deactivate()

    MindtPy.MindtPy_feas.activate()
    MindtPy.MindtPy_feas_obj = Objective(
        expr=sum(s for s in MindtPy.MindtPy_feas.slack_var[...]),
        sense=minimize)
    for v in MindtPy.variable_list:
        if v.is_binary():
            v.fix(int(round(v.value)))

    # m.pprint()  # print the NLP feasibility problem for debugging
    with SuppressInfeasibleWarning():
        feas_soln = SolverFactory(config.nlp_solver).solve(
            m, **config.nlp_solver_args)
    subprob_terminate_cond = feas_soln.solver.termination_condition
    if subprob_terminate_cond is tc.optimal:
        copy_var_list_values(
            MindtPy.variable_list,
            solve_data.working_model.MindtPy_utils.variable_list,
            config)
    elif subprob_terminate_cond is tc.infeasible:
        raise ValueError('Feasibility NLP infeasible. '
                         'This should never happen.')
    else:
        raise ValueError(
            'MindtPy unable to handle feasibility NLP termination condition '
            'of {}'.format(subprob_terminate_cond))

    var_values = [v.value for v in MindtPy.variable_list]
    duals = [0 for _ in MindtPy.constraint_list]
    for i, constr in enumerate(MindtPy.constraint_list):
        # TODO rhs only works if constr.upper and constr.lower do not both have values.
        # Sometimes you might have 1 <= expr <= 1. This would give an incorrect rhs of 2.
        rhs = ((0 if constr.upper is None else constr.upper)
               + (0 if constr.lower is None else constr.lower))
        sign_adjust = 1 if value(constr.upper) is None else -1
        duals[i] = sign_adjust * max(
            0, sign_adjust * (rhs - value(constr.body)))

    if value(MindtPy.MindtPy_feas_obj.expr) == 0:
        raise ValueError('Problem is not feasible, check NLP solver')
    return var_values, duals

def test_handle_termination_condition(self):
    """Test the outer approximation decomposition algorithm."""
    model = SimpleMINLP()
    config = _get_MindtPy_config()
    solve_data = set_up_solve_data(model, config)
    with time_code(solve_data.timing, 'total', is_main_timer=True), \
            create_utility_block(solve_data.working_model, 'MindtPy_utils',
                                 solve_data):
        MindtPy = solve_data.working_model.MindtPy_utils
        setup_results_object(solve_data, config)
        process_objective(
            solve_data, config,
            move_linear_objective=(config.init_strategy == 'FP'
                                   or config.add_regularization is not None),
            use_mcpp=config.use_mcpp,
            update_var_con_list=config.add_regularization is None)

        feas = MindtPy.feas_opt = Block()
        feas.deactivate()
        feas.feas_constraints = ConstraintList(
            doc='Feasibility Problem Constraints')
        lin = MindtPy.cuts = Block()
        lin.deactivate()
        if config.feasibility_norm == 'L1' or config.feasibility_norm == 'L2':
            feas.nl_constraint_set = RangeSet(
                len(MindtPy.nonlinear_constraint_list),
                doc='Integer index set over the nonlinear constraints.')
            # Create slack variables for feasibility problem
            feas.slack_var = Var(feas.nl_constraint_set,
                                 domain=NonNegativeReals, initialize=1)
        else:
            feas.slack_var = Var(domain=NonNegativeReals, initialize=1)

        # no-good cuts exclude particular discrete decisions
        lin.no_good_cuts = ConstraintList(doc='no-good cuts')

        fixed_nlp = solve_data.working_model.clone()
        TransformationFactory('core.fix_integer_vars').apply_to(fixed_nlp)

        MindtPy_initialize_main(solve_data, config)

        # test handle_subproblem_other_termination
        termination_condition = tc.maxIterations
        config.add_no_good_cuts = True
        handle_subproblem_other_termination(fixed_nlp, termination_condition,
                                            solve_data, config)
        self.assertEqual(
            len(solve_data.mip.MindtPy_utils.cuts.no_good_cuts), 1)

        # test handle_main_other_conditions
        main_mip, main_mip_results = solve_main(solve_data, config)
        main_mip_results.solver.termination_condition = tc.infeasible
        handle_main_other_conditions(solve_data.mip, main_mip_results,
                                     solve_data, config)
        self.assertIs(
            solve_data.results.solver.termination_condition, tc.feasible)

        main_mip_results.solver.termination_condition = tc.unbounded
        handle_main_other_conditions(solve_data.mip, main_mip_results,
                                     solve_data, config)
        self.assertIn(main_mip.MindtPy_utils.objective_bound,
                      main_mip.component_data_objects(ctype=Constraint))

        main_mip.MindtPy_utils.del_component('objective_bound')
        main_mip_results.solver.termination_condition = tc.infeasibleOrUnbounded
        handle_main_other_conditions(solve_data.mip, main_mip_results,
                                     solve_data, config)
        self.assertIn(main_mip.MindtPy_utils.objective_bound,
                      main_mip.component_data_objects(ctype=Constraint))

        main_mip_results.solver.termination_condition = tc.maxTimeLimit
        handle_main_other_conditions(solve_data.mip, main_mip_results,
                                     solve_data, config)
        self.assertIs(
            solve_data.results.solver.termination_condition, tc.maxTimeLimit)

        main_mip_results.solver.termination_condition = tc.other
        main_mip_results.solution.status = SolutionStatus.feasible
        handle_main_other_conditions(solve_data.mip, main_mip_results,
                                     solve_data, config)
        for v1, v2 in zip(main_mip.MindtPy_utils.variable_list,
                          solve_data.working_model.MindtPy_utils.variable_list):
            self.assertEqual(v1.value, v2.value)

        # test handle_feasibility_subproblem_tc
        feas_subproblem = solve_data.working_model.clone()
        add_feas_slacks(feas_subproblem, config)
        MindtPy = feas_subproblem.MindtPy_utils
        MindtPy.feas_opt.activate()
        if config.feasibility_norm == 'L1':
            MindtPy.feas_obj = Objective(
                expr=sum(s for s in MindtPy.feas_opt.slack_var[...]),
                sense=minimize)
        elif config.feasibility_norm == 'L2':
            MindtPy.feas_obj = Objective(
                expr=sum(s * s for s in MindtPy.feas_opt.slack_var[...]),
                sense=minimize)
        else:
            MindtPy.feas_obj = Objective(
                expr=MindtPy.feas_opt.slack_var, sense=minimize)

        handle_feasibility_subproblem_tc(tc.optimal, MindtPy,
                                         solve_data, config)
        handle_feasibility_subproblem_tc(tc.infeasible, MindtPy,
                                         solve_data, config)
        self.assertIs(solve_data.should_terminate, True)
        self.assertIs(solve_data.results.solver.status, SolverStatus.error)

        solve_data.should_terminate = False
        solve_data.results.solver.status = None
        handle_feasibility_subproblem_tc(tc.maxIterations, MindtPy,
                                         solve_data, config)
        self.assertIs(solve_data.should_terminate, True)
        self.assertIs(solve_data.results.solver.status, SolverStatus.error)

        solve_data.should_terminate = False
        solve_data.results.solver.status = None
        handle_feasibility_subproblem_tc(tc.solverFailure, MindtPy,
                                         solve_data, config)
        self.assertIs(solve_data.should_terminate, True)
        self.assertIs(solve_data.results.solver.status, SolverStatus.error)

        # test NLP subproblem infeasible
        solve_data.working_model.Y[1].value = 0
        solve_data.working_model.Y[2].value = 0
        solve_data.working_model.Y[3].value = 0
        fixed_nlp, fixed_nlp_results = solve_subproblem(solve_data, config)
        solve_data.working_model.Y[1].value = None
        solve_data.working_model.Y[2].value = None
        solve_data.working_model.Y[3].value = None

        # test handle_nlp_subproblem_tc
        fixed_nlp_results.solver.termination_condition = tc.maxTimeLimit
        handle_nlp_subproblem_tc(fixed_nlp, fixed_nlp_results,
                                 solve_data, config)
        self.assertIs(solve_data.should_terminate, True)
        self.assertIs(
            solve_data.results.solver.termination_condition, tc.maxTimeLimit)

        fixed_nlp_results.solver.termination_condition = tc.maxEvaluations
        handle_nlp_subproblem_tc(fixed_nlp, fixed_nlp_results,
                                 solve_data, config)
        self.assertIs(solve_data.should_terminate, True)
        self.assertIs(
            solve_data.results.solver.termination_condition, tc.maxEvaluations)

        fixed_nlp_results.solver.termination_condition = tc.maxIterations
        handle_nlp_subproblem_tc(fixed_nlp, fixed_nlp_results,
                                 solve_data, config)
        self.assertIs(solve_data.should_terminate, True)
        self.assertIs(
            solve_data.results.solver.termination_condition, tc.maxEvaluations)

        # test handle_fp_main_tc
        config.init_strategy = 'FP'
        solve_data.fp_iter = 1
        init_rNLP(solve_data, config)
        feas_main, feas_main_results = solve_main(solve_data, config, fp=True)
        feas_main_results.solver.termination_condition = tc.optimal
        fp_should_terminate = handle_fp_main_tc(feas_main_results,
                                                solve_data, config)
        self.assertIs(fp_should_terminate, False)

        feas_main_results.solver.termination_condition = tc.maxTimeLimit
        fp_should_terminate = handle_fp_main_tc(feas_main_results,
                                                solve_data, config)
        self.assertIs(fp_should_terminate, True)
        self.assertIs(
            solve_data.results.solver.termination_condition, tc.maxTimeLimit)

        feas_main_results.solver.termination_condition = tc.infeasible
        fp_should_terminate = handle_fp_main_tc(feas_main_results,
                                                solve_data, config)
        self.assertIs(fp_should_terminate, True)

        feas_main_results.solver.termination_condition = tc.unbounded
        fp_should_terminate = handle_fp_main_tc(feas_main_results,
                                                solve_data, config)
        self.assertIs(fp_should_terminate, True)

        feas_main_results.solver.termination_condition = tc.other
        feas_main_results.solution.status = SolutionStatus.feasible
        fp_should_terminate = handle_fp_main_tc(feas_main_results,
                                                solve_data, config)
        self.assertIs(fp_should_terminate, False)

        feas_main_results.solver.termination_condition = tc.solverFailure
        fp_should_terminate = handle_fp_main_tc(feas_main_results,
                                                solve_data, config)
        self.assertIs(fp_should_terminate, True)

        # test generate_norm_constraint
        fp_nlp = solve_data.working_model.clone()
        config.fp_main_norm = 'L1'
        generate_norm_constraint(fp_nlp, solve_data, config)
        self.assertIsNotNone(
            fp_nlp.MindtPy_utils.find_component('L1_norm_constraint'))

        config.fp_main_norm = 'L2'
        generate_norm_constraint(fp_nlp, solve_data, config)
        self.assertIsNotNone(fp_nlp.find_component('norm_constraint'))

        fp_nlp.del_component('norm_constraint')
        config.fp_main_norm = 'L_infinity'
        generate_norm_constraint(fp_nlp, solve_data, config)
        self.assertIsNotNone(fp_nlp.find_component('norm_constraint'))

        # test set_solver_options
        config.mip_solver = 'gams'
        config.threads = 1
        opt = SolverFactory(config.mip_solver)
        set_solver_options(opt, solve_data, config, 'mip', regularization=False)

        config.mip_solver = 'gurobi'
        config.mip_regularization_solver = 'gurobi'
        config.regularization_mip_threads = 1
        opt = SolverFactory(config.mip_solver)
        set_solver_options(opt, solve_data, config, 'mip', regularization=True)

        config.nlp_solver = 'gams'
        config.nlp_solver_args['solver'] = 'ipopt'
        set_solver_options(opt, solve_data, config, 'nlp', regularization=False)

        config.nlp_solver_args['solver'] = 'ipopth'
        set_solver_options(opt, solve_data, config, 'nlp', regularization=False)

        config.nlp_solver_args['solver'] = 'conopt'
        set_solver_options(opt, solve_data, config, 'nlp', regularization=False)

        config.nlp_solver_args['solver'] = 'msnlp'
        set_solver_options(opt, solve_data, config, 'nlp', regularization=False)

        config.nlp_solver_args['solver'] = 'baron'
        set_solver_options(opt, solve_data, config, 'nlp', regularization=False)

        # test algorithm_should_terminate
        solve_data.should_terminate = True
        solve_data.UB = float('inf')
        self.assertIs(
            algorithm_should_terminate(solve_data, config, check_cycling=False),
            True)
        self.assertIs(
            solve_data.results.solver.termination_condition, tc.noSolution)

        solve_data.UB = 100
        self.assertIs(
            algorithm_should_terminate(solve_data, config, check_cycling=False),
            True)
        self.assertIs(
            solve_data.results.solver.termination_condition, tc.feasible)

        solve_data.objective_sense = maximize
        solve_data.LB = float('-inf')
        self.assertIs(
            algorithm_should_terminate(solve_data, config, check_cycling=False),
            True)
        self.assertIs(
            solve_data.results.solver.termination_condition, tc.noSolution)

        solve_data.LB = 100
        self.assertIs(
            algorithm_should_terminate(solve_data, config, check_cycling=False),
            True)
        self.assertIs(
            solve_data.results.solver.termination_condition, tc.feasible)

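# ---------------------------------------------------------------------------
# A hedged sketch (assumed logic, not MindtPy's implementation) of the
# bound-based check that the algorithm_should_terminate assertions above rely
# on: with no incumbent (UB = +inf for minimization) there is nothing to
# report, so termination maps to noSolution; with a finite incumbent the run
# can stop and report a feasible (possibly optimal) solution.
def classify_termination(lb, ub, abs_gap=1e-4):
    """Return a rough termination label for a minimization run."""
    if ub == float('inf'):
        return 'noSolution'   # no feasible point was ever found
    if ub - lb <= abs_gap:
        return 'optimal'      # bounds have closed to within tolerance
    return 'feasible'         # incumbent exists, but the gap is still open

print(classify_termination(float('-inf'), float('inf')))  # noSolution
print(classify_termination(float('-inf'), 100.0))         # feasible
print(classify_termination(99.99995, 100.0))              # optimal
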
def solve_feasibility_subproblem(solve_data, config): """ Solves a feasibility NLP if the fixed_nlp problem is infeasible Parameters ---------- solve_data: MindtPy Data Container data container that holds solve-instance data config: ConfigBlock contains the specific configurations for the algorithm Returns ------- feas_subproblem: Pyomo model feasibility NLP from the model feas_soln: Pyomo results object result from solving the feasibility NLP """ feas_subproblem = solve_data.working_model.clone() add_feas_slacks(feas_subproblem, config) MindtPy = feas_subproblem.MindtPy_utils if MindtPy.find_component('objective_value') is not None: MindtPy.objective_value.value = 0 next(feas_subproblem.component_data_objects(Objective, active=True)).deactivate() for constr in feas_subproblem.MindtPy_utils.nonlinear_constraint_list: constr.deactivate() MindtPy.feas_opt.activate() if config.feasibility_norm == 'L1': MindtPy.feas_obj = Objective(expr=sum( s for s in MindtPy.feas_opt.slack_var[...]), sense=minimize) elif config.feasibility_norm == 'L2': MindtPy.feas_obj = Objective(expr=sum( s * s for s in MindtPy.feas_opt.slack_var[...]), sense=minimize) else: MindtPy.feas_obj = Objective(expr=MindtPy.feas_opt.slack_var, sense=minimize) TransformationFactory('core.fix_integer_vars').apply_to(feas_subproblem) nlpopt = SolverFactory(config.nlp_solver) nlp_args = dict(config.nlp_solver_args) set_solver_options(nlpopt, solve_data, config, solver_type='nlp') with SuppressInfeasibleWarning(): try: with time_code(solve_data.timing, 'feasibility subproblem'): feas_soln = nlpopt.solve(feas_subproblem, tee=config.nlp_solver_tee, **nlp_args) except (ValueError, OverflowError) as error: for nlp_var, orig_val in zip(MindtPy.variable_list, solve_data.initial_var_values): if not nlp_var.fixed and not nlp_var.is_binary(): nlp_var.value = orig_val with time_code(solve_data.timing, 'feasibility subproblem'): feas_soln = nlpopt.solve(feas_subproblem, tee=config.nlp_solver_tee, **nlp_args) subprob_terminate_cond = feas_soln.solver.termination_condition if subprob_terminate_cond in {tc.optimal, tc.locallyOptimal, tc.feasible}: copy_var_list_values( MindtPy.variable_list, solve_data.working_model.MindtPy_utils.variable_list, config) elif subprob_terminate_cond in {tc.infeasible, tc.noSolution}: config.logger.error('Feasibility subproblem infeasible. ' 'This should never happen.') solve_data.should_terminate = True solve_data.results.solver.status = SolverStatus.error return feas_subproblem, feas_soln elif subprob_terminate_cond is tc.maxIterations: config.logger.error( 'Subsolver reached its maximum number of iterations without converging, ' 'consider increasing the iterations limit of the subsolver or reviewing your formulation.' ) solve_data.should_terminate = True solve_data.results.solver.status = SolverStatus.error return feas_subproblem, feas_soln else: config.error( 'MindtPy unable to handle feasibility subproblem termination condition ' 'of {}'.format(subprob_terminate_cond)) solve_data.should_terminate = True solve_data.results.solver.status = SolverStatus.error return feas_subproblem, feas_soln if value(MindtPy.feas_obj.expr) <= config.zero_tolerance: config.logger.warning( 'The objective value %.4E of feasibility problem is less than zero_tolerance. ' 'This indicates that the nlp subproblem is feasible, although it is found infeasible in the previous step. ' 'Check the nlp solver output' % value(MindtPy.feas_obj.expr)) return feas_subproblem, feas_soln
def solve_NLP_feas(solve_data, config): """ Solves a feasibility NLP if the fixed_nlp problem is infeasible Parameters ---------- solve_data: MindtPy Data Container data container that holds solve-instance data config: ConfigBlock contains the specific configurations for the algorithm Returns ------- feas_nlp: Pyomo model feasibility NLP from the model feas_soln: Pyomo results object result from solving the feasibility NLP """ feas_nlp = solve_data.working_model.clone() add_feas_slacks(feas_nlp, config) MindtPy = feas_nlp.MindtPy_utils if MindtPy.find_component('objective_value') is not None: MindtPy.objective_value.value = 0 next(feas_nlp.component_data_objects(Objective, active=True)).deactivate() for constr in feas_nlp.component_data_objects(ctype=Constraint, active=True, descend_into=True): if constr.body.polynomial_degree() not in [0, 1]: constr.deactivate() MindtPy.MindtPy_feas.activate() if config.feasibility_norm == 'L1': MindtPy.MindtPy_feas_obj = Objective(expr=sum( s for s in MindtPy.MindtPy_feas.slack_var[...]), sense=minimize) elif config.feasibility_norm == 'L2': MindtPy.MindtPy_feas_obj = Objective(expr=sum( s * s for s in MindtPy.MindtPy_feas.slack_var[...]), sense=minimize) else: MindtPy.MindtPy_feas_obj = Objective( expr=MindtPy.MindtPy_feas.slack_var, sense=minimize) TransformationFactory('core.fix_integer_vars').apply_to(feas_nlp) with SuppressInfeasibleWarning(): try: nlpopt = SolverFactory(config.nlp_solver) nlp_args = dict(config.nlp_solver_args) elapsed = get_main_elapsed_time(solve_data.timing) remaining = int(max(config.time_limit - elapsed, 1)) if config.nlp_solver == 'gams': nlp_args['add_options'] = nlp_args.get('add_options', []) nlp_args['add_options'].append('option reslim=%s;' % remaining) feas_soln = nlpopt.solve(feas_nlp, tee=config.solver_tee, **nlp_args) except (ValueError, OverflowError) as error: for nlp_var, orig_val in zip(MindtPy.variable_list, solve_data.initial_var_values): if not nlp_var.fixed and not nlp_var.is_binary(): nlp_var.value = orig_val feas_soln = nlpopt.solve(feas_nlp, tee=config.solver_tee, **nlp_args) subprob_terminate_cond = feas_soln.solver.termination_condition if subprob_terminate_cond in {tc.optimal, tc.locallyOptimal, tc.feasible}: copy_var_list_values( MindtPy.variable_list, solve_data.working_model.MindtPy_utils.variable_list, config) elif subprob_terminate_cond is tc.infeasible: raise ValueError('Feasibility NLP infeasible. ' 'This should never happen.') elif subprob_terminate_cond is tc.maxIterations: raise ValueError( 'Subsolver reached its maximum number of iterations without converging, ' 'consider increasing the iterations limit of the subsolver or reviewing your formulation.' ) else: raise ValueError( 'MindtPy unable to handle feasibility NLP termination condition ' 'of {}'.format(subprob_terminate_cond)) var_values = [v.value for v in MindtPy.variable_list] duals = [0 for _ in MindtPy.constraint_list] for i, c in enumerate(MindtPy.constraint_list): rhs = c.upper if c.has_ub() else c.lower c_geq = -1 if c.has_ub() else 1 duals[i] = c_geq * max(0, c_geq * (rhs - value(c.body))) if value(MindtPy.MindtPy_feas_obj.expr) <= config.zero_tolerance: config.logger.warning( "The objective value %.4E of feasibility problem is less than zero_tolerance. " "This indicates that the nlp subproblem is feasible, although it is found infeasible in the previous step. " "Check the nlp solver output" % value(MindtPy.MindtPy_feas_obj.expr)) return feas_nlp, feas_soln