Example #1
    def handle_lazy_subproblem_optimal(self, fixed_nlp, solve_data, config,
                                       opt):
        """This function copies the optimal solution of the fixed NLP subproblem to the MIP
        main problem(explanation see below), updates bound, adds OA and no-good cuts, 
        stores incumbent solution if it has been improved.

        Parameters
        ----------
        fixed_nlp : Pyomo model
            Integer-variable-fixed NLP model.
        solve_data : MindtPySolveData
            Data container that holds solve-instance data.
        config : ConfigBlock
            The specific configurations for MindtPy.
        opt : SolverFactory
            The cplex_persistent solver.
        """
        if config.calculate_dual:
            for c in fixed_nlp.tmp_duals:
                if fixed_nlp.dual.get(c, None) is None:
                    fixed_nlp.dual[c] = fixed_nlp.tmp_duals[c]
            dual_values = list(
                fixed_nlp.dual[c]
                for c in fixed_nlp.MindtPy_utils.constraint_list)
        else:
            dual_values = None
        main_objective = fixed_nlp.MindtPy_utils.objective_list[-1]
        update_primal_bound(solve_data, value(main_objective.expr))
        if solve_data.primal_bound_improved:
            solve_data.best_solution_found = fixed_nlp.clone()
            solve_data.best_solution_found_time = get_main_elapsed_time(
                solve_data.timing)
            if config.add_no_good_cuts or config.use_tabu_list:
                solve_data.stored_bound.update(
                    {solve_data.primal_bound: solve_data.dual_bound})
        config.logger.info(
            solve_data.fixed_nlp_log_formatter.format(
                '*' if solve_data.primal_bound_improved else ' ',
                solve_data.nlp_iter, 'Fixed NLP', value(main_objective.expr),
                solve_data.primal_bound, solve_data.dual_bound,
                solve_data.rel_gap, get_main_elapsed_time(solve_data.timing)))

        # In the OA algorithm, OA cuts are generated based on the solution of the subproblem.
        # We need to copy the variable values from the subproblem before adding the cuts,
        # since value(constr.body), value(jacs[constr][var]) and value(var) are used in self.add_lazy_oa_cuts().
        copy_var_list_values(fixed_nlp.MindtPy_utils.variable_list,
                             solve_data.mip.MindtPy_utils.variable_list,
                             config)
        if config.strategy == 'OA':
            self.add_lazy_oa_cuts(solve_data.mip, dual_values, solve_data,
                                  config, opt)
            if config.add_regularization is not None:
                add_oa_cuts(solve_data.mip, dual_values, solve_data, config)
        elif config.strategy == 'GOA':
            self.add_lazy_affine_cuts(solve_data, config, opt)
        if config.add_no_good_cuts:
            var_values = list(v.value
                              for v in fixed_nlp.MindtPy_utils.variable_list)
            self.add_lazy_no_good_cuts(var_values, solve_data, config, opt)
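
The OA cuts added above are first-order linearizations of the nonlinear constraints evaluated at the fixed NLP solution. The snippet below is a minimal, self-contained sketch of that linearization step for a single convex constraint g(x) <= 0; the function names are illustrative and this is not MindtPy's add_lazy_oa_cuts implementation.

def oa_cut_coefficients(g, grad_g, x_hat):
    """Return (g(x_hat), grad_g(x_hat)), so that
    g(x_hat) + grad_g(x_hat) . (x - x_hat) <= 0 is a valid cut for convex g."""
    return g(x_hat), grad_g(x_hat)

# Example: g(x) = x0**2 + x1**2 - 4 <= 0, linearized at x_hat = (1, 1)
g = lambda x: x[0] ** 2 + x[1] ** 2 - 4.0
grad_g = lambda x: [2.0 * x[0], 2.0 * x[1]]
g_val, grad_val = oa_cut_coefficients(g, grad_g, [1.0, 1.0])
# The resulting cut is: -2 + 2*(x0 - 1) + 2*(x1 - 1) <= 0
print(g_val, grad_val)  # -2.0 [2.0, 2.0]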
Example #2
def algorithm_should_terminate(solve_data, config):
    """Check if the algorithm should terminate.

    Termination conditions based on solver options and progress.

    """
    # Check bound convergence
    if solve_data.LB + config.bound_tolerance >= solve_data.UB:
        config.logger.info(
            'GDPopt exiting on bound convergence. '
            'LB: {:.10g} + (tol {:.10g}) >= UB: {:.10g}'.format(
                solve_data.LB, config.bound_tolerance, solve_data.UB))
        if solve_data.LB == float('inf') and solve_data.UB == float('inf'):
            solve_data.results.solver.termination_condition = tc.infeasible
        elif solve_data.LB == float('-inf') and solve_data.UB == float('-inf'):
            solve_data.results.solver.termination_condition = tc.infeasible
        else:
            solve_data.results.solver.termination_condition = tc.optimal
        return True

    # Check iteration limit
    if solve_data.master_iteration >= config.iterlim:
        config.logger.info(
            'GDPopt unable to converge bounds '
            'after %s master iterations.'
            % (solve_data.master_iteration,))
        config.logger.info(
            'Final bound values: LB: {:.10g}  UB: {:.10g}'.format(
                solve_data.LB, solve_data.UB))
        solve_data.results.solver.termination_condition = tc.maxIterations
        return True

    # Check time limit
    if get_main_elapsed_time(solve_data.timing) >= config.time_limit:
        config.logger.info(
            'GDPopt unable to converge bounds '
            'before time limit of {} seconds. '
            'Elapsed: {} seconds'
            .format(config.time_limit, get_main_elapsed_time(solve_data.timing)))
        config.logger.info(
            'Final bound values: LB: {}  UB: {}'.
            format(solve_data.LB, solve_data.UB))
        solve_data.results.solver.termination_condition = tc.maxTimeLimit
        return True

    if not algorithm_is_making_progress(solve_data, config):
        config.logger.debug(
            'Algorithm is not making enough progress. '
            'Exiting iteration loop.')
        solve_data.results.solver.termination_condition = tc.locallyOptimal
        return True
    return False
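
The convergence test above is a plain absolute-tolerance comparison of the bounds; several examples below also report a relative gap (rel_gap). A minimal standalone sketch of both checks (generic helpers, not code from GDPopt or MindtPy):

def bounds_converged(LB, UB, abs_tol=1e-6, rel_tol=1e-4):
    """Absolute check LB + tol >= UB, with a relative-gap fallback."""
    if LB + abs_tol >= UB:
        return True
    rel_gap = (UB - LB) / max(abs(UB), 1e-10)
    return rel_gap <= rel_tol

print(bounds_converged(9.9999995, 10.0))  # True: within absolute tolerance
print(bounds_converged(9.0, 10.0))        # False: 10% relative gap remains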
Example #3
def algorithm_should_terminate(solve_data, config):
    """Check if the algorithm should terminate.

    Termination conditions based on solver options and progress.

    """
    # Check bound convergence
    if solve_data.LB + config.bound_tolerance >= solve_data.UB:
        config.logger.info('GDPopt exiting on bound convergence. '
                           'LB: {:.10g} + (tol {:.10g}) >= UB: {:.10g}'.format(
                               solve_data.LB, config.bound_tolerance,
                               solve_data.UB))
        if solve_data.LB == float('inf') and solve_data.UB == float('inf'):
            solve_data.results.solver.termination_condition = tc.infeasible
        elif solve_data.LB == float('-inf') and solve_data.UB == float('-inf'):
            solve_data.results.solver.termination_condition = tc.infeasible
        else:
            solve_data.results.solver.termination_condition = tc.optimal
        return True

    # Check iteration limit
    if solve_data.master_iteration >= config.iterlim:
        config.logger.info('GDPopt unable to converge bounds '
                           'after %s master iterations.' %
                           (solve_data.master_iteration, ))
        config.logger.info(
            'Final bound values: LB: {:.10g}  UB: {:.10g}'.format(
                solve_data.LB, solve_data.UB))
        solve_data.results.solver.termination_condition = tc.maxIterations
        return True

    # Check time limit
    if get_main_elapsed_time(solve_data.timing) >= config.time_limit:
        config.logger.info('GDPopt unable to converge bounds '
                           'before time limit of {} seconds. '
                           'Elapsed: {} seconds'.format(
                               config.time_limit,
                               get_main_elapsed_time(solve_data.timing)))
        config.logger.info('Final bound values: LB: {}  UB: {}'.format(
            solve_data.LB, solve_data.UB))
        solve_data.results.solver.termination_condition = tc.maxTimeLimit
        return True

    if not algorithm_is_making_progress(solve_data, config):
        config.logger.debug('Algorithm is not making enough progress. '
                            'Exiting iteration loop.')
        solve_data.results.solver.termination_condition = tc.locallyOptimal
        return True
    return False
Example #4
def handle_main_max_timelimit(main_mip, main_mip_results, solve_data, config):
    """This function handles the result of the latest iteration of solving the MIP problem
    given that solving the MIP takes too long.

    Parameters
    ----------
    main_mip : Pyomo model
        The MIP main problem.
    main_mip_results : SolverResults
        Results from solving the MIP main subproblem.
    solve_data : MindtPySolveData
        Data container that holds solve-instance data.
    config : ConfigBlock
        The specific configurations for MindtPy.
    """
    # TODO: if we have found a valid feasible solution, take that; if not, we can at least use the dual bound.
    MindtPy = main_mip.MindtPy_utils
    config.logger.info('Unable to optimize MILP main problem '
                       'within time limit. '
                       'Using current solver feasible solution.')
    copy_var_list_values(main_mip.MindtPy_utils.variable_list,
                         solve_data.working_model.MindtPy_utils.variable_list,
                         config)
    update_suboptimal_dual_bound(solve_data, main_mip_results)
    config.logger.info(
        solve_data.log_formatter.format(
            solve_data.mip_iter, 'MILP', value(MindtPy.mip_obj.expr),
            solve_data.LB, solve_data.UB, solve_data.rel_gap,
            get_main_elapsed_time(solve_data.timing)))
Example #5
    def handle_lazy_main_feasible_solution(self, main_mip, solve_data, config,
                                           opt):
        """This function is called during the branch and bound of main mip, more 
        exactly when a feasible solution is found and LazyCallback is activated.
        Copy the result to working model and update upper or lower bound.
        In LP-NLP, upper or lower bound are updated during solving the main problem.

        Parameters
        ----------
        main_mip : Pyomo model
            The MIP main problem.
        solve_data : MindtPySolveData
            Data container that holds solve-instance data.
        config : ConfigBlock
            The specific configurations for MindtPy.
        opt : SolverFactory
            The cplex_persistent solver.
        """
        # proceed. Just need integer values

        # this value copy is useful since we need to fix subproblem based on the solution of the main problem
        self.copy_lazy_var_list_values(
            opt, main_mip.MindtPy_utils.variable_list,
            solve_data.working_model.MindtPy_utils.variable_list, config)
        update_dual_bound(solve_data, self.get_best_objective_value())
        config.logger.info(
            solve_data.log_formatter.format(
                solve_data.mip_iter, 'restrLP', self.get_objective_value(),
                solve_data.primal_bound, solve_data.dual_bound,
                solve_data.rel_gap, get_main_elapsed_time(solve_data.timing)))
Example #6
def handle_lazy_main_feasible_solution_gurobi(cb_m, cb_opt, solve_data,
                                              config):
    """This function is called during the branch and bound of main MIP problem, 
    more exactly when a feasible solution is found and LazyCallback is activated.

    Copy the solution to working model and update upper or lower bound.
    In LP-NLP, upper or lower bound are updated during solving the main problem.

    Parameters
    ----------
    cb_m : Pyomo model
        The MIP main problem.
    cb_opt : SolverFactory
        The gurobi_persistent solver.
    solve_data : MindtPySolveData
        Data container that holds solve-instance data.
    config : ConfigBlock
        The specific configurations for MindtPy.
    """
    # proceed. Just need integer values
    cb_opt.cbGetSolution(vars=cb_m.MindtPy_utils.variable_list)
    # this value copy is useful since we need to fix subproblem based on the solution of the main problem
    copy_var_list_values(cb_m.MindtPy_utils.variable_list,
                         solve_data.working_model.MindtPy_utils.variable_list,
                         config)
    update_dual_bound(solve_data,
                      cb_opt.cbGet(gurobipy.GRB.Callback.MIPSOL_OBJBND))
    config.logger.info(
        solve_data.log_formatter.format(
            solve_data.mip_iter, 'restrLP',
            cb_opt.cbGet(gurobipy.GRB.Callback.MIPSOL_OBJ),
            solve_data.primal_bound, solve_data.dual_bound, solve_data.rel_gap,
            get_main_elapsed_time(solve_data.timing)))
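
cb_opt.cbGetSolution and cb_opt.cbGet above are thin wrappers around gurobipy's callback interface. The sketch below shows the underlying raw-gurobipy pattern for reading an incumbent and the current best bound inside a MIPSOL callback; it assumes gurobipy is installed and is illustrative only.

import gurobipy as gp
from gurobipy import GRB

def mipsol_callback(model, where):
    # Fires whenever the MIP solver finds a new incumbent solution.
    if where == GRB.Callback.MIPSOL:
        incumbent_vals = model.cbGetSolution(model.getVars())
        incumbent_obj = model.cbGet(GRB.Callback.MIPSOL_OBJ)
        best_bound = model.cbGet(GRB.Callback.MIPSOL_OBJBND)
        # ... update primal/dual bounds or add lazy cuts via model.cbLazy(...)

# m = gp.Model()
# ... build the model ...
# m.optimize(mipsol_callback)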
Example #7
def update_primal_bound(solve_data, bound_value):
    """Update the primal bound.

    Called after solving the fixed NLP subproblem.
    Uses the objective value of the fixed NLP subproblem to update the primal bound.

    Parameters
    ----------
    solve_data : MindtPySolveData
        Data container that holds solve-instance data.
    bound_value : float
        The input value used to update the primal bound.
    """
    if math.isnan(bound_value):
        return
    if solve_data.objective_sense == minimize:
        solve_data.primal_bound = min(bound_value, solve_data.primal_bound)
        solve_data.primal_bound_improved = solve_data.primal_bound < solve_data.primal_bound_progress[
            -1]
    else:
        solve_data.primal_bound = max(bound_value, solve_data.primal_bound)
        solve_data.primal_bound_improved = solve_data.primal_bound > solve_data.primal_bound_progress[
            -1]
    solve_data.primal_bound_progress.append(solve_data.primal_bound)
    solve_data.primal_bound_progress_time.append(
        get_main_elapsed_time(solve_data.timing))
    if solve_data.primal_bound_improved:
        update_gap(solve_data)
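
A standalone sketch of the primal-bound bookkeeping performed above, using a plain list in place of MindtPy's solve_data container (illustrative only):

import math

def update_primal_bound_simple(progress, bound_value, minimizing=True):
    """progress[-1] is the current primal bound; returns (new_bound, improved)."""
    if math.isnan(bound_value):
        return progress[-1], False
    if minimizing:
        new_bound = min(bound_value, progress[-1])
        improved = new_bound < progress[-1]
    else:
        new_bound = max(bound_value, progress[-1])
        improved = new_bound > progress[-1]
    progress.append(new_bound)
    return new_bound, improved

progress = [float('inf')]
print(update_primal_bound_simple(progress, 12.5))  # (12.5, True)
print(update_primal_bound_simple(progress, 14.0))  # (12.5, False)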
Example #8
def handle_main_optimal(main_mip, solve_data, config, update_bound=True):
    """This function copies the results from 'solve_main' to the working model and updates the upper/lower bound. This
    function is called after an optimal solution is found for the main problem.

    Args:
        main_mip (Pyomo model): the MIP main problem.
        solve_data (MindtPySolveData): data container that holds solve-instance data.
        config (ConfigBlock): the specific configurations for MindtPy.
        update_bound (bool, optional): whether to update the bound. The bound is not updated when handling the regularization problem. Defaults to True.
    """
    # proceed. Just need integer values
    MindtPy = main_mip.MindtPy_utils
    # check if the value of binary variable is valid
    for var in MindtPy.discrete_variable_list:
        if var.value is None:
            config.logger.warning(
                f"Integer variable {var.name} not initialized.  "
                "Setting it to its lower bound")
            var.set_value(var.lb, skip_validation=True)  # nlp_var.bounds[0]
    # warm start for the nlp subproblem
    copy_var_list_values(main_mip.MindtPy_utils.variable_list,
                         solve_data.working_model.MindtPy_utils.variable_list,
                         config)

    if update_bound:
        update_dual_bound(solve_data, value(MindtPy.mip_obj.expr))
        config.logger.info(
            solve_data.log_formatter.format(
                solve_data.mip_iter, 'MILP', value(MindtPy.mip_obj.expr),
                solve_data.LB, solve_data.UB, solve_data.rel_gap,
                get_main_elapsed_time(solve_data.timing)))
Example #9
def algorithm_should_terminate(solve_data, config):
    """Check if the algorithm should terminate.

    Termination conditions based on solver options and progress.
    Sets the solve_data.results.solver.termination_condition to the appropriate
    condition, i.e. optimal, maxIterations, maxTimeLimit

    """
    # Check bound convergence
    if solve_data.LB + config.bound_tolerance >= solve_data.UB:
        config.logger.info(
            'MindtPy exiting on bound convergence. '
            'LB: {} + (tol {}) >= UB: {}\n'.format(
                solve_data.LB, config.bound_tolerance, solve_data.UB))
        solve_data.results.solver.termination_condition = tc.optimal
        return True

    # Check iteration limit
    if solve_data.mip_iter >= config.iteration_limit:
        config.logger.info(
            'MindtPy unable to converge bounds '
            'after {} master iterations.'.format(solve_data.mip_iter))
        config.logger.info(
            'Final bound values: LB: {}  UB: {}'.
            format(solve_data.LB, solve_data.UB))
        solve_data.results.solver.termination_condition = tc.maxIterations
        return True

    # Check time limit
    if get_main_elapsed_time(solve_data.timing) >= config.time_limit:
        config.logger.info(
            'MindtPy unable to converge bounds '
            'before time limit of {} seconds. '
            'Elapsed: {} seconds'
            .format(config.time_limit, get_main_elapsed_time(solve_data.timing)))
        config.logger.info(
            'Final bound values: LB: {}  UB: {}'.
            format(solve_data.LB, solve_data.UB))
        solve_data.results.solver.termination_condition = tc.maxTimeLimit
        return True
    # if not algorithm_is_making_progress(solve_data, config):
    #     config.logger.debug(
    #         'Algorithm is not making enough progress. '
    #         'Exiting iteration loop.')
    #     return True
    return False
Example #10
def algorithm_should_terminate(solve_data, config):
    """Check if the algorithm should terminate.

    Termination conditions based on solver options and progress.

    """
    # Check bound convergence
    if solve_data.LB + config.bound_tolerance >= solve_data.UB:
        config.logger.info(
            'MindtPy exiting on bound convergence. '
            'LB: {} + (tol {}) >= UB: {}\n'.format(
                solve_data.LB, config.bound_tolerance, solve_data.UB))
        solve_data.results.solver.termination_condition = tc.optimal
        return True

    # Check iteration limit
    if solve_data.mip_iter >= config.iteration_limit:
        config.logger.info(
            'MindtPy unable to converge bounds '
            'after {} master iterations.'.format(solve_data.mip_iter))
        config.logger.info(
            'Final bound values: LB: {}  UB: {}'.
            format(solve_data.LB, solve_data.UB))
        solve_data.results.solver.termination_condition = tc.maxIterations
        return True

    # Check time limit
    if get_main_elapsed_time(solve_data.timing) >= config.time_limit:
        config.logger.info(
            'MindtPy unable to converge bounds '
            'before time limit of {} seconds. '
            'Elapsed: {} seconds'
            .format(config.time_limit, get_main_elapsed_time(solve_data.timing)))
        config.logger.info(
            'Final bound values: LB: {}  UB: {}'.
            format(solve_data.LB, solve_data.UB))
        solve_data.results.solver.termination_condition = tc.maxTimeLimit
        return True
    # if not algorithm_is_making_progress(solve_data, config):
    #     config.logger.debug(
    #         'Algorithm is not making enough progress. '
    #         'Exiting iteration loop.')
    #     return True
    return False
Example #11
def handle_main_other_conditions(main_mip, main_mip_results, solve_data,
                                 config):
    """This function handles the result of the latest iteration of solving the MIP problem (given any of a few
    edge conditions, such as if the solution is neither infeasible nor optimal).

    Parameters
    ----------
    main_mip : Pyomo model
        The MIP main problem.
    main_mip_results : SolverResults
        Results from solving the MIP problem.
    solve_data : MindtPySolveData
        Data container that holds solve-instance data.
    config : ConfigBlock
        The specific configurations for MindtPy.

    Raises
    ------
    ValueError
        MindtPy unable to handle MILP main termination condition.
    """
    if main_mip_results.solver.termination_condition is tc.infeasible:
        handle_main_infeasible(main_mip, solve_data, config)
    elif main_mip_results.solver.termination_condition is tc.unbounded:
        temp_results = handle_main_unbounded(main_mip, solve_data, config)
    elif main_mip_results.solver.termination_condition is tc.infeasibleOrUnbounded:
        temp_results = handle_main_unbounded(main_mip, solve_data, config)
        if temp_results.solver.termination_condition is tc.infeasible:
            handle_main_infeasible(main_mip, solve_data, config)
    elif main_mip_results.solver.termination_condition is tc.maxTimeLimit:
        handle_main_max_timelimit(main_mip, main_mip_results, solve_data,
                                  config)
        solve_data.results.solver.termination_condition = tc.maxTimeLimit
    elif (main_mip_results.solver.termination_condition is tc.other
          and main_mip_results.solution.status is SolutionStatus.feasible):
        # load the solution and suppress the warning message by setting
        # solver status to ok.
        MindtPy = main_mip.MindtPy_utils
        config.logger.info('MILP solver reported feasible solution, '
                           'but not guaranteed to be optimal.')
        copy_var_list_values(
            main_mip.MindtPy_utils.variable_list,
            solve_data.working_model.MindtPy_utils.variable_list, config)
        update_suboptimal_dual_bound(solve_data, main_mip_results)
        config.logger.info(
            solve_data.log_formatter.format(
                solve_data.mip_iter, 'MILP', value(MindtPy.mip_obj.expr),
                solve_data.LB, solve_data.UB, solve_data.rel_gap,
                get_main_elapsed_time(solve_data.timing)))
    else:
        raise ValueError(
            'MindtPy unable to handle MILP main termination condition '
            'of %s. Solver message: %s' %
            (main_mip_results.solver.termination_condition,
             main_mip_results.solver.message))
Example #12
def solve_linear_subproblem(mip_model, solve_data, config):
    GDPopt = mip_model.GDPopt_utils

    initialize_subproblem(mip_model, solve_data)

    # Callback immediately before solving NLP subproblem
    config.call_before_subproblem_solve(mip_model, solve_data)

    mip_solver = SolverFactory(config.mip_solver)
    if not mip_solver.available():
        raise RuntimeError("MIP solver %s is not available." %
                           config.mip_solver)
    with SuppressInfeasibleWarning():
        mip_args = dict(config.mip_solver_args)
        elapsed = get_main_elapsed_time(solve_data.timing)
        remaining = max(config.time_limit - elapsed, 1)
        if config.mip_solver == 'gams':
            mip_args['add_options'] = mip_args.get('add_options', [])
            mip_args['add_options'].append('option reslim=%s;' % remaining)
        elif config.mip_solver == 'multisolve':
            mip_args['time_limit'] = min(
                mip_args.get('time_limit', float('inf')), remaining)
        results = mip_solver.solve(mip_model, **mip_args)

    subprob_result = SubproblemResult()
    subprob_result.feasible = True
    subprob_result.var_values = list(v.value for v in GDPopt.variable_list)
    subprob_result.pyomo_results = results
    subprob_result.dual_values = list(
        mip_model.dual.get(c, None) for c in GDPopt.constraint_list)

    subprob_terminate_cond = results.solver.termination_condition
    if subprob_terminate_cond is tc.optimal:
        pass
    elif subprob_terminate_cond is tc.infeasible:
        config.logger.info('MIP subproblem was infeasible.')
        subprob_result.feasible = False
    else:
        raise ValueError('GDPopt unable to handle MIP subproblem termination '
                         'condition of %s. Results: %s' %
                         (subprob_terminate_cond, results))

    # Call the NLP post-solve callback
    config.call_after_subproblem_solve(mip_model, solve_data)

    # if feasible, call the NLP post-feasible callback
    if subprob_result.feasible:
        config.call_after_subproblem_feasible(mip_model, solve_data)

    return subprob_result
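
The time-limit handling above subtracts the elapsed time from the configured limit and forwards the remainder as a solver-specific option. A minimal generic sketch of that budgeting step (not GDPopt's own helper):

import time

def remaining_time_budget(start_time, time_limit, minimum=1):
    """Seconds left for the next subproblem solve, never less than `minimum`."""
    elapsed = time.time() - start_time
    return max(time_limit - elapsed, minimum)

start = time.time()
budget = remaining_time_budget(start, time_limit=600)
# e.g. for GAMS:        mip_args['add_options'].append('option reslim=%s;' % budget)
# e.g. for multisolve:  mip_args['time_limit'] = min(existing_limit, budget)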
Example #13
def handle_fp_main_tc(feas_main_results, solve_data, config):
    """Handle the termination condition of the feasibility pump main problem.

    Parameters
    ----------
    feas_main_results : SolverResults
        The results from solving the FP main problem.
    solve_data : MindtPySolveData
        Data container that holds solve-instance data.
    config : ConfigBlock
        The specific configurations for MindtPy.

    Returns
    -------
    bool
        True if FP loop should terminate, False otherwise.
    """
    if feas_main_results.solver.termination_condition is tc.optimal:
        config.logger.info(
            solve_data.log_formatter.format(
                solve_data.fp_iter, 'FP-MIP',
                value(solve_data.mip.MindtPy_utils.fp_mip_obj), solve_data.LB,
                solve_data.UB, solve_data.rel_gap,
                get_main_elapsed_time(solve_data.timing)))
        return False
    elif feas_main_results.solver.termination_condition is tc.maxTimeLimit:
        config.logger.warning('FP-MIP reached the time limit')
        solve_data.results.solver.termination_condition = tc.maxTimeLimit
        return True
    elif feas_main_results.solver.termination_condition is tc.infeasible:
        config.logger.warning('FP-MIP infeasible')
        no_good_cuts = solve_data.mip.MindtPy_utils.cuts.no_good_cuts
        if len(no_good_cuts) > 0:
            no_good_cuts[len(no_good_cuts)].deactivate()
        return True
    elif feas_main_results.solver.termination_condition is tc.unbounded:
        config.logger.warning('FP-MIP unbounded')
        return True
    elif (feas_main_results.solver.termination_condition is tc.other
          and feas_main_results.solution.status is SolutionStatus.feasible):
        config.logger.warning(
            'MILP solver reported feasible solution of FP-MIP, '
            'but not guaranteed to be optimal.')
        return False
    else:
        config.logger.warning('Unexpected result of FP-MIP')
        return True
Example #14
def handle_main_optimal(main_mip, solve_data, config, update_bound=True):
    """
    This function copies the result from 'solve_main' to the working model and updates the upper/lower bound. This
    function is called after an optimal solution is found for the main problem.

    Parameters
    ----------
    main_mip: Pyomo model
        the MIP main problem
    solve_data: MindtPy Data Container
        data container that holds solve-instance data
    config: ConfigBlock
        contains the specific configurations for the algorithm
    """
    # proceed. Just need integer values
    MindtPy = main_mip.MindtPy_utils
    # check if the value of binary variable is valid
    for var in MindtPy.discrete_variable_list:
        if var.value is None:
            config.logger.warning(
                "Integer variable {} not initialized. It is set to it's lower bound"
                .format(var.name))
            var.value = var.lb  # nlp_var.bounds[0]
    # warm start for the nlp subproblem
    copy_var_list_values(main_mip.MindtPy_utils.variable_list,
                         solve_data.working_model.MindtPy_utils.variable_list,
                         config)

    if update_bound:
        if solve_data.objective_sense == minimize:
            solve_data.LB = max(value(MindtPy.mip_obj.expr), solve_data.LB)
            solve_data.bound_improved = solve_data.LB > solve_data.LB_progress[
                -1]
            solve_data.LB_progress.append(solve_data.LB)
        else:
            solve_data.UB = min(value(MindtPy.mip_obj.expr), solve_data.UB)
            solve_data.bound_improved = solve_data.UB < solve_data.UB_progress[
                -1]
            solve_data.UB_progress.append(solve_data.UB)
        config.logger.info(
            'MIP %s: OBJ: %s  LB: %s  UB: %s  TIME: %ss' %
            (solve_data.mip_iter, value(
                MindtPy.mip_obj.expr), solve_data.LB, solve_data.UB,
             round(get_main_elapsed_time(solve_data.timing), 2)))
Example #15
    def handle_lazy_main_feasible_solution(self, main_mip, solve_data, config,
                                           opt):
        """ This function is called during the branch and bound of main mip, more exactly when a feasible solution is found and LazyCallback is activated.
        Copy the result to working model and update upper or lower bound.
        In LP-NLP, upper or lower bound are updated during solving the main problem

        Parameters
        ----------
        main_mip: Pyomo model
            the MIP main problem
        solve_data: MindtPy Data Container
            data container that holds solve-instance data
        config: ConfigBlock
            contains the specific configurations for the algorithm
        opt: SolverFactory
            the mip solver
        """
        # proceed. Just need integer values

        # this value copy is useful since we need to fix subproblem based on the solution of the main problem
        self.copy_lazy_var_list_values(
            opt, main_mip.MindtPy_utils.variable_list,
            solve_data.working_model.MindtPy_utils.variable_list, config)
        if solve_data.objective_sense == minimize:
            solve_data.LB = max(self.get_best_objective_value(), solve_data.LB)
            solve_data.bound_improved = solve_data.LB > solve_data.LB_progress[
                -1]
            solve_data.LB_progress.append(solve_data.LB)
        else:
            solve_data.UB = min(self.get_best_objective_value(), solve_data.UB)
            solve_data.bound_improved = solve_data.UB < solve_data.UB_progress[
                -1]
            solve_data.UB_progress.append(solve_data.UB)
        config.logger.info(
            'MIP %s: OBJ (at current node): %s  Bound: %s  LB: %s  UB: %s  TIME: %s'
            % (solve_data.mip_iter, self.get_objective_value(),
               self.get_best_objective_value(), solve_data.LB, solve_data.UB,
               round(get_main_elapsed_time(solve_data.timing), 2)))
Example #16
def handle_main_max_timelimit(main_mip, main_mip_results, solve_data, config):
    """This function handles the result of the latest iteration of solving the MIP problem given that solving the
    MIP takes too long.

    Args:
        main_mip (Pyomo model): the MIP main problem.
        main_mip_results (SolverResults): results from solving the MIP main subproblem.
        solve_data (MindtPySolveData): data container that holds solve-instance data.
        config (ConfigBlock): the specific configurations for MindtPy.
    """
    # TODO check that status is actually ok and everything is feasible
    MindtPy = main_mip.MindtPy_utils
    config.logger.info('Unable to optimize MILP main problem '
                       'within time limit. '
                       'Using current solver feasible solution.')
    copy_var_list_values(main_mip.MindtPy_utils.variable_list,
                         solve_data.working_model.MindtPy_utils.variable_list,
                         config)
    uptade_suboptimal_dual_bound(solve_data, main_mip_results)
    config.logger.info(
        solve_data.log_formatter.format(
            solve_data.mip_iter, 'MILP', value(MindtPy.mip_obj.expr),
            solve_data.LB, solve_data.UB, solve_data.rel_gap,
            get_main_elapsed_time(solve_data.timing)))
Example #17
def init_max_binaries(solve_data, config):
    """Modifies model by maximizing the number of activated binary variables.

    Note - The user would usually want to call solve_subproblem after an invocation 
    of this function.

    Parameters
    ----------
    solve_data : MindtPySolveData
        Data container that holds solve-instance data.
    config : ConfigBlock
        The specific configurations for MindtPy.

    Raises
    ------
    ValueError
        MILP main problem is infeasible.
    ValueError
        MindtPy unable to handle the termination condition of the MILP main problem.
    """
    m = solve_data.working_model.clone()
    if config.calculate_dual:
        m.dual.deactivate()
    MindtPy = m.MindtPy_utils
    solve_data.mip_subiter += 1
    config.logger.debug('Initialization: maximize value of binaries')
    for c in MindtPy.nonlinear_constraint_list:
        c.deactivate()
    objective = next(m.component_data_objects(Objective, active=True))
    objective.deactivate()
    binary_vars = (v for v in m.MindtPy_utils.discrete_variable_list
                   if v.is_binary() and not v.fixed)
    MindtPy.max_binary_obj = Objective(expr=sum(v for v in binary_vars),
                                       sense=maximize)

    getattr(m, 'ipopt_zL_out', _DoNothing()).deactivate()
    getattr(m, 'ipopt_zU_out', _DoNothing()).deactivate()

    mipopt = SolverFactory(config.mip_solver)
    if isinstance(mipopt, PersistentSolver):
        mipopt.set_instance(m)
    mip_args = dict(config.mip_solver_args)
    set_solver_options(mipopt, solve_data, config, solver_type='mip')
    results = mipopt.solve(m, tee=config.mip_solver_tee, **mip_args)

    solve_terminate_cond = results.solver.termination_condition
    if solve_terminate_cond is tc.optimal:
        copy_var_list_values(
            MindtPy.variable_list,
            solve_data.working_model.MindtPy_utils.variable_list, config)
        config.logger.info(
            solve_data.log_formatter.format(
                '-', 'Max binary MILP', value(MindtPy.max_binary_obj.expr),
                solve_data.LB, solve_data.UB, solve_data.rel_gap,
                get_main_elapsed_time(solve_data.timing)))
    elif solve_terminate_cond is tc.infeasible:
        raise ValueError('MILP main problem is infeasible. '
                         'Problem may have no more feasible '
                         'binary configurations.')
    elif solve_terminate_cond is tc.maxTimeLimit:
        config.logger.info(
            'Max binary MILP failed to converge within time limit.')
        solve_data.results.solver.termination_condition = tc.maxTimeLimit
    elif solve_terminate_cond is tc.maxIterations:
        config.logger.info(
            'Max binary MILP failed to converge within iteration limit.')
    else:
        raise ValueError(
            'MindtPy unable to handle MILP main termination condition '
            'of %s. Solver message: %s' %
            (solve_terminate_cond, results.solver.message))
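
The initialization above deactivates the nonlinear constraints and the original objective and instead maximizes the number of activated binary variables. A minimal standalone Pyomo sketch of that objective (model and data are made up; a MIP solver such as glpk is assumed if you uncomment the solve):

from pyomo.environ import (ConcreteModel, Var, Binary, Constraint, Objective,
                           SolverFactory, maximize, value)

m = ConcreteModel()
m.y = Var([1, 2, 3], domain=Binary)
m.knapsack = Constraint(expr=2 * m.y[1] + 3 * m.y[2] + 4 * m.y[3] <= 6)

# Maximize the number of activated binaries to obtain an integer-feasible
# starting point (the original objective would be deactivated first).
m.max_binary_obj = Objective(expr=m.y[1] + m.y[2] + m.y[3], sense=maximize)

# SolverFactory('glpk').solve(m)
# print([value(m.y[i]) for i in m.y])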
Example #18
def solve_MINLP(model, solve_data, config):
    """Solve the MINLP subproblem."""
    config.logger.info(
        "Solving MINLP subproblem for fixed logical realizations.")

    GDPopt = model.GDPopt_utils

    initialize_subproblem(model, solve_data)

    # Callback immediately before solving MINLP subproblem
    config.call_before_subproblem_solve(model, solve_data)

    minlp_solver = SolverFactory(config.minlp_solver)
    if not minlp_solver.available():
        raise RuntimeError("MINLP solver %s is not available." %
                           config.minlp_solver)
    with SuppressInfeasibleWarning():
        minlp_args = dict(config.minlp_solver_args)
        elapsed = get_main_elapsed_time(solve_data.timing)
        remaining = max(config.time_limit - elapsed, 1)
        if config.minlp_solver == 'gams':
            minlp_args['add_options'] = minlp_args.get('add_options', [])
            minlp_args['add_options'].append('option reslim=%s;' % remaining)
        elif config.minlp_solver == 'multisolve':
            minlp_args['time_limit'] = min(
                minlp_args.get('time_limit', float('inf')), remaining)
        results = minlp_solver.solve(model, **minlp_args)

    subprob_result = SubproblemResult()
    subprob_result.feasible = True
    subprob_result.var_values = list(v.value for v in GDPopt.variable_list)
    subprob_result.pyomo_results = results
    subprob_result.dual_values = list(
        model.dual.get(c, None) for c in GDPopt.constraint_list)

    term_cond = results.solver.termination_condition
    if any(term_cond == cond
           for cond in (tc.optimal, tc.locallyOptimal, tc.feasible)):
        pass
    elif term_cond == tc.infeasible:
        config.logger.info('MINLP subproblem was infeasible.')
        subprob_result.feasible = False
    elif term_cond == tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'MINLP subproblem failed to converge within iteration limit.')
        if is_feasible(model, config):
            config.logger.info(
                'MINLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            subprob_result.feasible = False
    elif term_cond == tc.maxTimeLimit:
        config.logger.info(
            'MINLP subproblem failed to converge within time limit.')
        if is_feasible(model, config):
            config.logger.info(
                'MINLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            subprob_result.feasible = False
    elif term_cond == tc.intermediateNonInteger:
        config.logger.info(
            "MINLP solver could not find feasible integer solution: %s" %
            results.solver.message)
        subprob_result.feasible = False
    else:
        raise ValueError(
            'GDPopt unable to handle MINLP subproblem termination '
            'condition of %s. Results: %s' % (term_cond, results))

    # Call the subproblem post-solve callback
    config.call_after_subproblem_solve(model, solve_data)

    # if feasible, call the subproblem post-feasible callback
    if subprob_result.feasible:
        config.call_after_subproblem_feasible(model, solve_data)

    return subprob_result
Example #19
def _perform_branch_and_bound(solve_data):
    solve_data.explored_nodes = 0
    root_node = solve_data.working_model
    root_util_blk = root_node.GDPopt_utils
    config = solve_data.config

    # Map unfixed disjunct -> list of deactivated constraints
    root_util_blk.disjunct_to_nonlinear_constraints = ComponentMap()
    # Map relaxed disjunctions -> list of unfixed disjuncts
    root_util_blk.disjunction_to_unfixed_disjuncts = ComponentMap()

    # Preprocess the active disjunctions
    for disjunction in root_util_blk.disjunction_list:
        assert disjunction.active

        disjuncts_fixed_True = []
        disjuncts_fixed_False = []
        unfixed_disjuncts = []

        # categorize the disjuncts in the disjunction
        for disjunct in disjunction.disjuncts:
            if disjunct.indicator_var.fixed:
                if disjunct.indicator_var.value == 1:
                    disjuncts_fixed_True.append(disjunct)
                elif disjunct.indicator_var.value == 0:
                    disjuncts_fixed_False.append(disjunct)
                else:
                    pass  # raise error for fractional value?
            else:
                unfixed_disjuncts.append(disjunct)

        # update disjunct lists for predetermined disjunctions
        if len(disjuncts_fixed_False) == len(disjunction.disjuncts) - 1:
            # all but one disjunct in the disjunction is fixed to False.
            # Remaining one must be true. If not already fixed to True, do so.
            if not disjuncts_fixed_True:
                disjuncts_fixed_True = unfixed_disjuncts
                unfixed_disjuncts = []
                disjuncts_fixed_True[0].indicator_var.fix(1)
        elif disjuncts_fixed_True and disjunction.xor:
            assert len(
                disjuncts_fixed_True
            ) == 1, "XOR (only one True) violated: %s" % disjunction.name
            disjuncts_fixed_False.extend(unfixed_disjuncts)
            unfixed_disjuncts = []

        # Make sure disjuncts fixed to False are properly deactivated.
        for disjunct in disjuncts_fixed_False:
            disjunct.deactivate()

        # Deactivate nonlinear constraints in unfixed disjuncts
        for disjunct in unfixed_disjuncts:
            nonlinear_constraints_in_disjunct = [
                constr
                for constr in disjunct.component_data_objects(Constraint,
                                                              active=True)
                if constr.body.polynomial_degree() not in _linear_degrees
            ]
            for constraint in nonlinear_constraints_in_disjunct:
                constraint.deactivate()
            if nonlinear_constraints_in_disjunct:
                # TODO might be worthwhile to log number of nonlinear constraints in each disjunction
                # for later branching purposes
                root_util_blk.disjunct_to_nonlinear_constraints[
                    disjunct] = nonlinear_constraints_in_disjunct

        root_util_blk.disjunction_to_unfixed_disjuncts[
            disjunction] = unfixed_disjuncts

    # Add the BigM suffix if it does not already exist. Used later during nonlinear constraint activation.
    # TODO is this still necessary?
    if not hasattr(root_node, 'BigM'):
        root_node.BigM = Suffix()

    # Set up the priority queue
    queue = solve_data.bb_queue = []
    solve_data.created_nodes = 0
    unbranched_disjunction_indices = [
        i for i, disjunction in enumerate(root_util_blk.disjunction_list)
        if disjunction in root_util_blk.disjunction_to_unfixed_disjuncts
    ]
    sort_tuple = BBNodeData(
        obj_lb=float('-inf'),
        obj_ub=float('inf'),
        is_screened=False,
        is_evaluated=False,
        num_unbranched_disjunctions=len(unbranched_disjunction_indices),
        node_count=0,
        unbranched_disjunction_indices=unbranched_disjunction_indices,
    )
    heappush(queue, (sort_tuple, root_node))

    # Do the branch and bound
    while len(queue) > 0:
        # visit the top node on the heap
        # from pprint import pprint
        # pprint([(
        #     x[0].node_count, x[0].obj_lb, x[0].obj_ub, x[0].num_unbranched_disjunctions
        # ) for x in sorted(queue)])
        node_data, node_model = heappop(queue)
        config.logger.info("Nodes: %s LB %.10g Unbranched %s" %
                           (solve_data.explored_nodes, node_data.obj_lb,
                            node_data.num_unbranched_disjunctions))

        # Check time limit
        elapsed = get_main_elapsed_time(solve_data.timing)
        if elapsed >= config.time_limit:
            config.logger.info('GDPopt-LBB unable to converge bounds '
                               'before time limit of {} seconds. '
                               'Elapsed: {} seconds'.format(
                                   config.time_limit, elapsed))
            no_feasible_soln = float('inf')
            solve_data.LB = node_data.obj_lb if solve_data.objective_sense == minimize else -no_feasible_soln
            solve_data.UB = no_feasible_soln if solve_data.objective_sense == minimize else -node_data.obj_lb
            config.logger.info('Final bound values: LB: {}  UB: {}'.format(
                solve_data.LB, solve_data.UB))
            solve_data.results.solver.termination_condition = tc.maxTimeLimit
            return True

        # Handle current node
        if not node_data.is_screened:
            # Node has not been evaluated.
            solve_data.explored_nodes += 1
            new_node_data = _prescreen_node(node_data, node_model, solve_data)
            heappush(
                queue,
                (new_node_data, node_model))  # replace with updated node data
        elif node_data.obj_lb < node_data.obj_ub - config.bound_tolerance and not node_data.is_evaluated:
            # Node has not been fully evaluated.
            # Note: infeasible and unbounded nodes will skip this condition, because of strict inequality
            new_node_data = _evaluate_node(node_data, node_model, solve_data)
            heappush(
                queue,
                (new_node_data, node_model))  # replace with updated node data
        elif node_data.num_unbranched_disjunctions == 0 or node_data.obj_lb == float(
                'inf'):
            # We have reached a leaf node, or the best available node is infeasible.
            original_model = solve_data.original_model
            copy_var_list_values(
                from_list=node_model.GDPopt_utils.variable_list,
                to_list=original_model.GDPopt_utils.variable_list,
                config=config,
            )

            solve_data.LB = node_data.obj_lb if solve_data.objective_sense == minimize else -node_data.obj_ub
            solve_data.UB = node_data.obj_ub if solve_data.objective_sense == minimize else -node_data.obj_lb
            solve_data.master_iteration = solve_data.explored_nodes
            if node_data.obj_lb == float('inf'):
                solve_data.results.solver.termination_condition = tc.infeasible
            elif node_data.obj_ub == float('-inf'):
                solve_data.results.solver.termination_condition = tc.unbounded
            else:
                solve_data.results.solver.termination_condition = tc.optimal
            return
        else:
            _branch_on_node(node_data, node_model, solve_data)
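
Nodes are kept in a heap keyed by a sortable node-data tuple, so the node with the smallest objective lower bound is popped first. A minimal heapq sketch of that queue (field names are assumptions, not GDPopt-LBB's actual BBNodeData):

from heapq import heappush, heappop
from collections import namedtuple

NodeData = namedtuple('NodeData', ['obj_lb', 'num_unbranched', 'node_count'])

queue = []
heappush(queue, (NodeData(obj_lb=5.0, num_unbranched=2, node_count=1), 'node-1'))
heappush(queue, (NodeData(obj_lb=3.0, num_unbranched=4, node_count=2), 'node-2'))
heappush(queue, (NodeData(obj_lb=3.0, num_unbranched=1, node_count=3), 'node-3'))

# Tuples compare field by field: the smallest obj_lb wins, ties are broken by
# fewer unbranched disjunctions, so 'node-3' is explored first.
node_data, node_model = heappop(queue)
print(node_data, node_model)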
Example #20
def solve_main(solve_data, config, fp=False, regularization_problem=False):
    """This function solves the MIP main problem.

    Args:
        solve_data (MindtPySolveData): data container that holds solve-instance data.
        config (ConfigBlock): the specific configurations for MindtPy.
        fp (bool, optional): whether it is in the loop of feasibility pump. Defaults to False.
        regularization_problem (bool, optional): whether it is solving a regularization problem. Defaults to False.

    Returns:
        solve_data.mip (Pyomo model): the MIP stored in solve_data.
        main_mip_results (SolverResults): results from solving the main MIP.
    """
    if not fp and not regularization_problem:
        solve_data.mip_iter += 1

    # setup main problem
    setup_main(solve_data, config, fp, regularization_problem)
    mainopt = set_up_mip_solver(solve_data, config, regularization_problem)

    mip_args = dict(config.mip_solver_args)
    if config.mip_solver in {
            'cplex', 'cplex_persistent', 'gurobi', 'gurobi_persistent'
    }:
        mip_args['warmstart'] = True
    set_solver_options(mainopt,
                       solve_data,
                       config,
                       solver_type='mip',
                       regularization=regularization_problem)
    try:
        with time_code(
                solve_data.timing,
                'regularization main' if regularization_problem else
            ('fp main' if fp else 'main')):
            main_mip_results = mainopt.solve(solve_data.mip,
                                             tee=config.mip_solver_tee,
                                             **mip_args)
    except (ValueError, AttributeError):
        if config.single_tree:
            config.logger.warning('Single-tree search terminated.')
            if get_main_elapsed_time(
                    solve_data.timing) >= config.time_limit - 2:
                config.logger.warning('due to the time limit.')
                solve_data.results.solver.termination_condition = tc.maxTimeLimit
            if config.strategy == 'GOA' or config.add_no_good_cuts:
                config.logger.warning(
                    'ValueError: Cannot load a SolverResults object with bad status: error. '
                    'MIP solver failed. This usually happens in the single-tree GOA algorithm. '
                    "No-good cuts are added and GOA algorithm doesn't converge within the time limit. "
                    'No integer solution is found, so the cplex solver will report an error status. '
                )
        return None, None
    if config.solution_pool:
        main_mip_results._solver_model = mainopt._solver_model
        main_mip_results._pyomo_var_to_solver_var_map = mainopt._pyomo_var_to_solver_var_map
    if main_mip_results.solver.termination_condition is tc.optimal:
        if config.single_tree and not config.add_no_good_cuts and not regularization_problem:
            uptade_suboptimal_dual_bound(solve_data, main_mip_results)
        if regularization_problem:
            config.logger.info(
                solve_data.log_formatter.format(
                    solve_data.mip_iter,
                    'Reg ' + solve_data.regularization_mip_type,
                    value(solve_data.mip.MindtPy_utils.loa_proj_mip_obj),
                    solve_data.LB, solve_data.UB, solve_data.rel_gap,
                    get_main_elapsed_time(solve_data.timing)))

    elif main_mip_results.solver.termination_condition is tc.infeasibleOrUnbounded:
        # Linear solvers will sometimes tell me that it's infeasible or
        # unbounded during presolve, but fails to distinguish. We need to
        # resolve with a solver option flag on.
        main_mip_results, _ = distinguish_mip_infeasible_or_unbounded(
            solve_data.mip, config)
        return solve_data.mip, main_mip_results

    if regularization_problem:
        solve_data.mip.MindtPy_utils.objective_constr.deactivate()
        solve_data.mip.MindtPy_utils.del_component('loa_proj_mip_obj')
        solve_data.mip.MindtPy_utils.cuts.del_component('obj_reg_estimate')
        if config.add_regularization == 'level_L1':
            solve_data.mip.MindtPy_utils.del_component('L1_obj')
        elif config.add_regularization == 'level_L_infinity':
            solve_data.mip.MindtPy_utils.del_component('L_infinity_obj')

    return solve_data.mip, main_mip_results
Example #21
def algorithm_should_terminate(solve_data, config, check_cycling):
    """
    Checks if the algorithm should terminate at the given point

    This function determines whether the algorithm should terminate based on the solver options and progress.
    (Sets the solve_data.results.solver.termination_condition to the appropriate condition, i.e. optimal,
    maxIterations, maxTimeLimit)

    Parameters
    ----------
    solve_data: MindtPy Data Container
        data container that holds solve-instance data
    config: ConfigBlock
        contains the specific configurations for the algorithm
    check_cycling: bool
        check for a special case that causes a binary variable to loop through the same values

    Returns
    -------
    boolean
        True if the algorithm should terminate else returns False
    """

    # Check bound convergence
    if solve_data.LB + config.bound_tolerance >= solve_data.UB:
        config.logger.info('MindtPy exiting on bound convergence. '
                           'LB: {} + (tol {}) >= UB: {}\n'.format(
                               solve_data.LB, config.bound_tolerance,
                               solve_data.UB))
        solve_data.results.solver.termination_condition = tc.optimal
        return True

    # Check iteration limit
    if solve_data.mip_iter >= config.iteration_limit:
        config.logger.info('MindtPy unable to converge bounds '
                           'after {} master iterations.'.format(
                               solve_data.mip_iter))
        config.logger.info('Final bound values: LB: {}  UB: {}'.format(
            solve_data.LB, solve_data.UB))
        if config.single_tree:
            solve_data.results.solver.termination_condition = tc.feasible
        else:
            solve_data.results.solver.termination_condition = tc.maxIterations
        return True

    # Check time limit
    if get_main_elapsed_time(solve_data.timing) >= config.time_limit:
        config.logger.info('MindtPy unable to converge bounds '
                           'before time limit of {} seconds. '
                           'Elapsed: {} seconds'.format(
                               config.time_limit,
                               get_main_elapsed_time(solve_data.timing)))
        config.logger.info('Final bound values: LB: {}  UB: {}'.format(
            solve_data.LB, solve_data.UB))
        solve_data.results.solver.termination_condition = tc.maxTimeLimit
        return True

    # Check if algorithm is stalling
    if len(solve_data.LB_progress) >= config.stalling_limit:
        if abs(solve_data.LB_progress[-1] -
               solve_data.LB_progress[-config.stalling_limit]
               ) <= config.zero_tolerance:
            config.logger.info('Algorithm is not making enough progress. '
                               'Exiting iteration loop.')
            config.logger.info('Final bound values: LB: {}  UB: {}'.format(
                solve_data.LB, solve_data.UB))
            if solve_data.best_solution_found is not None:
                solve_data.results.solver.termination_condition = tc.feasible
            else:
                solve_data.best_solution_found = solve_data.working_model.clone(
                )
                config.logger.warning(
                    'Algorithm did not find a feasible solution. '
                    'Returning best bound solution. Consider increasing stalling_limit or bound_tolerance.'
                )
                solve_data.results.solver.termination_condition = tc.noSolution

            return True

    if config.strategy == 'ECP':
        # check to see if the nonlinear constraints are satisfied
        MindtPy = solve_data.working_model.MindtPy_utils
        nonlinear_constraints = [
            c for c in MindtPy.constraint_list
            if c.body.polynomial_degree() not in (1, 0)
        ]
        for nlc in nonlinear_constraints:
            if nlc.has_lb():
                try:
                    lower_slack = nlc.lslack()
                except (ValueError, OverflowError):
                    lower_slack = -10
                    # Avoid hard-coded fallback values here; try some factor of ecp_tolerance instead.
                if lower_slack < -config.ecp_tolerance:
                    config.logger.info(
                        'MindtPy-ECP continuing as {} has not yet satisfied '
                        'the nonlinear constraints.'
                        '\n'.format(nlc))
                    return False
            if nlc.has_ub():
                try:
                    upper_slack = nlc.uslack()
                except (ValueError, OverflowError):
                    upper_slack = -10
                if upper_slack < -config.ecp_tolerance:
                    config.logger.info(
                        'MindtPy-ECP continuing as {} has not yet satisfied '
                        'the nonlinear constraints.'
                        '\n'.format(nlc))
                    return False
        # For ECP, determine which bound to copy over (primal or dual).
        if solve_data.objective_sense == 1:
            solve_data.UB = solve_data.LB
        else:
            solve_data.LB = solve_data.UB
        config.logger.info(
            'MindtPy-ECP exiting on nonlinear constraints satisfaction. '
            'LB: {} UB: {}\n'.format(solve_data.LB, solve_data.UB))

        solve_data.best_solution_found = solve_data.working_model.clone()
        solve_data.results.solver.termination_condition = tc.optimal
        return True
    # Cycling check
    if config.cycling_check and solve_data.mip_iter >= 1 and check_cycling:
        temp = []
        for var in solve_data.mip.component_data_objects(ctype=Var):
            if var.is_integer():
                temp.append(int(round(var.value)))
        solve_data.curr_int_sol = temp

        if solve_data.curr_int_sol == solve_data.prev_int_sol:
            config.logger.info(
                'Cycling detected after {} master iterations. '
                'This issue occurs when the NLP subproblem violates a constraint qualification. '
                'Convergence to the optimal solution is not guaranteed.'.format(
                    solve_data.mip_iter))
            config.logger.info('Final bound values: LB: {}  UB: {}'.format(
                solve_data.LB, solve_data.UB))
            # TODO determine solve_data.LB, solve_data.UB is inf or -inf.
            solve_data.results.solver.termination_condition = tc.feasible
            return True

        solve_data.prev_int_sol = solve_data.curr_int_sol

    # if not algorithm_is_making_progress(solve_data, config):
    #     config.logger.debug(
    #         'Algorithm is not making enough progress. '
    #         'Exiting iteration loop.')
    #     return True
    return False
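
The cycling check above rounds the integer variables of the current master solution and compares them with the previous iteration's values. A standalone sketch of that comparison (illustrative only):

def detect_cycling(current_values, previous_int_solution):
    """Round the current integer-variable values and compare with the previous ones."""
    current_int_solution = [int(round(v)) for v in current_values]
    return current_int_solution == previous_int_solution, current_int_solution

cycled, current = detect_cycling([1.0, 0.0, 1.0], [1, 0, 1])
print(cycled)  # True: same integer assignment as the previous master iteration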
Example #22
def bound_fix(solve_data, config, last_iter_cuts):
    if config.single_tree:
        config.logger.info(
            'Fixing the bound to its value from the iteration before the optimal solution was found.'
        )
        if solve_data.results.problem.sense == ProblemSense.minimize:
            solve_data.LB = solve_data.stored_bound[solve_data.UB]
        else:
            solve_data.UB = solve_data.stored_bound[solve_data.LB]
    else:
        config.logger.info(
            'Solve the master problem without the last no-good cut to fix the bound. '
            'zero_tolerance is set to 1E-4.')
        config.zero_tolerance = 1E-4
        # Solve NLP subproblem
        # The constraint linearization happens in the handlers
        if last_iter_cuts is False:
            fixed_nlp, fixed_nlp_result = solve_NLP_subproblem(
                solve_data, config)
            if fixed_nlp_result.solver.termination_condition in {
                    tc.optimal, tc.locallyOptimal, tc.feasible
            }:
                handle_NLP_subproblem_optimal(fixed_nlp, solve_data, config)
            elif fixed_nlp_result.solver.termination_condition is tc.infeasible:
                handle_NLP_subproblem_infeasible(fixed_nlp, solve_data, config)
            else:
                handle_NLP_subproblem_other_termination(
                    fixed_nlp, fixed_nlp_result.solver.termination_condition,
                    solve_data, config)

        MindtPy = solve_data.mip.MindtPy_utils
        # only deactivate the last integer cut.
        if config.strategy == 'GOA':
            if solve_data.results.problem.sense == ProblemSense.minimize:
                valid_no_good_cuts_num = solve_data.num_no_good_cuts_added[
                    solve_data.UB]
            else:
                valid_no_good_cuts_num = solve_data.num_no_good_cuts_added[
                    solve_data.LB]
            for i in range(valid_no_good_cuts_num + 1,
                           len(MindtPy.MindtPy_linear_cuts.integer_cuts) + 1):
                MindtPy.MindtPy_linear_cuts.integer_cuts[i].deactivate()
        elif config.strategy == 'OA':
            MindtPy.MindtPy_linear_cuts.integer_cuts[len(
                MindtPy.MindtPy_linear_cuts.integer_cuts)].deactivate()
        # MindtPy.MindtPy_linear_cuts.oa_cuts.activate()
        masteropt = SolverFactory(config.mip_solver)
        # Determine whether a persistent solver is being used.
        if isinstance(masteropt, PersistentSolver):
            masteropt.set_instance(solve_data.mip, symbolic_solver_labels=True)
        mip_args = dict(config.mip_solver_args)
        elapsed = get_main_elapsed_time(solve_data.timing)
        remaining = int(max(config.time_limit - elapsed, 1))
        if config.mip_solver == 'gams':
            mip_args['add_options'] = mip_args.get('add_options', [])
            mip_args['add_options'].append('option optcr=0.001;')
            # Forward the remaining time budget to GAMS as well.
            mip_args['add_options'].append('option reslim=%s;' % remaining)
        if config.threads > 0:
            masteropt.options["threads"] = config.threads
        master_mip_results = masteropt.solve(solve_data.mip,
                                             tee=config.solver_tee,
                                             **mip_args)
        main_objective = next(
            solve_data.working_model.component_data_objects(Objective,
                                                            active=True))
        if main_objective.sense == minimize:
            solve_data.LB = max([master_mip_results.problem.lower_bound] +
                                solve_data.LB_progress[:-1])
            solve_data.LB_progress.append(solve_data.LB)
        else:
            solve_data.UB = min([master_mip_results.problem.upper_bound] +
                                solve_data.UB_progress[:-1])
            solve_data.UB_progress.append(solve_data.UB)
        config.logger.info('Fixed bound values: LB: {}  UB: {}'.format(
            solve_data.LB, solve_data.UB))
        # Check bound convergence
        if solve_data.LB + config.bound_tolerance >= solve_data.UB:
            solve_data.results.solver.termination_condition = tc.optimal
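
The cut-deactivation step above relies on Pyomo ConstraintList components being 1-indexed, so the most recently added cut lives at integer_cuts[len(integer_cuts)]. A minimal, self-contained sketch of that indexing on a toy model (the component names are illustrative only):

from pyomo.environ import ConcreteModel, ConstraintList, Var

m = ConcreteModel()
m.x = Var()
m.cuts = ConstraintList()
for k in range(3):
    m.cuts.add(m.x >= k)          # cuts are stored at indices 1, 2, 3

# Deactivate only the most recently added cut, as bound_fix does above.
m.cuts[len(m.cuts)].deactivate()
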
Ejemplo n.º 23
0
def solve_main(solve_data, config, fp=False, regularization_problem=False):
    """
    This function solves the MIP main problem

    Parameters
    ----------
    solve_data: MindtPy Data Container
        data container that holds solve-instance data
    config: ConfigBlock
        contains the specific configurations for the algorithm
    fp: Bool
        whether to set up and solve the feasibility pump main problem
    regularization_problem: Bool
        whether to set up and solve the ROA regularization main problem

    Returns
    -------
    solve_data.mip: Pyomo model
        the MIP stored in solve_data
    main_mip_results: Pyomo results object
        result from solving the main MIP
    """
    if fp:
        config.logger.info('FP-MIP %s: Solve main problem.' %
                           (solve_data.fp_iter,))
    elif regularization_problem:
        config.logger.info('Regularization-MIP %s: Solve main regularization problem.' %
                           (solve_data.mip_iter,))
    else:
        solve_data.mip_iter += 1
        config.logger.info('MIP %s: Solve main problem.' %
                           (solve_data.mip_iter,))

    # setup main problem
    setup_main(solve_data, config, fp, regularization_problem)
    mainopt = setup_mip_solver(solve_data, config, regularization_problem)

    mip_args = dict(config.mip_solver_args)
    if config.mip_solver in {'cplex', 'cplex_persistent', 'gurobi', 'gurobi_persistent'}:
        mip_args['warmstart'] = True
    set_solver_options(mainopt, solve_data, config,
                       solver_type='mip', regularization=regularization_problem)
    try:
        with time_code(solve_data.timing, 'regularization main' if regularization_problem else ('fp main' if fp else 'main')):
            main_mip_results = mainopt.solve(solve_data.mip,
                                             tee=config.mip_solver_tee, **mip_args)
    except (ValueError, AttributeError):
        if config.single_tree:
            config.logger.warning('Single tree terminated.')
            if get_main_elapsed_time(solve_data.timing) >= config.time_limit - 2:
                config.logger.warning('Termination is due to the time limit.')
                solve_data.results.solver.termination_condition = tc.maxTimeLimit
            if config.strategy == 'GOA' or config.add_no_good_cuts:
                config.logger.warning('ValueError: Cannot load a SolverResults object with bad status: error. '
                                      'MIP solver failed. This usually happens in the single-tree GOA algorithm. '
                                      "No-good cuts are added and GOA algorithm doesn't converge within the time limit. "
                                      'No integer solution is found, so the cplex solver will report an error status. ')
        return None, None
    if main_mip_results.solver.termination_condition is tc.optimal:
        if config.single_tree and not config.add_no_good_cuts and not regularization_problem:
            if solve_data.objective_sense == minimize:
                solve_data.LB = max(
                    main_mip_results.problem.lower_bound, solve_data.LB)
                solve_data.bound_improved = solve_data.LB > solve_data.LB_progress[-1]
                solve_data.LB_progress.append(solve_data.LB)
            else:
                solve_data.UB = min(
                    main_mip_results.problem.upper_bound, solve_data.UB)
                solve_data.bound_improved = solve_data.UB < solve_data.UB_progress[-1]
                solve_data.UB_progress.append(solve_data.UB)

    elif main_mip_results.solver.termination_condition is tc.infeasibleOrUnbounded:
        # Linear solvers will sometimes report that the problem is infeasible or
        # unbounded during presolve, but fail to distinguish between the two.
        # We need to re-solve with a solver option flag turned on.
        main_mip_results, _ = distinguish_mip_infeasible_or_unbounded(
            solve_data.mip, config)
        return solve_data.mip, main_mip_results

    if regularization_problem:
        solve_data.mip.MindtPy_utils.objective_constr.deactivate()
        solve_data.mip.MindtPy_utils.del_component('loa_proj_mip_obj')
        solve_data.mip.MindtPy_utils.cuts.del_component('obj_reg_estimate')
        if config.add_regularization == 'level_L1':
            solve_data.mip.MindtPy_utils.del_component('L1_obj')
        elif config.add_regularization == 'level_L_infinity':
            solve_data.mip.MindtPy_utils.del_component(
                'L_infinity_obj')

    return solve_data.mip, main_mip_results
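
solve_main passes warmstart=True to the CPLEX and Gurobi interfaces so that the current variable values are handed to the MIP solver as a starting point. A minimal sketch of that hand-off, assuming a locally installed CPLEX (the solve call is left commented out for that reason):

from pyomo.environ import (ConcreteModel, Constraint, Integers, Objective,
                           SolverFactory, Var, minimize)

m = ConcreteModel()
m.y = Var(domain=Integers, bounds=(0, 10))
m.c = Constraint(expr=m.y >= 3)
m.obj = Objective(expr=m.y, sense=minimize)

m.y.set_value(4)   # incumbent value used as the warm start
# results = SolverFactory('cplex').solve(m, warmstart=True)  # requires CPLEX
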
Ejemplo n.º 24
0
    def solve(self, model, **kwds):
        config = self.CONFIG(kwds.pop('options', {}))
        config.set_value(kwds)

        # Validate model to be used with gdpbb
        self.validate_model(model)
        # Set solver as an MINLP
        solve_data = GDPbbSolveData()
        solve_data.timing = Container()
        solve_data.original_model = model
        solve_data.results = SolverResults()

        old_logger_level = config.logger.getEffectiveLevel()
        with time_code(solve_data.timing, 'total', is_main_timer=True), \
                restore_logger_level(config.logger), \
                create_utility_block(model, 'GDPbb_utils', solve_data):
            if config.tee and old_logger_level > logging.INFO:
                # If the logger does not already include INFO, include it.
                config.logger.setLevel(logging.INFO)
            config.logger.info(
                "Starting GDPbb version %s using %s as subsolver"
                % (".".join(map(str, self.version())), config.solver)
            )

            # Setup results
            solve_data.results.solver.name = 'GDPbb - %s' % (str(config.solver))
            setup_results_object(solve_data, config)

            # clone original model for root node of branch and bound
            root = solve_data.working_model = solve_data.original_model.clone()

            # get objective sense
            process_objective(solve_data, config)
            objectives = solve_data.original_model.component_data_objects(Objective, active=True)
            obj = next(objectives, None)
            obj_sign = 1 if obj.sense == minimize else -1
            solve_data.results.problem.sense = obj.sense

            # set up lists to keep track of which disjunctions have been covered.

            # this list keeps track of the relaxed disjunctions
            root.GDPbb_utils.unenforced_disjunctions = list(
                disjunction for disjunction in root.GDPbb_utils.disjunction_list if disjunction.active
            )

            root.GDPbb_utils.deactivated_constraints = ComponentSet([
                constr for disjunction in root.GDPbb_utils.unenforced_disjunctions
                for disjunct in disjunction.disjuncts
                for constr in disjunct.component_data_objects(ctype=Constraint, active=True)
                if constr.body.polynomial_degree() not in (1, 0)
            ])
            # Deactivate nonlinear constraints in unenforced disjunctions
            for constr in root.GDPbb_utils.deactivated_constraints:
                constr.deactivate()

            # Add the BigM suffix if it does not already exist. Used later during nonlinear constraint activation.
            if not hasattr(root, 'BigM'):
                root.BigM = Suffix()

            # Pre-screen that none of the disjunctions are already predetermined due to the disjuncts being fixed
            # to True/False values.
            # TODO this should also be done within the loop, but we aren't handling it right now.
            # Should affect efficiency, but not correctness.
            root.GDPbb_utils.disjuncts_fixed_True = ComponentSet()
            # Only find top-level (non-nested) disjunctions
            for disjunction in root.component_data_objects(Disjunction, active=True):
                fixed_true_disjuncts = [disjunct for disjunct in disjunction.disjuncts
                                        if disjunct.indicator_var.fixed
                                        and disjunct.indicator_var.value == 1]
                fixed_false_disjuncts = [disjunct for disjunct in disjunction.disjuncts
                                         if disjunct.indicator_var.fixed
                                         and disjunct.indicator_var.value == 0]
                for disjunct in fixed_false_disjuncts:
                    disjunct.deactivate()
                if len(fixed_false_disjuncts) == len(disjunction.disjuncts) - 1:
                    # All but one disjunct in the disjunction are fixed to False; the remaining one must be True.
                    if not fixed_true_disjuncts:
                        fixed_true_disjuncts = [disjunct for disjunct in disjunction.disjuncts
                                                if disjunct not in fixed_false_disjuncts]
                # Reactivate the fixed-true disjuncts
                for disjunct in fixed_true_disjuncts:
                    newly_activated = ComponentSet()
                    for constr in disjunct.component_data_objects(Constraint):
                        if constr in root.GDPbb_utils.deactivated_constraints:
                            newly_activated.add(constr)
                            constr.activate()
                            # Set the big M value for the constraint
                            root.BigM[constr] = 1
                            # Note: we use a default big M value of 1
                            # because all non-selected disjuncts should be deactivated.
                            # Therefore, none of the big M transformed nonlinear constraints will need to be relaxed.
                            # The default M value should therefore be irrelevant.
                    root.GDPbb_utils.deactivated_constraints -= newly_activated
                    root.GDPbb_utils.disjuncts_fixed_True.add(disjunct)

                if fixed_true_disjuncts:
                    assert disjunction.xor, "GDPbb only handles disjunctions in which one term can be selected. " \
                        "%s violates this assumption." % (disjunction.name, )
                    root.GDPbb_utils.unenforced_disjunctions.remove(disjunction)

            # Check satisfiability
            if config.check_sat and satisfiable(root, config.logger) is False:
                # Problem is not satisfiable. Problem is infeasible.
                obj_value = obj_sign * float('inf')
            else:
                # solve the root node
                config.logger.info("Solving the root node.")
                obj_value, result, var_values = self.subproblem_solve(root, config)

            if obj_sign * obj_value == float('inf'):
                config.logger.info("Model was found to be infeasible at the root node. Elapsed %.2f seconds."
                                   % get_main_elapsed_time(solve_data.timing))
                if solve_data.results.problem.sense == minimize:
                    solve_data.results.problem.lower_bound = float('inf')
                    solve_data.results.problem.upper_bound = None
                else:
                    solve_data.results.problem.lower_bound = None
                    solve_data.results.problem.upper_bound = float('-inf')
                solve_data.results.solver.timing = solve_data.timing
                solve_data.results.solver.iterations = 0
                solve_data.results.solver.termination_condition = tc.infeasible
                return solve_data.results

            # initialize minheap for Branch and Bound algorithm
            # Heap structure: (ordering tuple, model)
            # Ordering tuple: (objective value, disjunctions_left, -total_nodes_counter)
            #  - select solutions with lower objective value,
            #    then fewer disjunctions left to explore (depth first),
            #    then more recently encountered (tiebreaker)
            heap = []
            total_nodes_counter = 0
            disjunctions_left = len(root.GDPbb_utils.unenforced_disjunctions)
            heapq.heappush(
                heap, (
                    (obj_sign * obj_value, disjunctions_left, -total_nodes_counter),
                    root, result, var_values))
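            # Illustration of the ordering with hypothetical keys: heapq always pops
            # the lexicographically smallest tuple, so (3.5, 2, -7) is explored before
            # (3.5, 3, -4) (fewer disjunctions left) and before (4.0, 1, -9) (lower
            # objective value); ties on both fall back to the more recently created node.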

            # loop to branch through the tree
            while len(heap) > 0:
                # pop best model off of heap
                sort_tuple, incumbent_model, incumbent_results, incumbent_var_values = heapq.heappop(heap)
                incumbent_obj_value, disjunctions_left, _ = sort_tuple

                config.logger.info("Exploring node with LB %.10g and %s inactive disjunctions." % (
                    incumbent_obj_value, disjunctions_left
                ))

                # If no relaxed disjunctions remain, every disjunction has been
                # enforced; copy the incumbent solution over and return it.
                if disjunctions_left == 0:
                    config.logger.info("Model solved.")
                    # Model is solved. Copy over solution values.
                    original_model = solve_data.original_model
                    for orig_var, val in zip(original_model.GDPbb_utils.variable_list, incumbent_var_values):
                        orig_var.value = val

                    solve_data.results.problem.lower_bound = incumbent_results.problem.lower_bound
                    solve_data.results.problem.upper_bound = incumbent_results.problem.upper_bound
                    solve_data.results.solver.timing = solve_data.timing
                    solve_data.results.solver.iterations = total_nodes_counter
                    solve_data.results.solver.termination_condition = incumbent_results.solver.termination_condition
                    return solve_data.results

                # Pick the next disjunction to branch on
                next_disjunction = incumbent_model.GDPbb_utils.unenforced_disjunctions[0]
                config.logger.info("Branching on disjunction %s" % next_disjunction.name)
                assert next_disjunction.xor, "GDPbb only handles disjunctions in which one term can be selected. " \
                    "%s violates this assumption." % (next_disjunction.name, )

                new_nodes_counter = 0

                for i, disjunct in enumerate(next_disjunction.disjuncts):
                    # Create one branch for each of the disjuncts on the disjunction

                    if any(disj.indicator_var.fixed and disj.indicator_var.value == 1
                           for disj in next_disjunction.disjuncts if disj is not disjunct):
                        # If any other disjunct is fixed to 1 and an xor relationship applies,
                        # then this disjunct cannot be activated.
                        continue

                    # Check time limit
                    if get_main_elapsed_time(solve_data.timing) >= config.time_limit:
                        if solve_data.results.problem.sense == minimize:
                            solve_data.results.problem.lower_bound = incumbent_obj_value
                            solve_data.results.problem.upper_bound = float('inf')
                        else:
                            solve_data.results.problem.lower_bound = float('-inf')
                            solve_data.results.problem.upper_bound = incumbent_obj_value
                        config.logger.info(
                            'GDPopt unable to converge bounds '
                            'before time limit of {} seconds. '
                            'Elapsed: {} seconds'
                            .format(config.time_limit, get_main_elapsed_time(solve_data.timing)))
                        config.logger.info(
                            'Final bound values: LB: {}  UB: {}'.
                            format(solve_data.results.problem.lower_bound, solve_data.results.problem.upper_bound))
                        solve_data.results.solver.timing = solve_data.timing
                        solve_data.results.solver.iterations = total_nodes_counter
                        solve_data.results.solver.termination_condition = tc.maxTimeLimit
                        return solve_data.results

                    # Branch on the disjunct
                    child = incumbent_model.clone()
                    # TODO I am leaving the old branching system in place, but there should be
                    # something better, ideally that deals with nested disjunctions as well.
                    disjunction_to_branch = child.GDPbb_utils.unenforced_disjunctions.pop(0)
                    child_disjunct = disjunction_to_branch.disjuncts[i]
                    child_disjunct.indicator_var.fix(1)
                    # Deactivate (and fix to 0) other disjuncts on the disjunction
                    for disj in disjunction_to_branch.disjuncts:
                        if disj is not child_disjunct:
                            disj.deactivate()
                    # Activate nonlinear constraints on the newly fixed child disjunct
                    newly_activated = ComponentSet()
                    for constr in child_disjunct.component_data_objects(Constraint):
                        if constr in child.GDPbb_utils.deactivated_constraints:
                            newly_activated.add(constr)
                            constr.activate()
                            # Set the big M value for the constraint
                            child.BigM[constr] = 1
                            # Note: we use a default big M value of 1
                            # because all non-selected disjuncts should be deactivated.
                            # Therefore, none of the big M transformed nonlinear constraints will need to be relaxed.
                            # The default M value should therefore be irrelevant.
                    child.GDPbb_utils.deactivated_constraints -= newly_activated
                    child.GDPbb_utils.disjuncts_fixed_True.add(child_disjunct)

                    if disjunct in incumbent_model.GDPbb_utils.disjuncts_fixed_True:
                        # If the disjunct was already branched to True from a parent disjunct branching, just pass
                        # through the incumbent value without resolving. The solution should be the same as the parent.
                        total_nodes_counter += 1
                        ordering_tuple = (obj_sign * incumbent_obj_value, disjunctions_left - 1, -total_nodes_counter)
                        heapq.heappush(heap, (ordering_tuple, child, incumbent_results, incumbent_var_values))
                        new_nodes_counter += 1
                        continue

                    if config.check_sat and satisfiable(child, config.logger) is False:
                        # Problem is not satisfiable. Skip this disjunct.
                        continue

                    obj_value, result, var_values = self.subproblem_solve(child, config)
                    total_nodes_counter += 1
                    ordering_tuple = (obj_sign * obj_value, disjunctions_left - 1, -total_nodes_counter)
                    heapq.heappush(heap, (ordering_tuple, child, result, var_values))
                    new_nodes_counter += 1

                config.logger.info("Added %s new nodes with %s relaxed disjunctions to the heap. Size now %s." % (
                    new_nodes_counter, disjunctions_left - 1, len(heap)))
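
The branching step above fixes one disjunct's indicator variable and deactivates its siblings on a cloned child model. A minimal Pyomo GDP sketch of that manipulation on a toy two-disjunct model (all names are illustrative):

from pyomo.environ import ConcreteModel, Constraint, Var
from pyomo.gdp import Disjunct, Disjunction

m = ConcreteModel()
m.x = Var(bounds=(0, 10))
m.d1 = Disjunct()
m.d1.c = Constraint(expr=m.x <= 2)
m.d2 = Disjunct()
m.d2.c = Constraint(expr=m.x >= 8)
m.choice = Disjunction(expr=[m.d1, m.d2])

# "Branch" on d1: fix its indicator variable and deactivate the sibling disjunct
# (older Pyomo versions use fix(1), as in the snippet above).
m.d1.indicator_var.fix(True)
m.d2.deactivate()
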
Ejemplo n.º 25
0
def solve_NLP(nlp_model, solve_data, config):
    """Solve the NLP subproblem."""
    config.logger.info('Solving nonlinear subproblem for '
                       'fixed binaries and logical realizations.')

    # Error checking for unfixed discrete variables
    unfixed_discrete_vars = detect_unfixed_discrete_vars(nlp_model)
    assert len(unfixed_discrete_vars) == 0, \
        "Unfixed discrete variables exist on the NLP subproblem: {0}".format(
        list(v.name for v in unfixed_discrete_vars))

    GDPopt = nlp_model.GDPopt_utils

    initialize_subproblem(nlp_model, solve_data)

    # Callback immediately before solving NLP subproblem
    config.call_before_subproblem_solve(nlp_model, solve_data)

    nlp_solver = SolverFactory(config.nlp_solver)
    if not nlp_solver.available():
        raise RuntimeError("NLP solver %s is not available." %
                           config.nlp_solver)
    with SuppressInfeasibleWarning():
        try:
            nlp_args = dict(config.nlp_solver_args)
            elapsed = get_main_elapsed_time(solve_data.timing)
            remaining = max(config.time_limit - elapsed, 1)
            if config.nlp_solver == 'gams':
                nlp_args['add_options'] = nlp_args.get('add_options', [])
                nlp_args['add_options'].append('option reslim=%s;' % remaining)
            elif config.nlp_solver == 'multisolve':
                nlp_args['time_limit'] = min(
                    nlp_args.get('time_limit', float('inf')), remaining)
            results = nlp_solver.solve(nlp_model, **nlp_args)
        except ValueError as err:
            if 'Cannot load a SolverResults object with bad status: error' in str(
                    err):
                results = SolverResults()
                results.solver.termination_condition = tc.error
                results.solver.message = str(err)
            else:
                raise

    nlp_result = SubproblemResult()
    nlp_result.feasible = True
    nlp_result.var_values = list(v.value for v in GDPopt.variable_list)
    nlp_result.pyomo_results = results
    nlp_result.dual_values = list(
        nlp_model.dual.get(c, None) for c in GDPopt.constraint_list)

    term_cond = results.solver.termination_condition
    if any(term_cond == cond
           for cond in (tc.optimal, tc.locallyOptimal, tc.feasible)):
        pass
    elif term_cond == tc.infeasible:
        config.logger.info('NLP subproblem was infeasible.')
        nlp_result.feasible = False
    elif term_cond == tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'NLP subproblem failed to converge within iteration limit.')
        if is_feasible(nlp_model, config):
            config.logger.info(
                'NLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            nlp_result.feasible = False
    elif term_cond == tc.internalSolverError:
        # Possible that IPOPT had a restoration failure
        config.logger.info("NLP solver had an internal failure: %s" %
                           results.solver.message)
        nlp_result.feasible = False
    elif (term_cond == tc.other
          and "Too few degrees of freedom" in str(results.solver.message)):
        # Possible IPOPT degrees of freedom error
        config.logger.info("IPOPT has too few degrees of freedom: %s" %
                           results.solver.message)
        nlp_result.feasible = False
    elif term_cond == tc.other:
        config.logger.info(
            "NLP solver had a termination condition of 'other': %s" %
            results.solver.message)
        nlp_result.feasible = False
    elif term_cond == tc.error:
        config.logger.info(
            "NLP solver had a termination condition of 'error': %s" %
            results.solver.message)
        nlp_result.feasible = False
    elif term_cond == tc.maxTimeLimit:
        config.logger.info(
            "NLP solver ran out of time. Assuming infeasible for now.")
        nlp_result.feasible = False
    else:
        raise ValueError('GDPopt unable to handle NLP subproblem termination '
                         'condition of %s. Results: %s' % (term_cond, results))

    # Call the NLP post-solve callback
    config.call_after_subproblem_solve(nlp_model, solve_data)

    # if feasible, call the NLP post-feasible callback
    if nlp_result.feasible:
        config.call_after_subproblem_feasible(nlp_model, solve_data)

    return nlp_result
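
Several of these routines cap the subsolver's own time limit by whatever remains of the overall budget, with a floor of one second. A small sketch of that pattern using plain Python values (the 600-second limit and the solver names are assumptions for illustration):

import time

start = time.time()
overall_time_limit = 600                      # assumed overall limit, in seconds
elapsed = time.time() - start
remaining = max(overall_time_limit - elapsed, 1)

nlp_solver = 'gams'                           # assumed subsolver choice
nlp_args = {}
if nlp_solver == 'gams':
    # GAMS takes its time limit through the 'reslim' option string.
    nlp_args.setdefault('add_options', []).append('option reslim=%s;' % remaining)
elif nlp_solver == 'multisolve':
    nlp_args['time_limit'] = min(nlp_args.get('time_limit', float('inf')), remaining)
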
Ejemplo n.º 26
0
def init_rNLP(solve_data, config):
    """Initialize the problem by solving the relaxed NLP and then store the optimal variable
    values obtained from solving the rNLP.

    Parameters
    ----------
    solve_data : MindtPySolveData
        Data container that holds solve-instance data.
    config : ConfigBlock
        The specific configurations for MindtPy.

    Raises
    ------
    ValueError
        MindtPy unable to handle the termination condition of the relaxed NLP.
    """
    m = solve_data.working_model.clone()
    config.logger.debug('Relaxed NLP: Solve relaxed integrality')
    MindtPy = m.MindtPy_utils
    TransformationFactory('core.relax_integer_vars').apply_to(m)
    nlp_args = dict(config.nlp_solver_args)
    nlpopt = SolverFactory(config.nlp_solver)
    set_solver_options(nlpopt, solve_data, config, solver_type='nlp')
    with SuppressInfeasibleWarning():
        results = nlpopt.solve(m, tee=config.nlp_solver_tee, **nlp_args)
    subprob_terminate_cond = results.solver.termination_condition
    if subprob_terminate_cond in {tc.optimal, tc.feasible, tc.locallyOptimal}:
        main_objective = MindtPy.objective_list[-1]
        if subprob_terminate_cond == tc.optimal:
            update_dual_bound(solve_data, value(main_objective.expr))
        else:
            config.logger.info('Relaxed NLP was not solved to optimality.')
            update_suboptimal_dual_bound(solve_data, results)
        dual_values = list(
            m.dual[c] for c in
            MindtPy.constraint_list) if config.calculate_dual else None
        config.logger.info(
            solve_data.log_formatter.format(
                '-', 'Relaxed NLP', value(main_objective.expr), solve_data.LB,
                solve_data.UB, solve_data.rel_gap,
                get_main_elapsed_time(solve_data.timing)))
        # Add OA cut
        if config.strategy in {'OA', 'GOA', 'FP'}:
            copy_var_list_values(m.MindtPy_utils.variable_list,
                                 solve_data.mip.MindtPy_utils.variable_list,
                                 config,
                                 ignore_integrality=True)
            if config.init_strategy == 'FP':
                copy_var_list_values(
                    m.MindtPy_utils.variable_list,
                    solve_data.working_model.MindtPy_utils.variable_list,
                    config,
                    ignore_integrality=True)
            if config.strategy in {'OA', 'FP'}:
                add_oa_cuts(solve_data.mip, dual_values, solve_data, config)
            elif config.strategy == 'GOA':
                add_affine_cuts(solve_data, config)
            for var in solve_data.mip.MindtPy_utils.discrete_variable_list:
                # We don't want to trigger the reset of the global stale
                # indicator, so we will set this variable to be "stale",
                # knowing that set_value will switch it back to "not
                # stale"
                var.stale = True
                var.set_value(int(round(var.value)), skip_validation=True)
    elif subprob_terminate_cond in {tc.infeasible, tc.noSolution}:
        # TODO fail? try something else?
        config.logger.info('Initial relaxed NLP problem is infeasible. '
                           'Problem may be infeasible.')
    elif subprob_terminate_cond is tc.maxTimeLimit:
        config.logger.info(
            'NLP subproblem failed to converge within time limit.')
        solve_data.results.solver.termination_condition = tc.maxTimeLimit
    elif subprob_terminate_cond is tc.maxIterations:
        config.logger.info(
            'NLP subproblem failed to converge within iteration limit.')
    else:
        raise ValueError(
            'MindtPy unable to handle relaxed NLP termination condition '
            'of %s. Solver message: %s' %
            (subprob_terminate_cond, results.solver.message))
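
init_rNLP relies on the 'core.relax_integer_vars' transformation to drop integrality while keeping variable bounds, so the relaxed problem can be passed to an NLP solver. A small standalone sketch on a toy model:

from pyomo.environ import (ConcreteModel, Integers, Objective,
                           TransformationFactory, Var)

m = ConcreteModel()
m.y = Var(domain=Integers, bounds=(0, 5))
m.obj = Objective(expr=(m.y - 2.3) ** 2)

# Relax integrality: y becomes continuous but keeps its 0..5 bounds.
TransformationFactory('core.relax_integer_vars').apply_to(m)
print(m.y.domain, m.y.bounds)
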
Ejemplo n.º 27
0
def solve_linear_GDP(linear_GDP_model, solve_data, config):
    """Solves the linear GDP model and attempts to resolve solution issues."""
    m = linear_GDP_model
    GDPopt = m.GDPopt_utils
    # Transform disjunctions
    _bigm = TransformationFactory('gdp.bigm')
    _bigm.handlers[Port] = False
    _bigm.apply_to(m)

    preprocessing_transformations = [
        # # Propagate variable bounds
        # 'contrib.propagate_eq_var_bounds',
        # # Detect fixed variables
        # 'contrib.detect_fixed_vars',
        # # Propagate fixed variables
        # 'contrib.propagate_fixed_vars',
        # # Remove zero terms in linear expressions
        # 'contrib.remove_zero_terms',
        # # Remove terms in equal to zero summations
        # 'contrib.propagate_zero_sum',
        # # Transform bound constraints
        # 'contrib.constraints_to_var_bounds',
        # # Detect fixed variables
        # 'contrib.detect_fixed_vars',
        # # Remove terms in equal to zero summations
        # 'contrib.propagate_zero_sum',
        # Remove trivial constraints
        'contrib.deactivate_trivial_constraints',
    ]
    if config.mip_presolve:
        try:
            fbbt(m, integer_tol=config.integer_tolerance)
            for xfrm in preprocessing_transformations:
                TransformationFactory(xfrm).apply_to(m)
        except InfeasibleConstraintException:
            config.logger.debug("MIP preprocessing detected infeasibility.")
            mip_result = MasterProblemResult()
            mip_result.feasible = False
            mip_result.var_values = list(v.value for v in GDPopt.variable_list)
            mip_result.pyomo_results = SolverResults()
            mip_result.pyomo_results.solver.termination_condition = tc.error
            mip_result.disjunct_values = list(disj.indicator_var.value
                                              for disj in GDPopt.disjunct_list)
            return mip_result

    # Deactivate extraneous IMPORT/EXPORT suffixes
    getattr(m, 'ipopt_zL_out', _DoNothing()).deactivate()
    getattr(m, 'ipopt_zU_out', _DoNothing()).deactivate()

    # Create solver, check availability
    if not SolverFactory(config.mip_solver).available():
        raise RuntimeError("MIP solver %s is not available." %
                           config.mip_solver)

    # Callback immediately before solving MIP master problem
    config.call_before_master_solve(m, solve_data)

    try:
        with SuppressInfeasibleWarning():
            mip_args = dict(config.mip_solver_args)
            elapsed = get_main_elapsed_time(solve_data.timing)
            remaining = max(config.time_limit - elapsed, 1)
            if config.mip_solver == 'gams':
                mip_args['add_options'] = mip_args.get('add_options', [])
                mip_args['add_options'].append('option reslim=%s;' % remaining)
            elif config.mip_solver == 'multisolve':
                mip_args['time_limit'] = min(
                    mip_args.get('time_limit', float('inf')), remaining)
            results = SolverFactory(config.mip_solver).solve(m, **mip_args)
    except RuntimeError as e:
        if 'GAMS encountered an error during solve.' in str(e):
            config.logger.warning(
                "GAMS encountered an error in solve. Treating as infeasible.")
            mip_result = MasterProblemResult()
            mip_result.feasible = False
            mip_result.var_values = list(v.value for v in GDPopt.variable_list)
            mip_result.pyomo_results = SolverResults()
            mip_result.pyomo_results.solver.termination_condition = tc.error
            mip_result.disjunct_values = list(disj.indicator_var.value
                                              for disj in GDPopt.disjunct_list)
            return mip_result
        else:
            raise
    terminate_cond = results.solver.termination_condition
    if terminate_cond is tc.infeasibleOrUnbounded:
        # Linear solvers will sometimes report that the problem is infeasible or
        # unbounded during presolve, but fail to distinguish between the two.
        # We need to re-solve with a solver option flag turned on.
        results, terminate_cond = distinguish_mip_infeasible_or_unbounded(
            m, config)
    if terminate_cond is tc.unbounded:
        # Solution is unbounded. Add an arbitrary bound to the objective and resolve.
        # This occurs when the objective is nonlinear. The nonlinear objective is moved
        # to the constraints, and deactivated for the linear master problem.
        obj_bound = 1E15
        config.logger.warning(
            'Linear GDP was unbounded. '
            'Resolving with arbitrary bound values of (-{0:.10g}, {0:.10g}) on the objective. '
            'Check your initialization routine.'.format(obj_bound))
        main_objective = next(m.component_data_objects(Objective, active=True))
        GDPopt.objective_bound = Constraint(expr=(-obj_bound,
                                                  main_objective.expr,
                                                  obj_bound))
        with SuppressInfeasibleWarning():
            results = SolverFactory(config.mip_solver).solve(
                m, **config.mip_solver_args)
        terminate_cond = results.solver.termination_condition

    # Build and return results object
    mip_result = MasterProblemResult()
    mip_result.feasible = True
    mip_result.var_values = list(v.value for v in GDPopt.variable_list)
    mip_result.pyomo_results = results
    mip_result.disjunct_values = list(disj.indicator_var.value
                                      for disj in GDPopt.disjunct_list)

    if terminate_cond in {tc.optimal, tc.locallyOptimal, tc.feasible}:
        pass
    elif terminate_cond is tc.infeasible:
        config.logger.info(
            'Linear GDP is now infeasible. '
            'GDPopt has finished exploring feasible discrete configurations.')
        mip_result.feasible = False
    elif terminate_cond is tc.maxTimeLimit:
        # TODO check that status is actually ok and everything is feasible
        config.logger.info(
            'Unable to optimize linear GDP problem within time limit. '
            'Using current solver feasible solution.')
    elif (terminate_cond is tc.other
          and results.solution.status is SolutionStatus.feasible):
        # load the solution and suppress the warning message by setting
        # solver status to ok.
        config.logger.info('Linear GDP solver reported feasible solution, '
                           'but not guaranteed to be optimal.')
    else:
        raise ValueError('GDPopt unable to handle linear GDP '
                         'termination condition '
                         'of %s. Solver message: %s' %
                         (terminate_cond, results.solver.message))

    return mip_result
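
The unbounded-case workaround above uses Pyomo's ranged-constraint form (lower, expression, upper) to clamp the objective expression before re-solving. A minimal sketch of that construction on a toy model (names and the bound value are illustrative):

from pyomo.environ import ConcreteModel, Constraint, Objective, Var, minimize

m = ConcreteModel()
m.x = Var()
m.obj = Objective(expr=m.x, sense=minimize)   # unbounded below on its own

obj_bound = 1E15
# Ranged constraint: -obj_bound <= objective expression <= obj_bound
m.objective_bound = Constraint(expr=(-obj_bound, m.obj.expr, obj_bound))
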
Ejemplo n.º 28
0
def solve_OA_master(solve_data, config):
    """
    This function solves the MIP master problem for the OA algorithm

    Parameters
    ----------
    solve_data: MindtPy Data Container
        data container that holds solve-instance data
    config: ConfigBlock
        contains the specific configurations for the algorithm

    Returns
    -------
    solve_data.mip: Pyomo model
        the MIP stored in solve_data
    master_mip_results: Pyomo results object
        result from solving the master MIP
    """
    solve_data.mip_iter += 1
    MindtPy = solve_data.mip.MindtPy_utils
    config.logger.info('MIP %s: Solve master problem.' %
                       (solve_data.mip_iter, ))
    # Set up MILP
    for c in MindtPy.constraint_list:
        if c.body.polynomial_degree() not in (1, 0):
            c.deactivate()

    MindtPy.MindtPy_linear_cuts.activate()
    main_objective = next(
        solve_data.mip.component_data_objects(Objective, active=True))
    main_objective.deactivate()

    sign_adjust = 1 if main_objective.sense == minimize else -1
    MindtPy.del_component('MindtPy_oa_obj')

    if config.add_slack:
        MindtPy.del_component('MindtPy_penalty_expr')

        MindtPy.MindtPy_penalty_expr = Expression(
            expr=sign_adjust * config.OA_penalty_factor *
            sum(v for v in MindtPy.MindtPy_linear_cuts.slack_vars[...]))

    MindtPy.MindtPy_oa_obj = Objective(
        expr=main_objective.expr +
        (MindtPy.MindtPy_penalty_expr if config.add_slack else 0),
        sense=main_objective.sense)

    if config.use_dual_bound:
        # Delete previously added dual bound constraint
        if MindtPy.MindtPy_linear_cuts.find_component(
                'dual_bound') is not None:
            MindtPy.MindtPy_linear_cuts.del_component('dual_bound')
        if main_objective.sense == minimize:
            MindtPy.MindtPy_linear_cuts.dual_bound = Constraint(
                expr=main_objective.expr +
                (MindtPy.MindtPy_penalty_expr if config.add_slack else 0) >=
                solve_data.LB,
                doc=
                'Objective function expression should improve on the best found dual bound'
            )
        else:
            MindtPy.MindtPy_linear_cuts.dual_bound = Constraint(
                expr=main_objective.expr +
                (MindtPy.MindtPy_penalty_expr if config.add_slack else 0) <=
                solve_data.UB,
                doc=
                'Objective function expression should improve on the best found dual bound'
            )

    # Deactivate extraneous IMPORT/EXPORT suffixes
    if config.nlp_solver == 'ipopt':
        getattr(solve_data.mip, 'ipopt_zL_out', _DoNothing()).deactivate()
        getattr(solve_data.mip, 'ipopt_zU_out', _DoNothing()).deactivate()

    masteropt = SolverFactory(config.mip_solver)
    # Determine whether a persistent solver is being used.
    if isinstance(masteropt, PersistentSolver):
        masteropt.set_instance(solve_data.mip, symbolic_solver_labels=True)
    if config.single_tree:
        # Configuration of lazy callback
        lazyoa = masteropt._solver_model.register_callback(
            single_tree.LazyOACallback_cplex)
        # pass necessary data and parameters to lazyoa
        lazyoa.master_mip = solve_data.mip
        lazyoa.solve_data = solve_data
        lazyoa.config = config
        lazyoa.opt = masteropt
        masteropt._solver_model.set_warning_stream(None)
        masteropt._solver_model.set_log_stream(None)
        masteropt._solver_model.set_error_stream(None)
        masteropt.options['timelimit'] = config.time_limit
    if config.threads > 0:
        masteropt.options["threads"] = config.threads
    mip_args = dict(config.mip_solver_args)
    elapsed = get_main_elapsed_time(solve_data.timing)
    remaining = int(max(config.time_limit - elapsed, 1))
    if config.mip_solver == 'gams':
        mip_args['add_options'] = mip_args.get('add_options', [])
        mip_args['add_options'].append('option optcr=0.001;')
        mip_args['add_options'].append('option reslim=%s;' % remaining)
    # elif config.mip_solver == 'glpk':
    #     masteropt.options['timelimit'] = remaining
    master_mip_results = masteropt.solve(solve_data.mip,
                                         tee=config.solver_tee,
                                         **mip_args)

    # if config.single_tree is False and config.add_nogood_cuts is False:

    if master_mip_results.solver.termination_condition is tc.optimal:
        if config.single_tree and config.add_nogood_cuts is False:
            if main_objective.sense == minimize:
                solve_data.LB = max(master_mip_results.problem.lower_bound,
                                    solve_data.LB)
                solve_data.LB_progress.append(solve_data.LB)
            else:
                solve_data.UB = min(master_mip_results.problem.upper_bound,
                                    solve_data.UB)
                solve_data.UB_progress.append(solve_data.UB)

    elif master_mip_results.solver.termination_condition is tc.infeasibleOrUnbounded:
        # Linear solvers will sometimes report that the problem is infeasible or
        # unbounded during presolve, but fail to distinguish between the two.
        # We need to re-solve with a solver option flag turned on.
        master_mip_results, _ = distinguish_mip_infeasible_or_unbounded(
            solve_data.mip, config)

    return solve_data.mip, master_mip_results
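
The master objective built above augments the original objective with a penalty on the OA slack variables, where sign_adjust keeps the penalty pushing in the right direction for the objective sense. A toy sketch of the same construction (all names, values, and the penalty factor are illustrative):

from pyomo.environ import (ConcreteModel, Expression, NonNegativeReals,
                           Objective, Var, minimize)

m = ConcreteModel()
m.x = Var()
m.slack = Var([1, 2], domain=NonNegativeReals)

OA_penalty_factor = 1000
sign_adjust = 1                 # +1 for a minimization objective, -1 for maximization
m.penalty = Expression(
    expr=sign_adjust * OA_penalty_factor * sum(m.slack[i] for i in [1, 2]))
m.oa_obj = Objective(expr=m.x + m.penalty, sense=minimize)
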
Ejemplo n.º 29
0
def _solve_rnGDP_subproblem(model, solve_data):
    config = solve_data.config
    subproblem = TransformationFactory('gdp.bigm').create_using(model)
    obj_sense_correction = solve_data.objective_sense != minimize

    try:
        with SuppressInfeasibleWarning():
            try:
                fbbt(subproblem, integer_tol=config.integer_tolerance)
            except InfeasibleConstraintException:
                copy_var_list_values(  # copy variable values, even if errored
                    from_list=subproblem.GDPopt_utils.variable_list,
                    to_list=model.GDPopt_utils.variable_list,
                    config=config,
                    ignore_integrality=True)
                return float('inf'), float('inf')
            minlp_args = dict(config.minlp_solver_args)
            if config.minlp_solver == 'gams':
                elapsed = get_main_elapsed_time(solve_data.timing)
                remaining = max(config.time_limit - elapsed, 1)
                minlp_args['add_options'] = minlp_args.get('add_options', [])
                minlp_args['add_options'].append('option reslim=%s;' %
                                                 remaining)
            result = SolverFactory(config.minlp_solver).solve(
                subproblem, **minlp_args)
    except RuntimeError as e:
        config.logger.warning(
            "Solver encountered RuntimeError. Treating as infeasible. "
            "Msg: %s\n%s" % (str(e), traceback.format_exc()))
        copy_var_list_values(  # copy variable values, even if errored
            from_list=subproblem.GDPopt_utils.variable_list,
            to_list=model.GDPopt_utils.variable_list,
            config=config,
            ignore_integrality=True)
        return float('inf'), float('inf')

    term_cond = result.solver.termination_condition
    if term_cond == tc.optimal:
        assert result.solver.status is SolverStatus.ok
        lb = result.problem.lower_bound if not obj_sense_correction else -result.problem.upper_bound
        ub = result.problem.upper_bound if not obj_sense_correction else -result.problem.lower_bound
        copy_var_list_values(
            from_list=subproblem.GDPopt_utils.variable_list,
            to_list=model.GDPopt_utils.variable_list,
            config=config,
        )
        return lb, ub
    elif term_cond == tc.locallyOptimal or term_cond == tc.feasible:
        assert result.solver.status is SolverStatus.ok
        lb = result.problem.lower_bound if not obj_sense_correction else -result.problem.upper_bound
        ub = result.problem.upper_bound if not obj_sense_correction else -result.problem.lower_bound
        # TODO handle LB absent
        copy_var_list_values(
            from_list=subproblem.GDPopt_utils.variable_list,
            to_list=model.GDPopt_utils.variable_list,
            config=config,
        )
        return lb, ub
    elif term_cond == tc.unbounded:
        copy_var_list_values(from_list=subproblem.GDPopt_utils.variable_list,
                             to_list=model.GDPopt_utils.variable_list,
                             config=config,
                             ignore_integrality=True)
        return float('-inf'), float('-inf')
    elif term_cond == tc.infeasible:
        copy_var_list_values(from_list=subproblem.GDPopt_utils.variable_list,
                             to_list=model.GDPopt_utils.variable_list,
                             config=config,
                             ignore_integrality=True)
        return float('inf'), float('inf')
    else:
        config.logger.warning(
            "Unknown termination condition of %s. Treating as infeasible." %
            term_cond)
        copy_var_list_values(from_list=subproblem.GDPopt_utils.variable_list,
                             to_list=model.GDPopt_utils.variable_list,
                             config=config,
                             ignore_integrality=True)
        return float('inf'), float('inf')
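
When the original objective is a maximization that has been recast as minimizing its negation, the bounds reported by the subsolver must be negated and swapped before they describe the original objective; that is what the obj_sense_correction logic above does. A tiny numerical illustration with hypothetical values:

# Bounds reported by the subsolver on the minimized negation of the objective.
reported_lower, reported_upper = -12.0, -7.5

obj_sense_correction = True          # the original problem was a maximization
if obj_sense_correction:
    lb, ub = -reported_upper, -reported_lower
else:
    lb, ub = reported_lower, reported_upper

assert (lb, ub) == (7.5, 12.0)       # bounds expressed for the original objective
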
Ejemplo n.º 30
0
    def handle_lazy_NLP_subproblem_optimal(self, fixed_nlp, solve_data, config,
                                           opt):
        """
        This function copies the result to the MIP main problem (explanation see below), updates the bound,
        adds OA and integer cuts, and stores the best solution if the new one is better.

        Parameters
        ----------
        fixed_nlp: Pyomo model
            fixed NLP from the model
        solve_data: MindtPy Data Container
            data container that holds solve-instance data
        config: ConfigBlock
            contains the specific configurations for the algorithm
        opt: SolverFactory
            the mip solver
        """
        if config.use_dual:
            for c in fixed_nlp.tmp_duals:
                if fixed_nlp.dual.get(c, None) is None:
                    fixed_nlp.dual[c] = fixed_nlp.tmp_duals[c]
            dual_values = list(
                fixed_nlp.dual[c]
                for c in fixed_nlp.MindtPy_utils.constraint_list)
        else:
            dual_values = None

        main_objective = next(
            fixed_nlp.component_data_objects(Objective, active=True))
        if main_objective.sense == minimize:
            solve_data.UB = min(value(main_objective.expr), solve_data.UB)
            solve_data.solution_improved = solve_data.UB < solve_data.UB_progress[
                -1]
            solve_data.UB_progress.append(solve_data.UB)
        else:
            solve_data.LB = max(value(main_objective.expr), solve_data.LB)
            solve_data.solution_improved = solve_data.LB > solve_data.LB_progress[
                -1]
            solve_data.LB_progress.append(solve_data.LB)

        config.logger.info('NLP {}: OBJ: {}  LB: {}  UB: {}'.format(
            solve_data.nlp_iter, value(main_objective.expr), solve_data.LB,
            solve_data.UB))

        if solve_data.solution_improved:
            solve_data.best_solution_found = fixed_nlp.clone()
            solve_data.best_solution_found_time = get_main_elapsed_time(
                solve_data.timing)
            if config.add_nogood_cuts:
                if solve_data.results.problem.sense == ProblemSense.minimize:
                    solve_data.stored_bound.update(
                        {solve_data.UB: solve_data.LB})
                else:
                    solve_data.stored_bound.update(
                        {solve_data.LB: solve_data.UB})

        # In OA algorithm, OA cuts are generated based on the solution of the subproblem
        # We need to first copy the value of variables from the subproblem and then add cuts
        # since value(constr.body), value(jacs[constr][var]), value(var) are used in self.add_lazy_oa_cuts()
        copy_var_list_values(fixed_nlp.MindtPy_utils.variable_list,
                             solve_data.mip.MindtPy_utils.variable_list,
                             config)
        if config.strategy == 'OA':
            self.add_lazy_oa_cuts(solve_data.mip, dual_values, solve_data,
                                  config, opt)
        elif config.strategy == 'GOA':
            self.add_lazy_affine_cuts(solve_data, config, opt)
        if config.add_nogood_cuts:
            var_values = list(v.value
                              for v in fixed_nlp.MindtPy_utils.variable_list)
            self.add_lazy_nogood_cuts(var_values, solve_data, config, opt)
Ejemplo n.º 31
0
def algorithm_should_terminate(solve_data, config, check_cycling):
    """Checks if the algorithm should terminate at the given point.

    This function determines whether the algorithm should terminate based on the solver options and progress.
    (Sets the solve_data.results.solver.termination_condition to the appropriate condition, i.e. optimal,
    maxIterations, maxTimeLimit).

    Args:
        solve_data (MindtPySolveData): data container that holds solve-instance data.
        config (ConfigBlock): the specific configurations for MindtPy.
        check_cycling (bool): check for a special case that causes a binary variable to loop through the same values.

    Returns:
        bool: True if the algorithm should terminate else returns False.
    """
    if solve_data.should_terminate:
        if solve_data.objective_sense == minimize:
            if solve_data.UB == float('inf'):
                solve_data.results.solver.termination_condition = tc.noSolution
            else:
                solve_data.results.solver.termination_condition = tc.feasible
        else:
            if solve_data.LB == float('-inf'):
                solve_data.results.solver.termination_condition = tc.noSolution
            else:
                solve_data.results.solver.termination_condition = tc.feasible
        return True

    # Check bound convergence
    if solve_data.abs_gap <= config.bound_tolerance:
        config.logger.info('MindtPy exiting on bound convergence. '
                           'LB: {} + (tol {}) >= UB: {}\n'.format(
                               solve_data.LB, config.bound_tolerance,
                               solve_data.UB))
        solve_data.results.solver.termination_condition = tc.optimal
        return True
    # Check relative bound convergence
    if solve_data.best_solution_found is not None:
        if solve_data.rel_gap <= config.relative_bound_tolerance:
            config.logger.info(
                'MindtPy exiting on bound convergence. '
                '(UB: {} - LB: {})/ (1e-10+|bestinteger|:{}) <= relative tolerance: {}'
                .format(
                    solve_data.UB, solve_data.LB,
                    abs(solve_data.UB if solve_data.objective_sense ==
                        minimize else solve_data.LB),
                    config.relative_bound_tolerance))
            solve_data.results.solver.termination_condition = tc.optimal
            return True

    # Check iteration limit
    if solve_data.mip_iter >= config.iteration_limit:
        config.logger.info('MindtPy unable to converge bounds '
                           'after {} main iterations.'.format(
                               solve_data.mip_iter))
        config.logger.info('Final bound values: LB: {}  UB: {}'.format(
            solve_data.LB, solve_data.UB))
        if config.single_tree:
            solve_data.results.solver.termination_condition = tc.feasible
        else:
            solve_data.results.solver.termination_condition = tc.maxIterations
        return True

    # Check time limit
    if get_main_elapsed_time(solve_data.timing) >= config.time_limit:
        config.logger.info('MindtPy unable to converge bounds '
                           'before time limit of {} seconds. '
                           'Elapsed: {} seconds'.format(
                               config.time_limit,
                               get_main_elapsed_time(solve_data.timing)))
        config.logger.info('Final bound values: LB: {}  UB: {}'.format(
            solve_data.LB, solve_data.UB))
        solve_data.results.solver.termination_condition = tc.maxTimeLimit
        return True

    # Check if algorithm is stalling
    if (len(solve_data.LB_progress) >= config.stalling_limit and solve_data.objective_sense == maximize) or \
        (len(solve_data.UB_progress) >= config.stalling_limit and solve_data.objective_sense == minimize):
        if (abs(solve_data.LB_progress[-1] - solve_data.LB_progress[-config.stalling_limit]) <= config.zero_tolerance and solve_data.objective_sense == maximize) or \
            (abs(solve_data.UB_progress[-1] - solve_data.UB_progress[-config.stalling_limit]) <= config.zero_tolerance and solve_data.objective_sense == minimize):
            config.logger.info('Algorithm is not making enough progress. '
                               'Exiting iteration loop.')
            config.logger.info('Final bound values: LB: {}  UB: {}'.format(
                solve_data.LB, solve_data.UB))
            if solve_data.best_solution_found is not None:
                solve_data.results.solver.termination_condition = tc.feasible
            else:
                # TODO: Is it correct to set solve_data.working_model as the best_solution_found?
                # In function copy_var_list_values, skip_fixed is set to True by default.
                solve_data.best_solution_found = solve_data.working_model.clone(
                )
                config.logger.warning(
                    'Algorithm did not find a feasible solution. '
                    'Returning best bound solution. Consider increasing stalling_limit or bound_tolerance.'
                )
                solve_data.results.solver.termination_condition = tc.noSolution

            return True

    if config.strategy == 'ECP':
        # check to see if the nonlinear constraints are satisfied
        MindtPy = solve_data.working_model.MindtPy_utils
        nonlinear_constraints = [c for c in MindtPy.nonlinear_constraint_list]
        for nlc in nonlinear_constraints:
            if nlc.has_lb():
                try:
                    lower_slack = nlc.lslack()
                except (ValueError, OverflowError):
                    # Set lower_slack (upper_slack below) less than -config.ecp_tolerance in this case.
                    lower_slack = -10 * config.ecp_tolerance
                if lower_slack < -config.ecp_tolerance:
                    config.logger.debug(
                        'MindtPy-ECP continuing as {} does not yet satisfy '
                        'the nonlinear constraints.'
                        '\n'.format(nlc))
                    return False
            if nlc.has_ub():
                try:
                    upper_slack = nlc.uslack()
                except (ValueError, OverflowError):
                    upper_slack = -10 * config.ecp_tolerance
                if upper_slack < -config.ecp_tolerance:
                    config.logger.debug(
                        'MindtPy-ECP continuing as {} does not yet satisfy '
                        'the nonlinear constraints.'
                        '\n'.format(nlc))
                    return False
        # For ECP, determine which bound to copy over (primal or dual).
        if solve_data.objective_sense == minimize:
            solve_data.UB = solve_data.LB
        else:
            solve_data.LB = solve_data.UB
        config.logger.info(
            'MindtPy-ECP exiting on nonlinear constraints satisfaction. '
            'LB: {} UB: {}\n'.format(solve_data.LB, solve_data.UB))

        solve_data.best_solution_found = solve_data.working_model.clone()
        solve_data.results.solver.termination_condition = tc.optimal
        return True

    # Cycling check
    if check_cycling:
        if config.cycling_check or config.use_tabu_list:
            solve_data.curr_int_sol = get_integer_solution(solve_data.mip)
            if config.cycling_check and solve_data.mip_iter >= 1:
                if solve_data.curr_int_sol in set(solve_data.integer_list):
                    config.logger.info(
                        'Cycling happens after {} main iterations. '
                        'The same integer combination was obtained in iteration {}. '
                        'This issue happens when the NLP subproblem violates constraint qualification. '
                        'Convergence to optimal solution is not guaranteed.'.
                        format(
                            solve_data.mip_iter,
                            solve_data.integer_list.index(
                                solve_data.curr_int_sol) + 1))
                    config.logger.info(
                        'Final bound values: LB: {}  UB: {}'.format(
                            solve_data.LB, solve_data.UB))
                    # TODO: determine whether solve_data.LB / solve_data.UB is inf or -inf.
                    solve_data.results.solver.termination_condition = tc.feasible
                    return True
            solve_data.integer_list.append(solve_data.curr_int_sol)

    # if not algorithm_is_making_progress(solve_data, config):
    #     config.logger.debug(
    #         'Algorithm is not making enough progress. '
    #         'Exiting iteration loop.')
    #     return True
    return False
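
The stalling check above compares the newest entry of the bound-progress list against the entry recorded config.stalling_limit iterations earlier. A minimal standalone sketch of that idea follows; bound_is_stalling, bound_progress and the sample values are illustrative names, not MindtPy identifiers.

# Illustrative only: a bound is "stalling" when the newest value in the progress
# list equals (within zero_tolerance) the value recorded stalling_limit entries ago.
def bound_is_stalling(bound_progress, stalling_limit, zero_tolerance=1e-8):
    if len(bound_progress) < stalling_limit:
        return False
    return abs(bound_progress[-1] - bound_progress[-stalling_limit]) <= zero_tolerance

print(bound_is_stalling([5.0] * 12, stalling_limit=10))      # True: no progress
print(bound_is_stalling([5.0, 4.0, 3.0], stalling_limit=3))  # False: still improving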
Example No. 32
0
    def solve(self, model, **kwds):
        config = self.CONFIG(kwds.pop('options', {}))
        config.set_value(kwds)
        return SolverFactory('gdpopt').solve(
            model,
            strategy='LBB',
            minlp_solver=config.solver,
            minlp_solver_args=config.solver_args,
            tee=config.tee,
            check_sat=config.check_sat,
            logger=config.logger,
            time_limit=config.time_limit)
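
        # NOTE: solve() returns above after delegating to GDPopt's LBB strategy,
        # so the legacy branch-and-bound implementation below this point is
        # unreachable; it appears to be retained for reference only.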

        # Validate model to be used with gdpbb
        self.validate_model(model)
        # Set solver as an MINLP
        solve_data = GDPbbSolveData()
        solve_data.timing = Container()
        solve_data.original_model = model
        solve_data.results = SolverResults()

        old_logger_level = config.logger.getEffectiveLevel()
        with time_code(solve_data.timing, 'total', is_main_timer=True), \
                restore_logger_level(config.logger), \
                create_utility_block(model, 'GDPbb_utils', solve_data):
            if config.tee and old_logger_level > logging.INFO:
                # If the logger does not already include INFO, include it.
                config.logger.setLevel(logging.INFO)
            config.logger.info(
                "Starting GDPbb version %s using %s as subsolver" %
                (".".join(map(str, self.version())), config.solver))

            # Setup results
            solve_data.results.solver.name = 'GDPbb - %s' % (str(
                config.solver))
            setup_results_object(solve_data, config)

            # clone original model for root node of branch and bound
            root = solve_data.working_model = solve_data.original_model.clone()

            # get objective sense
            process_objective(solve_data, config)
            objectives = solve_data.original_model.component_data_objects(
                Objective, active=True)
            obj = next(objectives, None)
            # obj_sign: +1 for minimization, -1 for maximization; it is needed
            # below for the heap ordering and the infeasibility checks.
            obj_sign = 1 if obj.sense == minimize else -1
            solve_data.results.problem.sense = obj.sense

            # set up lists to keep track of which disjunctions have been covered.

            # this list keeps track of the relaxed disjunctions
            root.GDPbb_utils.unenforced_disjunctions = list(
                disjunction
                for disjunction in root.GDPbb_utils.disjunction_list
                if disjunction.active)

            root.GDPbb_utils.deactivated_constraints = ComponentSet([
                constr
                for disjunction in root.GDPbb_utils.unenforced_disjunctions
                for disjunct in disjunction.disjuncts
                for constr in disjunct.component_data_objects(ctype=Constraint,
                                                              active=True)
                if constr.body.polynomial_degree() not in (1, 0)
            ])
            # Deactivate nonlinear constraints in unenforced disjunctions
            for constr in root.GDPbb_utils.deactivated_constraints:
                constr.deactivate()

            # Add the BigM suffix if it does not already exist. Used later during nonlinear constraint activation.
            if not hasattr(root, 'BigM'):
                root.BigM = Suffix()

            # Pre-screen that none of the disjunctions are already predetermined due to the disjuncts being fixed
            # to True/False values.
            # TODO: this should also be done within the loop, but we aren't handling it right now.
            # Skipping it there affects efficiency, but not correctness.
            root.GDPbb_utils.disjuncts_fixed_True = ComponentSet()
            # Only find top-level (non-nested) disjunctions
            for disjunction in root.component_data_objects(Disjunction,
                                                           active=True):
                fixed_true_disjuncts = [
                    disjunct for disjunct in disjunction.disjuncts
                    if disjunct.indicator_var.fixed
                    and disjunct.indicator_var.value == 1
                ]
                fixed_false_disjuncts = [
                    disjunct for disjunct in disjunction.disjuncts
                    if disjunct.indicator_var.fixed
                    and disjunct.indicator_var.value == 0
                ]
                for disjunct in fixed_false_disjuncts:
                    disjunct.deactivate()
                if len(fixed_false_disjuncts) == len(
                        disjunction.disjuncts) - 1:
                    # all but one disjunct in the disjunction is fixed to False. Remaining one must be true.
                    if not fixed_true_disjuncts:
                        fixed_true_disjuncts = [
                            disjunct for disjunct in disjunction.disjuncts
                            if disjunct not in fixed_false_disjuncts
                        ]
                # Reactivate the fixed-true disjuncts
                for disjunct in fixed_true_disjuncts:
                    newly_activated = ComponentSet()
                    for constr in disjunct.component_data_objects(Constraint):
                        if constr in root.GDPbb_utils.deactivated_constraints:
                            newly_activated.add(constr)
                            constr.activate()
                            # Set the big M value for the constraint
                            root.BigM[constr] = 1
                            # Note: we use a default big M value of 1
                            # because all non-selected disjuncts should be deactivated.
                            # Therefore, none of the big M transformed nonlinear constraints will need to be relaxed.
                            # The default M value should therefore be irrelevant.
                    root.GDPbb_utils.deactivated_constraints -= newly_activated
                    root.GDPbb_utils.disjuncts_fixed_True.add(disjunct)

                if fixed_true_disjuncts:
                    assert disjunction.xor, "GDPbb only handles disjunctions in which one term can be selected. " \
                        "%s violates this assumption." % (disjunction.name, )
                    root.GDPbb_utils.unenforced_disjunctions.remove(
                        disjunction)

            # Check satisfiability
            if config.check_sat and satisfiable(root, config.logger) is False:
                # Problem is not satisfiable. Problem is infeasible.
                obj_value = obj_sign * float('inf')
            else:
                # solve the root node
                config.logger.info("Solving the root node.")
                obj_value, result, var_values = self.subproblem_solve(
                    root, config)

            if obj_sign * obj_value == float('inf'):
                config.logger.info(
                    "Model was found to be infeasible at the root node. Elapsed %.2f seconds."
                    % get_main_elapsed_time(solve_data.timing))
                if solve_data.results.problem.sense == minimize:
                    solve_data.results.problem.lower_bound = float('inf')
                    solve_data.results.problem.upper_bound = None
                else:
                    solve_data.results.problem.lower_bound = None
                    solve_data.results.problem.upper_bound = float('-inf')
                solve_data.results.solver.timing = solve_data.timing
                solve_data.results.solver.iterations = 0
                solve_data.results.solver.termination_condition = tc.infeasible
                return solve_data.results

            # initialize minheap for Branch and Bound algorithm
            # Heap structure: (ordering tuple, model)
            # Ordering tuple: (objective value, disjunctions_left, -total_nodes_counter)
            #  - select solutions with lower objective value,
            #    then fewer disjunctions left to explore (depth first),
            #    then more recently encountered (tiebreaker)
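            # For example (hypothetical values), the key (1.5, 4, -3) sorts
            # ahead of (3.0, 1, -2), which in turn sorts ahead of (3.0, 2, -1)
            # under Python tuple comparison.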
            heap = []
            total_nodes_counter = 0
            disjunctions_left = len(root.GDPbb_utils.unenforced_disjunctions)
            heapq.heappush(heap,
                           ((obj_sign * obj_value, disjunctions_left,
                             -total_nodes_counter), root, result, var_values))

            # loop to branch through the tree
            while len(heap) > 0:
                # pop best model off of heap
                sort_tuple, incumbent_model, incumbent_results, incumbent_var_values = heapq.heappop(
                    heap)
                incumbent_obj_value, disjunctions_left, _ = sort_tuple

                config.logger.info(
                    "Exploring node with LB %.10g and %s inactive disjunctions."
                    % (incumbent_obj_value, disjunctions_left))

                # if all the originally active disjunctions are active, solve and
                # return solution
                if disjunctions_left == 0:
                    config.logger.info("Model solved.")
                    # Model is solved. Copy over solution values.
                    original_model = solve_data.original_model
                    for orig_var, val in zip(
                            original_model.GDPbb_utils.variable_list,
                            incumbent_var_values):
                        orig_var.value = val

                    solve_data.results.problem.lower_bound = incumbent_results.problem.lower_bound
                    solve_data.results.problem.upper_bound = incumbent_results.problem.upper_bound
                    solve_data.results.solver.timing = solve_data.timing
                    solve_data.results.solver.iterations = total_nodes_counter
                    solve_data.results.solver.termination_condition = incumbent_results.solver.termination_condition
                    return solve_data.results

                # Pick the next disjunction to branch on
                next_disjunction = incumbent_model.GDPbb_utils.unenforced_disjunctions[
                    0]
                config.logger.info("Branching on disjunction %s" %
                                   next_disjunction.name)
                assert next_disjunction.xor, "GDPbb only handles disjunctions in which one term can be selected. " \
                    "%s violates this assumption." % (next_disjunction.name, )

                new_nodes_counter = 0

                for i, disjunct in enumerate(next_disjunction.disjuncts):
                    # Create one branch for each of the disjuncts on the disjunction

                    if any(disj.indicator_var.fixed
                           and disj.indicator_var.value == 1
                           for disj in next_disjunction.disjuncts
                           if disj is not disjunct):
                        # If any other disjunct is fixed to 1 and an xor relationship applies,
                        # then this disjunct cannot be activated.
                        continue

                    # Check time limit
                    if get_main_elapsed_time(
                            solve_data.timing) >= config.time_limit:
                        if solve_data.results.problem.sense == minimize:
                            solve_data.results.problem.lower_bound = incumbent_obj_value
                            solve_data.results.problem.upper_bound = float(
                                'inf')
                        else:
                            solve_data.results.problem.lower_bound = float(
                                '-inf')
                            solve_data.results.problem.upper_bound = incumbent_obj_value
                        config.logger.info('GDPopt unable to converge bounds '
                                           'before time limit of {} seconds. '
                                           'Elapsed: {} seconds'.format(
                                               config.time_limit,
                                               get_main_elapsed_time(
                                                   solve_data.timing)))
                        config.logger.info(
                            'Final bound values: LB: {}  UB: {}'.format(
                                solve_data.results.problem.lower_bound,
                                solve_data.results.problem.upper_bound))
                        solve_data.results.solver.timing = solve_data.timing
                        solve_data.results.solver.iterations = total_nodes_counter
                        solve_data.results.solver.termination_condition = tc.maxTimeLimit
                        return solve_data.results

                    # Branch on the disjunct
                    child = incumbent_model.clone()
                    # TODO I am leaving the old branching system in place, but there should be
                    # something better, ideally that deals with nested disjunctions as well.
                    disjunction_to_branch = child.GDPbb_utils.unenforced_disjunctions.pop(
                        0)
                    child_disjunct = disjunction_to_branch.disjuncts[i]
                    child_disjunct.indicator_var.fix(1)
                    # Deactivate (and fix to 0) other disjuncts on the disjunction
                    for disj in disjunction_to_branch.disjuncts:
                        if disj is not child_disjunct:
                            disj.deactivate()
                    # Activate nonlinear constraints on the newly fixed child disjunct
                    newly_activated = ComponentSet()
                    for constr in child_disjunct.component_data_objects(
                            Constraint):
                        if constr in child.GDPbb_utils.deactivated_constraints:
                            newly_activated.add(constr)
                            constr.activate()
                            # Set the big M value for the constraint
                            child.BigM[constr] = 1
                            # Note: we use a default big M value of 1
                            # because all non-selected disjuncts should be deactivated.
                            # Therefore, none of the big M transformed nonlinear constraints will need to be relaxed.
                            # The default M value should therefore be irrelevant.
                    child.GDPbb_utils.deactivated_constraints -= newly_activated
                    child.GDPbb_utils.disjuncts_fixed_True.add(child_disjunct)

                    if disjunct in incumbent_model.GDPbb_utils.disjuncts_fixed_True:
                        # If the disjunct was already branched to True from a parent disjunct branching, just pass
                        # through the incumbent value without resolving. The solution should be the same as the parent.
                        total_nodes_counter += 1
                        ordering_tuple = (obj_sign * incumbent_obj_value,
                                          disjunctions_left - 1,
                                          -total_nodes_counter)
                        heapq.heappush(heap, (ordering_tuple, child, result,
                                              incumbent_var_values))
                        new_nodes_counter += 1
                        continue

                    if config.check_sat and satisfiable(
                            child, config.logger) is False:
                        # Problem is not satisfiable. Skip this disjunct.
                        continue

                    obj_value, result, var_values = self.subproblem_solve(
                        child, config)
                    total_nodes_counter += 1
                    ordering_tuple = (obj_sign * obj_value,
                                      disjunctions_left - 1,
                                      -total_nodes_counter)
                    heapq.heappush(heap,
                                   (ordering_tuple, child, result, var_values))
                    new_nodes_counter += 1

                config.logger.info(
                    "Added %s new nodes with %s relaxed disjunctions to the heap. Size now %s."
                    % (new_nodes_counter, disjunctions_left - 1, len(heap)))
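
The node selection in the loop above relies on Python tuple comparison inside a min-heap: each node is keyed by (sign-adjusted objective value, disjunctions left, negated node counter), so the cheapest node pops first, ties go to nodes with fewer unenforced disjunctions, and remaining ties go to the most recently created node. A small self-contained sketch of that pattern, with string payloads standing in for the (model, results, variable values) data carried by GDPbb:

import heapq

heap = []
counter = 0
# Push three hypothetical nodes keyed by (objective value, disjunctions left, -counter).
for obj_val, disjunctions_left in [(3.0, 2), (3.0, 1), (1.5, 4)]:
    counter += 1
    heapq.heappush(heap, ((obj_val, disjunctions_left, -counter), 'node%d' % counter))

while heap:
    key, payload = heapq.heappop(heap)
    print(key, payload)
# Pops (1.5, 4, -3) node3 first (lowest objective), then (3.0, 1, -2) node2
# (fewer disjunctions left), and finally (3.0, 2, -1) node1.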
Example No. 33
0
def set_solver_options(opt,
                       solve_data,
                       config,
                       solver_type,
                       regularization=False):
    """ set options for MIP/NLP solvers

    Args:
        opt : SolverFactory
            the solver
        solve_data: MindtPy Data Container
            data container that holds solve-instance data
        config: ConfigBlock
            contains the specific configurations for the algorithm
        solver_type: String
            The type of the solver, i.e. mip or nlp
        regularization (bool, optional): Boolean. 
            Defaults to False.
    """
    # TODO: integrate nlp_args here
    # nlp_args = dict(config.nlp_solver_args)
    elapsed = get_main_elapsed_time(solve_data.timing)
    remaining = int(max(config.time_limit - elapsed, 1))
    if solver_type == 'mip':
        if regularization:
            solver_name = config.mip_regularization_solver
            if config.regularization_mip_threads > 0:
                opt.options['threads'] = config.regularization_mip_threads
        else:
            solver_name = config.mip_solver
            if config.threads > 0:
                opt.options['threads'] = config.threads
    elif solver_type == 'nlp':
        solver_name = config.nlp_solver
    # TODO: opt.name doesn't work for GAMS
    if solver_name in {'cplex', 'gurobi', 'gurobi_persistent'}:
        opt.options['timelimit'] = remaining
        opt.options['mipgap'] = config.mip_solver_mipgap
        if regularization:
            if solver_name == 'cplex':
                if config.solution_limit is not None:
                    opt.options['mip limits solutions'] = config.solution_limit
                opt.options['mip strategy presolvenode'] = 3
                # TODO: need to discuss if this option should be added.
                if config.add_regularization in {'hess_lag', 'hess_only_lag'}:
                    opt.options['optimalitytarget'] = 3
            elif solver_name == 'gurobi':
                if config.solution_limit is not None:
                    opt.options['SolutionLimit'] = config.solution_limit
                opt.options['Presolve'] = 2
    elif solver_name == 'cplex_persistent':
        opt.options['timelimit'] = remaining
        opt._solver_model.parameters.mip.tolerances.mipgap.set(
            config.mip_solver_mipgap)
        if regularization:
            if config.solution_limit is not None:
                opt._solver_model.parameters.mip.limits.solutions.set(
                    config.solution_limit)
            opt._solver_model.parameters.mip.strategy.presolvenode.set(3)
            if config.add_regularization in {'hess_lag', 'hess_only_lag'}:
                opt._solver_model.parameters.optimalitytarget.set(3)
    elif solver_name == 'glpk':
        opt.options['tmlim'] = remaining
        # TODO: mipgap does not work for glpk yet
        # opt.options['mipgap'] = config.mip_solver_mipgap
    elif solver_name == 'baron':
        opt.options['MaxTime'] = remaining
        opt.options['AbsConFeasTol'] = config.zero_tolerance
    elif solver_name == 'ipopt':
        opt.options['max_cpu_time'] = remaining
        opt.options['constr_viol_tol'] = config.zero_tolerance
    elif solver_name == 'gams':
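        # GAMS takes its options through 'add_options': plain 'option' statements
        # set the gap and time limits, while a subsolver option file written
        # between $onecho/$offecho (activated via GAMS_MODEL.optfile=1) passes
        # the feasibility tolerance down to the chosen NLP subsolver.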
        if solver_type == 'mip':
            opt.options['add_options'] = [
                'option optcr=%s;' % config.mip_solver_mipgap,
                'option reslim=%s;' % remaining
            ]
        elif solver_type == 'nlp':
            opt.options['add_options'] = ['option reslim=%s;' % remaining]
            if 'solver' in config.nlp_solver_args:
                if config.nlp_solver_args['solver'] in {
                        'ipopt', 'ipopth', 'msnlp', 'conopt', 'baron'
                }:
                    if config.nlp_solver_args['solver'] == 'ipopt':
                        opt.options['add_options'].append(
                            '$onecho > ipopt.opt')
                        opt.options['add_options'].append(
                            'constr_viol_tol ' + str(config.zero_tolerance))
                    elif config.nlp_solver_args['solver'] == 'ipopth':
                        opt.options['add_options'].append(
                            '$onecho > ipopth.opt')
                        opt.options['add_options'].append(
                            'constr_viol_tol ' + str(config.zero_tolerance))
                        # TODO: Ipopt warmstart option
                        # opt.options['add_options'].append('warm_start_init_point       yes\n'
                        #                                   'warm_start_bound_push       1e-9\n'
                        #                                   'warm_start_bound_frac       1e-9\n'
                        #                                   'warm_start_slack_bound_frac 1e-9\n'
                        #                                   'warm_start_slack_bound_push 1e-9\n'
                        #                                   'warm_start_mult_bound_push  1e-9\n')
                    elif config.nlp_solver_args['solver'] == 'conopt':
                        opt.options['add_options'].append(
                            '$onecho > conopt.opt')
                        opt.options['add_options'].append(
                            'RTNWMA ' + str(config.zero_tolerance))
                    elif config.nlp_solver_args['solver'] == 'msnlp':
                        opt.options['add_options'].append(
                            '$onecho > msnlp.opt')
                        opt.options['add_options'].append(
                            'feasibility_tolerance ' +
                            str(config.zero_tolerance))
                    elif config.nlp_solver_args['solver'] == 'baron':
                        opt.options['add_options'].append(
                            '$onecho > baron.opt')
                        opt.options['add_options'].append(
                            'AbsConFeasTol ' + str(config.zero_tolerance))
                    opt.options['add_options'].append('$offecho')
                    opt.options['add_options'].append('GAMS_MODEL.optfile=1')