Example #1
def get_infeasible_result_object(model, message=""):
    infeas_result = SubproblemResult()
    infeas_result.feasible = False
    infeas_result.var_values = list(v.value for v in model.GDPopt_utils.variable_list)
    infeas_result.pyomo_results = SolverResults()
    infeas_result.pyomo_results.solver.termination_condition = tc.infeasible
    infeas_result.pyomo_results.message = message
    infeas_result.dual_values = list(None for _ in model.GDPopt_utils.constraint_list)
    return infeas_result
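The snippets in these examples omit their imports. Below is a minimal sketch of what they appear to rely on; the exact module paths are an assumption (they have moved between Pyomo versions), and helpers such as initialize_subproblem, preprocess_subproblem, and detect_unfixed_discrete_vars are defined alongside these functions in GDPopt's own modules.

# Imports assumed by the snippets in these examples (module paths are an
# assumption and may differ between Pyomo versions):
from pyomo.contrib.gdpopt.data_class import SubproblemResult
from pyomo.contrib.gdpopt.util import (
    SuppressInfeasibleWarning, get_main_elapsed_time, is_feasible)
from pyomo.core import TransformationFactory
from pyomo.opt import SolverFactory, SolverResults
from pyomo.opt import TerminationCondition as tc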
Example #2
def solve_linear_subproblem(mip_model, solve_data, config):
    GDPopt = mip_model.GDPopt_utils

    initialize_subproblem(mip_model, solve_data)

    # Callback immediately before solving the MIP subproblem
    config.call_before_subproblem_solve(mip_model, solve_data)

    mip_solver = SolverFactory(config.mip_solver)
    if not mip_solver.available():
        raise RuntimeError("MIP solver %s is not available." %
                           config.mip_solver)
    with SuppressInfeasibleWarning():
        mip_args = dict(config.mip_solver_args)
        elapsed = get_main_elapsed_time(solve_data.timing)
        remaining = max(config.time_limit - elapsed, 1)
        if config.mip_solver == 'gams':
            mip_args['add_options'] = mip_args.get('add_options', [])
            mip_args['add_options'].append('option reslim=%s;' % remaining)
        elif config.mip_solver == 'multisolve':
            mip_args['time_limit'] = min(
                mip_args.get('time_limit', float('inf')), remaining)
        results = mip_solver.solve(mip_model, **mip_args)

    subprob_result = SubproblemResult()
    subprob_result.feasible = True
    subprob_result.var_values = list(v.value for v in GDPopt.variable_list)
    subprob_result.pyomo_results = results
    subprob_result.dual_values = list(
        mip_model.dual.get(c, None) for c in GDPopt.constraint_list)

    subprob_terminate_cond = results.solver.termination_condition
    if subprob_terminate_cond is tc.optimal:
        pass
    elif subprob_terminate_cond is tc.infeasible:
        config.logger.info('MIP subproblem was infeasible.')
        subprob_result.feasible = False
    else:
        raise ValueError('GDPopt unable to handle MIP subproblem termination '
                         'condition of %s. Results: %s' %
                         (subprob_terminate_cond, results))

    # Call the subproblem post-solve callback
    config.call_after_subproblem_solve(mip_model, solve_data)

    # if feasible, call the subproblem post-feasible callback
    if subprob_result.feasible:
        config.call_after_subproblem_feasible(mip_model, solve_data)

    return subprob_result
Example #3
def solve_linear_subproblem(mip_model, solve_data, config):
    GDPopt = mip_model.GDPopt_utils

    initialize_subproblem(mip_model, solve_data)

    # Callback immediately before solving the MIP subproblem
    config.call_before_subproblem_solve(mip_model, solve_data)

    mip_solver = SolverFactory(config.mip_solver)
    if not mip_solver.available():
        raise RuntimeError("MIP solver %s is not available." %
                           config.mip_solver)
    with SuppressInfeasibleWarning():
        results = mip_solver.solve(mip_model, **config.mip_solver_args)

    subprob_result = SubproblemResult()
    subprob_result.feasible = True
    subprob_result.var_values = list(v.value for v in GDPopt.variable_list)
    subprob_result.pyomo_results = results
    subprob_result.dual_values = list(
        mip_model.dual.get(c, None) for c in GDPopt.constraint_list)

    subprob_terminate_cond = results.solver.termination_condition
    if subprob_terminate_cond is tc.optimal:
        pass
    elif subprob_terminate_cond is tc.infeasible:
        config.logger.info('MIP subproblem was infeasible.')
        subprob_result.feasible = False
    else:
        raise ValueError('GDPopt unable to handle MIP subproblem termination '
                         'condition of %s. Results: %s' %
                         (subprob_terminate_cond, results))

    # Call the subproblem post-solve callback
    config.call_after_subproblem_solve(mip_model, solve_data)

    # if feasible, call the subproblem post-feasible callback
    if subprob_result.feasible:
        config.call_after_subproblem_feasible(mip_model, solve_data)

    return subprob_result
Example #4
def solve_NLP(nlp_model, solve_data, config):
    """Solve the NLP subproblem."""
    config.logger.info('Solving nonlinear subproblem for '
                       'fixed binaries and logical realizations.')

    # Error checking for unfixed discrete variables
    unfixed_discrete_vars = detect_unfixed_discrete_vars(nlp_model)
    assert len(unfixed_discrete_vars) == 0, \
        "Unfixed discrete variables exist on the NLP subproblem: {0}".format(
        list(v.name for v in unfixed_discrete_vars))

    GDPopt = nlp_model.GDPopt_utils

    initialize_subproblem(nlp_model, solve_data)

    # Callback immediately before solving NLP subproblem
    config.call_before_subproblem_solve(nlp_model, solve_data)

    nlp_solver = SolverFactory(config.nlp_solver)
    if not nlp_solver.available():
        raise RuntimeError("NLP solver %s is not available." %
                           config.nlp_solver)
    with SuppressInfeasibleWarning():
        try:
            results = nlp_solver.solve(nlp_model, **config.nlp_solver_args)
        except ValueError as err:
            if 'Cannot load SolverResults object with bad status: error' in str(
                    err):
                results = SolverResults()
                results.solver.termination_condition = tc.error
                results.solver.message = str(err)
            else:
                raise

    nlp_result = SubproblemResult()
    nlp_result.feasible = True
    nlp_result.var_values = list(v.value for v in GDPopt.variable_list)
    nlp_result.pyomo_results = results
    nlp_result.dual_values = list(
        nlp_model.dual.get(c, None) for c in GDPopt.constraint_list)

    term_cond = results.solver.termination_condition
    if any(term_cond == cond
           for cond in (tc.optimal, tc.locallyOptimal, tc.feasible)):
        pass
    elif term_cond == tc.infeasible:
        config.logger.info('NLP subproblem was infeasible.')
        nlp_result.feasible = False
    elif term_cond == tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'NLP subproblem failed to converge within iteration limit.')
        if is_feasible(nlp_model, config):
            config.logger.info(
                'NLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            nlp_result.feasible = False
    elif term_cond == tc.internalSolverError:
        # Possible that IPOPT had a restoration failure
        config.logger.info("NLP solver had an internal failure: %s" %
                           results.solver.message)
        nlp_result.feasible = False
    elif (term_cond == tc.other
          and "Too few degrees of freedom" in str(results.solver.message)):
        # Possible IPOPT degrees of freedom error
        config.logger.info("IPOPT has too few degrees of freedom: %s" %
                           results.solver.message)
        nlp_result.feasible = False
    elif term_cond == tc.other:
        config.logger.info(
            "NLP solver had a termination condition of 'other': %s" %
            results.solver.message)
        nlp_result.feasible = False
    elif term_cond == tc.error:
        config.logger.info(
            "NLP solver had a termination condition of 'error': %s" %
            results.solver.message)
        nlp_result.feasible = False
    elif term_cond == tc.maxTimeLimit:
        config.logger.info(
            "NLP solver ran out of time. Assuming infeasible for now.")
        nlp_result.feasible = False
    else:
        raise ValueError('GDPopt unable to handle NLP subproblem termination '
                         'condition of %s. Results: %s' % (term_cond, results))

    # Call the NLP post-solve callback
    config.call_after_subproblem_solve(nlp_model, solve_data)

    # if feasible, call the NLP post-feasible callback
    if nlp_result.feasible:
        config.call_after_subproblem_feasible(nlp_model, solve_data)

    return nlp_result
Example #5
def solve_MINLP(model, solve_data, config):
    """Solve the MINLP subproblem."""
    config.logger.info(
        "Solving MINLP subproblem for fixed logical realizations.")

    GDPopt = model.GDPopt_utils

    initialize_subproblem(model, solve_data)

    # Callback immediately before solving MINLP subproblem
    config.call_before_subproblem_solve(model, solve_data)

    minlp_solver = SolverFactory(config.minlp_solver)
    if not minlp_solver.available():
        raise RuntimeError("MINLP solver %s is not available." %
                           config.minlp_solver)
    with SuppressInfeasibleWarning():
        results = minlp_solver.solve(model, **config.minlp_solver_args)

    subprob_result = SubproblemResult()
    subprob_result.feasible = True
    subprob_result.var_values = list(v.value for v in GDPopt.variable_list)
    subprob_result.pyomo_results = results
    subprob_result.dual_values = list(
        model.dual.get(c, None) for c in GDPopt.constraint_list)

    term_cond = results.solver.termination_condition
    if any(term_cond == cond
           for cond in (tc.optimal, tc.locallyOptimal, tc.feasible)):
        pass
    elif term_cond == tc.infeasible:
        config.logger.info('MINLP subproblem was infeasible.')
        subprob_result.feasible = False
    elif term_cond == tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'MINLP subproblem failed to converge within iteration limit.')
        if is_feasible(model, config):
            config.logger.info(
                'MINLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            subprob_result.feasible = False
    elif term_cond == tc.intermediateNonInteger:
        config.logger.info(
            "MINLP solver could not find feasible integer solution: %s" %
            results.solver.message)
        subprob_result.feasible = False
    else:
        raise ValueError(
            'GDPopt unable to handle MINLP subproblem termination '
            'condition of %s. Results: %s' % (term_cond, results))

    # Call the subproblem post-solve callback
    config.call_after_subproblem_solve(model, solve_data)

    # if feasible, call the subproblem post-feasible callback
    if subprob_result.feasible:
        config.call_after_subproblem_feasible(model, solve_data)

    return subprob_result
Example #6
File: nlp_solve.py  Project: xfLee/pyomo
def solve_NLP(nlp_model, solve_data, config):
    """Solve the NLP subproblem."""
    config.logger.info('Solving nonlinear subproblem for '
                       'fixed binaries and logical realizations.')
    unfixed_discrete_vars = detect_unfixed_discrete_vars(nlp_model)
    if unfixed_discrete_vars:
        discrete_var_names = list(v.name for v in unfixed_discrete_vars)
        config.logger.warning(
            "Unfixed discrete variables exist on the NLP subproblem: %s" %
            (discrete_var_names, ))

    GDPopt = nlp_model.GDPopt_utils

    preprocessing_transformations = [
        # Propagate variable bounds
        'contrib.propagate_eq_var_bounds',
        # Detect fixed variables
        'contrib.detect_fixed_vars',
        # Propagate fixed variables
        'contrib.propagate_fixed_vars',
        # Remove zero terms in linear expressions
        'contrib.remove_zero_terms',
        # Remove terms in equal to zero summations
        'contrib.propagate_zero_sum',
        # Transform bound constraints
        'contrib.constraints_to_var_bounds',
        # Detect fixed variables
        'contrib.detect_fixed_vars',
        # Remove terms in equal to zero summations
        'contrib.propagate_zero_sum',
        # Remove trivial constraints
        'contrib.deactivate_trivial_constraints'
    ]
    for xfrm in preprocessing_transformations:
        TransformationFactory(xfrm).apply_to(nlp_model)

    initialize_NLP(nlp_model, solve_data)

    # Callback immediately before solving NLP subproblem
    config.call_before_subproblem_solve(nlp_model, solve_data)

    nlp_solver = SolverFactory(config.nlp_solver)
    if not nlp_solver.available():
        raise RuntimeError("NLP solver %s is not available." %
                           config.nlp_solver)
    with SuppressInfeasibleWarning():
        results = nlp_solver.solve(nlp_model, **config.nlp_solver_args)

    nlp_result = SubproblemResult()
    nlp_result.feasible = True
    nlp_result.var_values = list(v.value for v in GDPopt.working_var_list)
    nlp_result.pyomo_results = results
    nlp_result.dual_values = list(
        nlp_model.dual.get(c, None) for c in GDPopt.working_constraints_list)

    subprob_terminate_cond = results.solver.termination_condition
    if subprob_terminate_cond is tc.optimal:
        pass
    elif subprob_terminate_cond is tc.infeasible:
        config.logger.info('NLP subproblem was locally infeasible.')
        nlp_result.feasible = False
    elif subprob_terminate_cond is tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'NLP subproblem failed to converge within iteration limit.')
        if is_feasible(nlp_model, config):
            config.logger.info(
                'NLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            nlp_result.feasible = False
    elif subprob_terminate_cond is tc.internalSolverError:
        # Possible that IPOPT had a restoration failure
        config.logger.info("NLP solver had an internal failure: %s" %
                           results.solver.message)
        nlp_result.feasible = False
    else:
        raise ValueError('GDPopt unable to handle NLP subproblem termination '
                         'condition of %s. Results: %s' %
                         (subprob_terminate_cond, results))

    # Call the NLP post-solve callback
    config.call_after_subproblem_solve(nlp_model, solve_data)

    # if feasible, call the NLP post-feasible callback
    if nlp_result.feasible:
        config.call_after_subproblem_feasible(nlp_model, solve_data)

    return nlp_result
Example #7
File: nlp_solve.py  Project: CanLi1/pyomo
def solve_NLP(nlp_model, solve_data, config):
    """Solve the NLP subproblem."""
    config.logger.info('Solving nonlinear subproblem for '
                       'fixed binaries and logical realizations.')

    # Error checking for unfixed discrete variables
    unfixed_discrete_vars = detect_unfixed_discrete_vars(nlp_model)
    assert len(unfixed_discrete_vars) == 0, \
        "Unfixed discrete variables exist on the NLP subproblem: {0}".format(
        list(v.name for v in unfixed_discrete_vars))

    GDPopt = nlp_model.GDPopt_utils

    if config.subproblem_presolve:
        preprocess_subproblem(nlp_model, config)

    initialize_subproblem(nlp_model, solve_data)

    # Callback immediately before solving NLP subproblem
    config.call_before_subproblem_solve(nlp_model, solve_data)

    nlp_solver = SolverFactory(config.nlp_solver)
    if not nlp_solver.available():
        raise RuntimeError("NLP solver %s is not available." %
                           config.nlp_solver)
    with SuppressInfeasibleWarning():
        results = nlp_solver.solve(nlp_model, **config.nlp_solver_args)

    nlp_result = SubproblemResult()
    nlp_result.feasible = True
    nlp_result.var_values = list(v.value for v in GDPopt.variable_list)
    nlp_result.pyomo_results = results
    nlp_result.dual_values = list(
        nlp_model.dual.get(c, None) for c in GDPopt.constraint_list)

    subprob_terminate_cond = results.solver.termination_condition
    if (subprob_terminate_cond is tc.optimal
            or subprob_terminate_cond is tc.locallyOptimal
            or subprob_terminate_cond is tc.feasible):
        pass
    elif subprob_terminate_cond is tc.infeasible:
        config.logger.info('NLP subproblem was infeasible.')
        nlp_result.feasible = False
    elif subprob_terminate_cond is tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'NLP subproblem failed to converge within iteration limit.')
        if is_feasible(nlp_model, config):
            config.logger.info(
                'NLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            nlp_result.feasible = False
    elif subprob_terminate_cond is tc.internalSolverError:
        # Possible that IPOPT had a restoration failure
        config.logger.info("NLP solver had an internal failure: %s" %
                           results.solver.message)
        nlp_result.feasible = False
    else:
        raise ValueError('GDPopt unable to handle NLP subproblem termination '
                         'condition of %s. Results: %s' %
                         (subprob_terminate_cond, results))

    # Call the NLP post-solve callback
    config.call_after_subproblem_solve(nlp_model, solve_data)

    # if feasible, call the NLP post-feasible callback
    if nlp_result.feasible:
        config.call_after_subproblem_feasible(nlp_model, solve_data)

    return nlp_result
Example #8
File: nlp_solve.py  Project: Pyomo/pyomo
def solve_NLP(nlp_model, solve_data, config):
    """Solve the NLP subproblem."""
    config.logger.info(
        'Solving nonlinear subproblem for '
        'fixed binaries and logical realizations.')

    # Error checking for unfixed discrete variables
    unfixed_discrete_vars = detect_unfixed_discrete_vars(nlp_model)
    assert len(unfixed_discrete_vars) == 0, \
        "Unfixed discrete variables exist on the NLP subproblem: {0}".format(
        list(v.name for v in unfixed_discrete_vars))

    GDPopt = nlp_model.GDPopt_utils

    if config.subproblem_presolve:
        preprocess_subproblem(nlp_model, config)

    initialize_subproblem(nlp_model, solve_data)

    # Callback immediately before solving NLP subproblem
    config.call_before_subproblem_solve(nlp_model, solve_data)

    nlp_solver = SolverFactory(config.nlp_solver)
    if not nlp_solver.available():
        raise RuntimeError("NLP solver %s is not available." %
                           config.nlp_solver)
    with SuppressInfeasibleWarning():
        results = nlp_solver.solve(nlp_model, **config.nlp_solver_args)

    nlp_result = SubproblemResult()
    nlp_result.feasible = True
    nlp_result.var_values = list(v.value for v in GDPopt.variable_list)
    nlp_result.pyomo_results = results
    nlp_result.dual_values = list(
        nlp_model.dual.get(c, None)
        for c in GDPopt.constraint_list)

    subprob_terminate_cond = results.solver.termination_condition
    if (subprob_terminate_cond is tc.optimal or
            subprob_terminate_cond is tc.locallyOptimal or
            subprob_terminate_cond is tc.feasible):
        pass
    elif subprob_terminate_cond is tc.infeasible:
        config.logger.info('NLP subproblem was infeasible.')
        nlp_result.feasible = False
    elif subprob_terminate_cond is tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'NLP subproblem failed to converge within iteration limit.')
        if is_feasible(nlp_model, config):
            config.logger.info(
                'NLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            nlp_result.feasible = False
    elif subprob_terminate_cond is tc.internalSolverError:
        # Possible that IPOPT had a restoration failure
        config.logger.info(
            "NLP solver had an internal failure: %s" % results.solver.message)
        nlp_result.feasible = False
    else:
        raise ValueError(
            'GDPopt unable to handle NLP subproblem termination '
            'condition of %s. Results: %s'
            % (subprob_terminate_cond, results))

    # Call the NLP post-solve callback
    config.call_after_subproblem_solve(nlp_model, solve_data)

    # if feasible, call the NLP post-feasible callback
    if nlp_result.feasible:
        config.call_after_subproblem_feasible(nlp_model, solve_data)

    return nlp_result
Example #9
File: nlp_solve.py  Project: Pyomo/pyomo
def solve_MINLP(model, solve_data, config):
    """Solve the MINLP subproblem."""
    config.logger.info(
        "Solving MINLP subproblem for fixed logical realizations."
    )

    GDPopt = model.GDPopt_utils

    if config.subproblem_presolve:
        preprocess_subproblem(model, config)

    initialize_subproblem(model, solve_data)

    # Callback immediately before solving the MINLP subproblem
    config.call_before_subproblem_solve(model, solve_data)

    minlp_solver = SolverFactory(config.minlp_solver)
    if not minlp_solver.available():
        raise RuntimeError("MINLP solver %s is not available." %
                           config.minlp_solver)
    with SuppressInfeasibleWarning():
        results = minlp_solver.solve(model, **config.minlp_solver_args)

    subprob_result = SubproblemResult()
    subprob_result.feasible = True
    subprob_result.var_values = list(v.value for v in GDPopt.variable_list)
    subprob_result.pyomo_results = results
    subprob_result.dual_values = list(
        model.dual.get(c, None)
        for c in GDPopt.constraint_list)

    subprob_terminate_cond = results.solver.termination_condition
    if (subprob_terminate_cond is tc.optimal or
            subprob_terminate_cond is tc.locallyOptimal or
            subprob_terminate_cond is tc.feasible):
        pass
    elif subprob_terminate_cond is tc.infeasible:
        config.logger.info('MINLP subproblem was infeasible.')
        subprob_result.feasible = False
    elif subprob_terminate_cond is tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'MINLP subproblem failed to converge within iteration limit.')
        if is_feasible(model, config):
            config.logger.info(
                'MINLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            subprob_result.feasible = False
    elif subprob_terminate_cond is tc.intermediateNonInteger:
        config.logger.info(
            "MINLP solver could not find feasible integer solution: %s" % results.solver.message)
        subprob_result.feasible = False
    else:
        raise ValueError(
            'GDPopt unable to handle MINLP subproblem termination '
            'condition of %s. Results: %s'
            % (subprob_terminate_cond, results))

    # Call the subproblem post-solve callback
    config.call_after_subproblem_solve(model, solve_data)

    # if feasible, call the subproblem post-feasible callback
    if subprob_result.feasible:
        config.call_after_subproblem_feasible(model, solve_data)

    return subprob_result
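For context, here is a hypothetical caller-side sketch of how a SubproblemResult returned by these helpers might be consumed. The loop and names below are illustrative only and are not taken from the listings above.

# Hypothetical usage sketch (illustrative, not part of the GDPopt source above):
result = solve_NLP(nlp_model, solve_data, config)
if result.feasible:
    # var_values is ordered to match GDPopt_utils.variable_list in every
    # example above, so the solution can be copied back positionally.
    for var, val in zip(nlp_model.GDPopt_utils.variable_list, result.var_values):
        var.set_value(val)
else:
    # Infeasibility is reported via the flag plus the stored pyomo_results;
    # the caller decides how to react (e.g., add a cut and continue).
    config.logger.info(
        "Subproblem termination: %s" %
        result.pyomo_results.solver.termination_condition)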