Example #1
    def add_lazy_oa_cuts(self,
                         target_model,
                         dual_values,
                         solve_data,
                         config,
                         opt,
                         linearize_active=True,
                         linearize_violated=True):
        """
        Linearizes nonlinear constraints and adds the OA cuts through CPLEX's native function self.add().

        For nonconvex problems, turn on 'config.add_slack'. Slack variables will
        always be used for nonlinear equality constraints.

        Parameters
        ----------
        target_model : Pyomo model
            The MIP/MILP model to which the OA cuts are added.
        dual_values : list
            The value of the duals for each constraint.
        solve_data : MindtPy data container
            Data container that holds solve-instance data.
        config : ConfigBlock
            The specific configurations for the algorithm.
        opt : SolverFactory
            The MIP solver.
        linearize_active : bool, optional
            Whether to linearize the active nonlinear constraints, by default True.
        linearize_violated : bool, optional
            Whether to linearize the violated nonlinear constraints, by default True.
        """

        config.logger.info("Adding OA cuts")
        for (constr,
             dual_value) in zip(target_model.MindtPy_utils.constraint_list,
                                dual_values):
            if constr.body.polynomial_degree() in (0, 1):
                continue

            constr_vars = list(identify_variables(constr.body))
            jacs = solve_data.jacobians

            # Equality constraint (makes the problem nonconvex)
            if constr.has_ub() and constr.has_lb() \
                    and constr.upper == constr.lower:
                sign_adjust = -1 if solve_data.objective_sense == minimize else 1
                # both bounds exist in this branch, so the right-hand side is the shared bound
                rhs = constr.lower

                # CPLEX requires lazy cuts in its own data structures, so transform the Pyomo expression into a CPLEX expression
                pyomo_expr = copysign(
                    1, sign_adjust * dual_value) * (sum(
                        value(jacs[constr][var]) * (var - value(var))
                        for var in list(EXPR.identify_variables(constr.body)))
                                                    + value(constr.body) - rhs)
                cplex_expr, _ = opt._get_expr_from_pyomo_expr(pyomo_expr)
                cplex_rhs = -generate_standard_repn(pyomo_expr).constant
                self.add(constraint=cplex.SparsePair(
                    ind=cplex_expr.variables, val=cplex_expr.coefficients),
                         sense="L",
                         rhs=cplex_rhs)
            else:  # Inequality constraint (possibly two-sided)
                if constr.has_ub() \
                    and (linearize_active and abs(constr.uslack()) < config.bound_tolerance) \
                        or (linearize_violated and constr.uslack() < 0) \
                        or (config.linearize_inactive and constr.uslack() > 0):

                    pyomo_expr = sum(
                        value(jacs[constr][var]) * (var - var.value)
                        for var in constr_vars) + value(constr.body)
                    cplex_rhs = -generate_standard_repn(pyomo_expr).constant
                    cplex_expr, _ = opt._get_expr_from_pyomo_expr(pyomo_expr)
                    self.add(constraint=cplex.SparsePair(
                        ind=cplex_expr.variables, val=cplex_expr.coefficients),
                             sense="L",
                             rhs=constr.upper.value + cplex_rhs)
                if constr.has_lb() \
                    and (linearize_active and abs(constr.lslack()) < config.bound_tolerance) \
                        or (linearize_violated and constr.lslack() < 0) \
                        or (config.linearize_inactive and constr.lslack() > 0):
                    pyomo_expr = sum(
                        value(jacs[constr][var]) * (var - self.get_values(
                            opt._pyomo_var_to_solver_var_map[var]))
                        for var in constr_vars) + value(constr.body)
                    cplex_rhs = -generate_standard_repn(pyomo_expr).constant
                    cplex_expr, _ = opt._get_expr_from_pyomo_expr(pyomo_expr)
                    self.add(constraint=cplex.SparsePair(
                        ind=cplex_expr.variables, val=cplex_expr.coefficients),
                             sense="G",
                             rhs=constr.lower.value + cplex_rhs)
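For context, self.add(...) and self.get_values(...) above are inherited from CPLEX's LazyConstraintCallback class; this method is meant to live on a callback that has been registered with the CPLEX Python API. A minimal, hypothetical sketch of that wiring (toy column indices and a made-up cut, nothing MindtPy-specific) might look like:

import cplex
from cplex.callbacks import LazyConstraintCallback

class ToyLazyCutCallback(LazyConstraintCallback):
    """Hypothetical callback: lazily enforces x0 + x1 <= 10 at every incumbent."""
    def __call__(self):
        vals = self.get_values([0, 1])            # incumbent values of the first two columns
        if vals[0] + vals[1] > 10 + 1e-6:         # violated, so add the cut lazily
            self.add(constraint=cplex.SparsePair(ind=[0, 1], val=[1.0, 1.0]),
                     sense='L',
                     rhs=10.0)

cpx = cplex.Cplex()
# ... build a model with at least two columns, then register the callback:
cpx.register_callback(ToyLazyCutCallback)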
Example #2
def add_affine_cuts(solve_data, config):
    """Adds affine cuts using MCPP.

    Parameters
    ----------
    solve_data : MindtPySolveData
        Data container that holds solve-instance data.
    config : ConfigBlock
        The specific configurations for MindtPy.
    """
    with time_code(solve_data.timing, 'Affine cut generation'):
        m = solve_data.mip
        config.logger.debug('Adding affine cuts')
        counter = 0

        for constr in m.MindtPy_utils.nonlinear_constraint_list:
            vars_in_constr = list(
                identify_variables(constr.body))
            if any(var.value is None for var in vars_in_constr):
                continue  # a variable has no values

            # mcpp stuff
            try:
                mc_eqn = mc(constr.body)
            except MCPP_Error as e:
                config.logger.debug(
                    'Skipping constraint %s due to MCPP error %s' % (constr.name, str(e)))
                continue  # skip to the next constraint

            ccSlope = mc_eqn.subcc()
            cvSlope = mc_eqn.subcv()
            ccStart = mc_eqn.concave()
            cvStart = mc_eqn.convex()

            # Check whether ccSlope and cvSlope contain NaN or inf; if they do, skip the corresponding cut.
            concave_cut_valid = True
            convex_cut_valid = True
            for var in vars_in_constr:
                if not var.fixed:
                    if ccSlope[var] == float('nan') or ccSlope[var] == float('inf'):
                        concave_cut_valid = False
                    if cvSlope[var] == float('nan') or cvSlope[var] == float('inf'):
                        convex_cut_valid = False
            # Check whether all ccSlope/cvSlope entries are zero; if they are, skip the corresponding cut.
            if not any(list(ccSlope.values())):
                concave_cut_valid = False
            if not any(list(cvSlope.values())):
                convex_cut_valid = False
            if ccStart == float('nan') or ccStart == float('inf'):
                concave_cut_valid = False
            if cvStart == float('nan') or cvStart == float('inf'):
                convex_cut_valid = False
            if not (concave_cut_valid or convex_cut_valid):
                continue

            ub_int = min(value(constr.upper), mc_eqn.upper()
                         ) if constr.has_ub() else mc_eqn.upper()
            lb_int = max(value(constr.lower), mc_eqn.lower()
                         ) if constr.has_lb() else mc_eqn.lower()

            aff_cuts = m.MindtPy_utils.cuts.aff_cuts
            if concave_cut_valid:
                concave_cut = sum(ccSlope[var] * (var - var.value)
                                  for var in vars_in_constr
                                  if not var.fixed) + ccStart >= lb_int
                aff_cuts.add(expr=concave_cut)
                counter += 1
            if convex_cut_valid:
                convex_cut = sum(cvSlope[var] * (var - var.value)
                                 for var in vars_in_constr
                                 if not var.fixed) + cvStart <= ub_int
                aff_cuts.add(expr=convex_cut)
                counter += 1

        config.logger.debug('Added %s affine cuts' % counter)
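One caveat about the validity checks above: a comparison such as ccSlope[var] == float('nan') can never be True, because NaN compares unequal to everything (including itself), so only the inf half of each test can actually trip. A more robust check, sketched with the standard library (the helper name is made up):

import math

def usable(x):
    # True if x is a finite, non-NaN slope or intercept
    return not (math.isnan(x) or math.isinf(x))

# e.g. the concave-cut validity test could then be written as:
# concave_cut_valid = (all(usable(ccSlope[var]) for var in vars_in_constr if not var.fixed)
#                      and usable(ccStart) and any(ccSlope.values()))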
Example #3
    def add_lazy_affine_cuts(self, solve_data, config, opt):
        """
        Adds affine cuts using MCPP; the affine cuts are added through CPLEX's native function self.add().

        Parameters
        ----------
        solve_data : MindtPy data container
            Data container that holds solve-instance data.
        config : ConfigBlock
            The specific configurations for the algorithm.
        opt : SolverFactory
            The MIP solver.
        """
        m = solve_data.mip
        config.logger.info("Adding affine cuts")
        counter = 0

        for constr in m.MindtPy_utils.constraint_list:
            if constr.body.polynomial_degree() in (1, 0):
                continue

            vars_in_constr = list(identify_variables(constr.body))
            if any(var.value is None for var in vars_in_constr):
                continue  # a variable has no values

            # mcpp stuff
            try:
                mc_eqn = mc(constr.body)
            except MCPP_Error as e:
                config.logger.debug(
                    "Skipping constraint %s due to MCPP error %s" %
                    (constr.name, str(e)))
                continue  # skip to the next constraint
            # TODO: check whether ccSlope and cvSlope contain NaN or inf; if they do, skip the corresponding cut.
            ccSlope = mc_eqn.subcc()
            cvSlope = mc_eqn.subcv()
            ccStart = mc_eqn.concave()
            cvStart = mc_eqn.convex()

            concave_cut_valid = True
            convex_cut_valid = True
            for var in vars_in_constr:
                if not var.fixed:
                    if ccSlope[var] == float('nan') or ccSlope[var] == float(
                            'inf'):
                        concave_cut_valid = False
                    if cvSlope[var] == float('nan') or cvSlope[var] == float(
                            'inf'):
                        convex_cut_valid = False
            if ccStart == float('nan') or ccStart == float('inf'):
                concave_cut_valid = False
            if cvStart == float('nan') or cvStart == float('inf'):
                convex_cut_valid = False
            # Check whether all ccSlope/cvSlope entries are zero; if they are, skip the corresponding cut.
            if not any(list(ccSlope.values())):
                concave_cut_valid = False
            if not any(list(cvSlope.values())):
                convex_cut_valid = False
            if not (concave_cut_valid or convex_cut_valid):
                continue

            ub_int = min(
                constr.upper,
                mc_eqn.upper()) if constr.has_ub() else mc_eqn.upper()
            lb_int = max(
                constr.lower,
                mc_eqn.lower()) if constr.has_lb() else mc_eqn.lower()

            parent_block = constr.parent_block()
            # Create a block on which to put outer approximation cuts.
            # TODO: create it at the beginning.
            aff_utils = parent_block.component('MindtPy_aff')
            if aff_utils is None:
                aff_utils = parent_block.MindtPy_aff = Block(
                    doc="Block holding affine constraints")
                aff_utils.MindtPy_aff_cons = ConstraintList()
            aff_cuts = aff_utils.MindtPy_aff_cons
            if concave_cut_valid:
                pyomo_concave_cut = sum(ccSlope[var] * (var - var.value)
                                        for var in vars_in_constr
                                        if not var.fixed) + ccStart
                cplex_concave_rhs = generate_standard_repn(
                    pyomo_concave_cut).constant
                cplex_concave_cut, _ = opt._get_expr_from_pyomo_expr(
                    pyomo_concave_cut)
                self.add(constraint=cplex.SparsePair(
                    ind=cplex_concave_cut.variables,
                    val=cplex_concave_cut.coefficients),
                         sense="G",
                         rhs=lb_int - cplex_concave_rhs)
                counter += 1
            if convex_cut_valid:
                pyomo_convex_cut = sum(cvSlope[var] * (var - var.value)
                                       for var in vars_in_constr
                                       if not var.fixed) + cvStart
                cplex_convex_rhs = generate_standard_repn(
                    pyomo_convex_cut).constant
                cplex_convex_cut, _ = opt._get_expr_from_pyomo_expr(
                    pyomo_convex_cut)
                self.add(constraint=cplex.SparsePair(
                    ind=cplex_convex_cut.variables,
                    val=cplex_convex_cut.coefficients),
                         sense="L",
                         rhs=ub_int - cplex_convex_rhs)
                # aff_cuts.add(expr=convex_cut)
                counter += 1

        config.logger.info("Added %s affine cuts" % counter)
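The parent_block.component('MindtPy_aff') lookup above is the usual Pyomo idiom for creating a cut container lazily: check for the component by name and build the Block with its ConstraintList only on first use. A self-contained sketch of the same pattern, with toy names:

from pyomo.environ import ConcreteModel, Var, Block, ConstraintList

m = ConcreteModel()
m.x = Var(initialize=1.0)

def get_cut_container(blk, name='toy_cuts'):
    # return blk.<name>.cons, creating the Block and ConstraintList on first use
    container = blk.component(name)
    if container is None:
        container = Block(doc='Block holding generated cuts')
        blk.add_component(name, container)
        container.cons = ConstraintList()
    return container.cons

cuts = get_cut_container(m)
cuts.add(expr=m.x <= 5)   # later calls reuse the same ConstraintList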
Example #4
def add_ecp_cuts(target_model, solve_data, config,
                 linearize_active=True,
                 linearize_violated=True):
    """Linearizes nonlinear constraints. Adds the cuts for the ECP method.

    Parameters
    ----------
    target_model : Pyomo model
        The relaxed linear model.
    solve_data : MindtPySolveData
        Data container that holds solve-instance data.
    config : ConfigBlock
        The specific configurations for MindtPy.
    linearize_active : bool, optional
        Whether to linearize the active nonlinear constraints, by default True.
    linearize_violated : bool, optional
        Whether to linearize the violated nonlinear constraints, by default True.
    """
    with time_code(solve_data.timing, 'ECP cut generation'):
        for constr in target_model.MindtPy_utils.nonlinear_constraint_list:
            constr_vars = list(identify_variables(constr.body))
            jacs = solve_data.jacobians

            if constr.has_lb() and constr.has_ub():
                config.logger.warning(
                    'constraint {} has both a lower '
                    'and upper bound.'
                    '\n'.format(
                        constr))
                continue
            if constr.has_ub():
                try:
                    upper_slack = constr.uslack()
                except (ValueError, OverflowError):
                    config.logger.warning(
                        'constraint {} has caused either a '
                        'ValueError or OverflowError.'
                        '\n'.format(
                            constr))
                    continue
                if (linearize_active and abs(upper_slack) < config.ecp_tolerance) \
                        or (linearize_violated and upper_slack < 0) \
                        or (config.linearize_inactive and upper_slack > 0):
                    if config.add_slack:
                        slack_var = target_model.MindtPy_utils.cuts.slack_vars.add()

                    target_model.MindtPy_utils.cuts.ecp_cuts.add(
                        expr=(sum(value(jacs[constr][var])*(var - var.value)
                                  for var in constr_vars)
                              - (slack_var if config.add_slack else 0)
                              <= upper_slack)
                    )

            if constr.has_lb():
                try:
                    lower_slack = constr.lslack()
                except (ValueError, OverflowError):
                    config.logger.warning(
                        'constraint {} has caused either a '
                        'ValueError or OverflowError.'
                        '\n'.format(
                            constr))
                    continue
                if (linearize_active and abs(lower_slack) < config.ecp_tolerance) \
                        or (linearize_violated and lower_slack < 0) \
                        or (config.linearize_inactive and lower_slack > 0):
                    if config.add_slack:
                        slack_var = target_model.MindtPy_utils.cuts.slack_vars.add()

                    target_model.MindtPy_utils.cuts.ecp_cuts.add(
                        expr=(sum(value(jacs[constr][var])*(var - var.value)
                                  for var in constr_vars)
                              + (slack_var if config.add_slack else 0)
                              >= -lower_slack)
                    )
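All of these ECP cuts reduce to the same first-order approximation: a nonlinear g(x) <= b is replaced at the current point x_bar by g(x_bar) + grad_g(x_bar)*(x - x_bar) <= b; the code writes it as sum(jac*(var - var.value)) <= upper_slack, which is the same inequality since upper_slack = b - g(x_bar). A minimal stand-alone sketch on a toy constraint, with the gradient written out by hand (so nothing here depends on solve_data.jacobians):

from pyomo.environ import ConcreteModel, Var, Constraint, ConstraintList, value

m = ConcreteModel()
m.x = Var(initialize=3.0)
m.g = Constraint(expr=m.x**2 <= 4)    # nonlinear constraint, violated at x = 3
m.ecp_cuts = ConstraintList()

if m.g.uslack() < 0:                  # the constraint is violated at the current point
    x_bar = value(m.x)
    grad = 2 * x_bar                  # d/dx (x**2) evaluated at x_bar
    # linearized cut: g(x_bar) + grad*(x - x_bar) <= upper bound
    m.ecp_cuts.add(expr=value(m.g.body) + grad * (m.x - x_bar) <= value(m.g.upper))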
Example #5
def add_oa_cuts(target_model, dual_values, solve_data, config,
                cb_opt=None,
                linearize_active=True,
                linearize_violated=True):
    """Adds OA cuts.

    Generates and adds OA cuts (linearizes nonlinear constraints).
    For nonconvex problems, turn on 'config.add_slack'. 
    Slack variables will always be used for nonlinear equality constraints.

    Parameters
    ----------
    target_model : Pyomo model
        The relaxed linear model.
    dual_values : list
        The value of the duals for each constraint.
    solve_data : MindtPySolveData
        Data container that holds solve-instance data.
    config : ConfigBlock
        The specific configurations for MindtPy.
    cb_opt : SolverFactory, optional
        Gurobi_persistent solver, by default None.
    linearize_active : bool, optional
        Whether to linearize the active nonlinear constraints, by default True.
    linearize_violated : bool, optional
        Whether to linearize the violated nonlinear constraints, by default True.
    """
    with time_code(solve_data.timing, 'OA cut generation'):
        for index, constr in enumerate(target_model.MindtPy_utils.constraint_list):
            # TODO: here the index is correlated to the duals, try if this can be fixed when temp duals are removed.
            if constr.body.polynomial_degree() in {0, 1}:
                continue

            constr_vars = list(identify_variables(constr.body))
            jacs = solve_data.jacobians

            # Equality constraint (makes the problem nonconvex)
            if constr.has_ub() and constr.has_lb() and value(constr.lower) == value(constr.upper) and config.equality_relaxation:
                sign_adjust = -1 if solve_data.objective_sense == minimize else 1
                rhs = constr.lower
                if config.add_slack:
                    slack_var = target_model.MindtPy_utils.cuts.slack_vars.add()
                target_model.MindtPy_utils.cuts.oa_cuts.add(
                    expr=copysign(1, sign_adjust * dual_values[index])
                    * (sum(value(jacs[constr][var]) * (var - value(var))
                           for var in EXPR.identify_variables(constr.body))
                        + value(constr.body) - rhs)
                    - (slack_var if config.add_slack else 0) <= 0)
                if config.single_tree and config.mip_solver == 'gurobi_persistent' and solve_data.mip_iter > 0 and cb_opt is not None:
                    cb_opt.cbLazy(
                        target_model.MindtPy_utils.cuts.oa_cuts[len(target_model.MindtPy_utils.cuts.oa_cuts)])

            else:  # Inequality constraint (possibly two-sided)
                if (constr.has_ub()
                    and (linearize_active and abs(constr.uslack()) < config.zero_tolerance)
                        or (linearize_violated and constr.uslack() < 0)
                        or (config.linearize_inactive and constr.uslack() > 0)) or ('MindtPy_utils.objective_constr' in constr.name and constr.has_ub()):
                    # always add the linearization for the epigraph of the objective
                    if config.add_slack:
                        slack_var = target_model.MindtPy_utils.cuts.slack_vars.add()

                    target_model.MindtPy_utils.cuts.oa_cuts.add(
                        expr=(sum(value(jacs[constr][var])*(var - var.value)
                                  for var in constr_vars) + value(constr.body)
                              - (slack_var if config.add_slack else 0)
                              <= value(constr.upper))
                    )
                    if config.single_tree and config.mip_solver == 'gurobi_persistent' and solve_data.mip_iter > 0 and cb_opt is not None:
                        cb_opt.cbLazy(
                            target_model.MindtPy_utils.cuts.oa_cuts[len(target_model.MindtPy_utils.cuts.oa_cuts)])

                if (constr.has_lb()
                    and (linearize_active and abs(constr.lslack()) < config.zero_tolerance)
                        or (linearize_violated and constr.lslack() < 0)
                        or (config.linearize_inactive and constr.lslack() > 0)) or ('MindtPy_utils.objective_constr' in constr.name and constr.has_lb()):
                    if config.add_slack:
                        slack_var = target_model.MindtPy_utils.cuts.slack_vars.add()

                    target_model.MindtPy_utils.cuts.oa_cuts.add(
                        expr=(sum(value(jacs[constr][var])*(var - var.value)
                                  for var in constr_vars) + value(constr.body)
                              + (slack_var if config.add_slack else 0)
                              >= value(constr.lower))
                    )
                    if config.single_tree and config.mip_solver == 'gurobi_persistent' and solve_data.mip_iter > 0 and cb_opt is not None:
                        cb_opt.cbLazy(
                            target_model.MindtPy_utils.cuts.oa_cuts[len(target_model.MindtPy_utils.cuts.oa_cuts)])
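The cb_opt.cbLazy(...) calls above rely on Pyomo's gurobi_persistent interface, which can pass a Pyomo constraint to Gurobi as a lazy constraint from inside a solver callback. A rough sketch of how such a callback is wired up (hypothetical separation check; running it requires a Gurobi installation):

from pyomo.environ import (ConcreteModel, Var, Objective, ConstraintList,
                           Integers, SolverFactory)

m = ConcreteModel()
m.x = Var(bounds=(0, 10), domain=Integers, initialize=0)
m.y = Var(bounds=(0, 10), domain=Integers, initialize=0)
m.obj = Objective(expr=-m.x - m.y)
m.cuts = ConstraintList()

opt = SolverFactory('gurobi_persistent')
opt.set_instance(m)

def toy_callback(cb_m, cb_opt, cb_where):
    from gurobipy import GRB
    if cb_where == GRB.Callback.MIPSOL:
        cb_opt.cbGetSolution(vars=[cb_m.x, cb_m.y])   # load incumbent values into the Pyomo vars
        if cb_m.x.value + cb_m.y.value > 8 + 1e-6:    # hypothetical separation condition
            cb_m.cuts.add(expr=cb_m.x + cb_m.y <= 8)
            cb_opt.cbLazy(cb_m.cuts[len(cb_m.cuts)])  # hand the newest cut to Gurobi lazily

opt.set_callback(toy_callback)
opt.set_gurobi_param('LazyConstraints', 1)
# opt.solve()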
Example #6
    def add_lazy_affine_cuts(self, solve_data, config, opt):
        """Adds affine cuts using MCPP.

        Add affine cuts through Cplex inherent function self.add().

        Parameters
        ----------
        solve_data : MindtPySolveData
            Data container that holds solve-instance data.
        config : ConfigBlock
            The specific configurations for MindtPy.
        opt : SolverFactory
            The cplex_persistent solver.
        """
        with time_code(solve_data.timing, 'Affine cut generation'):
            m = solve_data.mip
            config.logger.debug('Adding affine cuts')
            counter = 0

            for constr in m.MindtPy_utils.nonlinear_constraint_list:

                vars_in_constr = list(identify_variables(constr.body))
                if any(var.value is None for var in vars_in_constr):
                    continue  # a variable has no values

                # mcpp stuff
                try:
                    mc_eqn = mc(constr.body)
                except MCPP_Error as e:
                    config.logger.debug(
                        'Skipping constraint %s due to MCPP error %s' %
                        (constr.name, str(e)))
                    continue  # skip to the next constraint
                # TODO: check whether ccSlope and cvSlope contain NaN or inf; if they do, skip the corresponding cut.
                ccSlope = mc_eqn.subcc()
                cvSlope = mc_eqn.subcv()
                ccStart = mc_eqn.concave()
                cvStart = mc_eqn.convex()

                concave_cut_valid = True
                convex_cut_valid = True
                for var in vars_in_constr:
                    if not var.fixed:
                        if ccSlope[var] == float(
                                'nan') or ccSlope[var] == float('inf'):
                            concave_cut_valid = False
                        if cvSlope[var] == float(
                                'nan') or cvSlope[var] == float('inf'):
                            convex_cut_valid = False
                if ccStart == float('nan') or ccStart == float('inf'):
                    concave_cut_valid = False
                if cvStart == float('nan') or cvStart == float('inf'):
                    convex_cut_valid = False
                # Check whether all ccSlope/cvSlope entries are zero; if they are, skip the corresponding cut.
                if not any(ccSlope.values()):
                    concave_cut_valid = False
                if not any(cvSlope.values()):
                    convex_cut_valid = False
                if not (concave_cut_valid or convex_cut_valid):
                    continue

                ub_int = min(
                    value(constr.upper),
                    mc_eqn.upper()) if constr.has_ub() else mc_eqn.upper()
                lb_int = max(
                    value(constr.lower),
                    mc_eqn.lower()) if constr.has_lb() else mc_eqn.lower()

                if concave_cut_valid:
                    pyomo_concave_cut = sum(ccSlope[var] * (var - var.value)
                                            for var in vars_in_constr
                                            if not var.fixed) + ccStart
                    cplex_concave_rhs = generate_standard_repn(
                        pyomo_concave_cut).constant
                    cplex_concave_cut, _ = opt._get_expr_from_pyomo_expr(
                        pyomo_concave_cut)
                    self.add(constraint=cplex.SparsePair(
                        ind=cplex_concave_cut.variables,
                        val=cplex_concave_cut.coefficients),
                             sense='G',
                             rhs=lb_int - cplex_concave_rhs)
                    counter += 1
                if convex_cut_valid:
                    pyomo_convex_cut = sum(cvSlope[var] * (var - var.value)
                                           for var in vars_in_constr
                                           if not var.fixed) + cvStart
                    cplex_convex_rhs = generate_standard_repn(
                        pyomo_convex_cut).constant
                    cplex_convex_cut, _ = opt._get_expr_from_pyomo_expr(
                        pyomo_convex_cut)
                    self.add(constraint=cplex.SparsePair(
                        ind=cplex_convex_cut.variables,
                        val=cplex_convex_cut.coefficients),
                             sense='L',
                             rhs=ub_int - cplex_convex_rhs)
                    counter += 1

            config.logger.info('Added %s affine cuts' % counter)
Example #7
    def add_lazy_oa_cuts(self,
                         target_model,
                         dual_values,
                         solve_data,
                         config,
                         opt,
                         linearize_active=True,
                         linearize_violated=True):
        """Linearizes nonlinear constraints and adds the OA cuts through CPLEX's native function self.add().

        For nonconvex problems, turn on 'config.add_slack'. Slack variables will
        always be used for nonlinear equality constraints.

        Parameters
        ----------
        target_model : Pyomo model
            The MIP main problem.
        dual_values : list
            The value of the duals for each constraint.
        solve_data : MindtPySolveData
            Data container that holds solve-instance data.
        config : ConfigBlock
            The specific configurations for MindtPy.
        opt : SolverFactory
            The cplex_persistent solver.
        linearize_active : bool, optional
            Whether to linearize the active nonlinear constraints, by default True.
        linearize_violated : bool, optional
            Whether to linearize the violated nonlinear constraints, by default True.
        """
        config.logger.debug('Adding OA cuts')
        with time_code(solve_data.timing, 'OA cut generation'):
            for index, constr in enumerate(
                    target_model.MindtPy_utils.constraint_list):
                if constr.body.polynomial_degree(
                ) in solve_data.mip_constraint_polynomial_degree:
                    continue

                constr_vars = list(identify_variables(constr.body))
                jacs = solve_data.jacobians

                # Equality constraint (makes the problem nonconvex)
                if constr.has_ub() and constr.has_lb() and value(
                        constr.lower) == value(constr.upper):
                    sign_adjust = -1 if solve_data.objective_sense == minimize else 1
                    rhs = constr.lower

                    # CPLEX requires lazy cuts in its own data structures, so transform the Pyomo expression into a CPLEX expression
                    pyomo_expr = copysign(
                        1, sign_adjust *
                        dual_values[index]) * (sum(
                            value(jacs[constr][var]) * (var - value(var))
                            for var in EXPR.identify_variables(constr.body)) +
                                               value(constr.body) - rhs)
                    cplex_expr, _ = opt._get_expr_from_pyomo_expr(pyomo_expr)
                    cplex_rhs = -generate_standard_repn(pyomo_expr).constant
                    self.add(constraint=cplex.SparsePair(
                        ind=cplex_expr.variables, val=cplex_expr.coefficients),
                             sense='L',
                             rhs=cplex_rhs)
                else:  # Inequality constraint (possibly two-sided)
                    if (constr.has_ub() and
                        (linearize_active
                         and abs(constr.uslack()) < config.zero_tolerance) or
                        (linearize_violated and constr.uslack() < 0) or
                        (config.linearize_inactive and constr.uslack() > 0)
                        ) or ('MindtPy_utils.objective_constr' in constr.name
                              and constr.has_ub()):

                        pyomo_expr = sum(
                            value(jacs[constr][var]) * (var - var.value)
                            for var in constr_vars) + value(constr.body)
                        cplex_rhs = - \
                            generate_standard_repn(pyomo_expr).constant
                        cplex_expr, _ = opt._get_expr_from_pyomo_expr(
                            pyomo_expr)
                        self.add(constraint=cplex.SparsePair(
                            ind=cplex_expr.variables,
                            val=cplex_expr.coefficients),
                                 sense='L',
                                 rhs=value(constr.upper) + cplex_rhs)
                    if (constr.has_lb() and
                        (linearize_active
                         and abs(constr.lslack()) < config.zero_tolerance) or
                        (linearize_violated and constr.lslack() < 0) or
                        (config.linearize_inactive and constr.lslack() > 0)
                        ) or ('MindtPy_utils.objective_constr' in constr.name
                              and constr.has_lb()):
                        pyomo_expr = sum(
                            value(jacs[constr][var]) * (var - self.get_values(
                                opt._pyomo_var_to_solver_var_map[var]))
                            for var in constr_vars) + value(constr.body)
                        cplex_rhs = - \
                            generate_standard_repn(pyomo_expr).constant
                        cplex_expr, _ = opt._get_expr_from_pyomo_expr(
                            pyomo_expr)
                        self.add(constraint=cplex.SparsePair(
                            ind=cplex_expr.variables,
                            val=cplex_expr.coefficients),
                                 sense='G',
                                 rhs=value(constr.lower) + cplex_rhs)
Example #8
    def add_lazy_oa_cuts(self,
                         target_model,
                         dual_values,
                         solve_data,
                         config,
                         opt,
                         linearize_active=True,
                         linearize_violated=True,
                         linearize_inactive=False):
        """Adds OA cuts through CPLEX's native function self.add()."""

        for (constr,
             dual_value) in zip(target_model.MindtPy_utils.constraint_list,
                                dual_values):
            if constr.body.polynomial_degree() in (0, 1):
                continue

            constr_vars = list(identify_variables(constr.body))
            jacs = solve_data.jacobians

            # Equality constraint (makes the problem nonconvex)
            if constr.has_ub() and constr.has_lb(
            ) and constr.upper == constr.lower:
                sign_adjust = -1 if solve_data.objective_sense == minimize else 1
                rhs = ((0 if constr.upper is None else constr.upper) +
                       (0 if constr.lower is None else constr.lower))
                rhs = constr.lower if constr.has_lb() and constr.has_ub(
                ) else rhs

                # CPLEX requires lazy cuts in its own data structures, so transform the Pyomo expression into a CPLEX expression
                pyomo_expr = copysign(
                    1, sign_adjust * dual_value) * (sum(
                        value(jacs[constr][var]) * (var - value(var))
                        for var in list(EXPR.identify_variables(constr.body)))
                                                    + value(constr.body) - rhs)
                cplex_expr, _ = opt._get_expr_from_pyomo_expr(pyomo_expr)
                cplex_rhs = -generate_standard_repn(pyomo_expr).constant
                self.add(constraint=cplex.SparsePair(
                    ind=cplex_expr.variables, val=cplex_expr.coefficients),
                         sense="L",
                         rhs=cplex_rhs)
            else:  # Inequality constraint (possibly two-sided)
                if constr.has_ub() \
                    and (linearize_active and abs(constr.uslack()) < config.zero_tolerance) \
                        or (linearize_violated and constr.uslack() < 0) \
                        or (linearize_inactive and constr.uslack() > 0):

                    pyomo_expr = sum(
                        value(jacs[constr][var]) * (var - var.value)
                        for var in constr_vars) + value(constr.body)
                    cplex_rhs = -generate_standard_repn(pyomo_expr).constant
                    cplex_expr, _ = opt._get_expr_from_pyomo_expr(pyomo_expr)
                    self.add(constraint=cplex.SparsePair(
                        ind=cplex_expr.variables, val=cplex_expr.coefficients),
                             sense="L",
                             rhs=constr.upper.value + cplex_rhs)
                if constr.has_lb() \
                    and (linearize_active and abs(constr.lslack()) < config.zero_tolerance) \
                        or (linearize_violated and constr.lslack() < 0) \
                        or (linearize_inactive and constr.lslack() > 0):
                    pyomo_expr = sum(
                        value(jacs[constr][var]) * (var - self.get_values(
                            opt._pyomo_var_to_solver_var_map[var]))
                        for var in constr_vars) + value(constr.body)
                    cplex_rhs = -generate_standard_repn(pyomo_expr).constant
                    cplex_expr, _ = opt._get_expr_from_pyomo_expr(pyomo_expr)
                    self.add(constraint=cplex.SparsePair(
                        ind=cplex_expr.variables, val=cplex_expr.coefficients),
                             sense="G",
                             rhs=constr.lower.value + cplex_rhs)
Example #9
def add_oa_cuts(target_model,
                dual_values,
                solve_data,
                config,
                linearize_active=True,
                linearize_violated=True,
                linearize_inactive=False,
                use_slack_var=False):
    """Linearizes nonlinear constraints.

    For nonconvex problems, turn on 'use_slack_var'. Slack variables will
    always be used for nonlinear equality constraints.
    """
    for (constr, dual_value) in zip(target_model.MindtPy_utils.constraint_list,
                                    dual_values):
        if constr.body.polynomial_degree() in (0, 1):
            continue

        constr_vars = list(identify_variables(constr.body))
        jacs = solve_data.jacobians

        # Equality constraint (makes the problem nonconvex)
        if constr.has_ub() and constr.has_lb(
        ) and constr.upper == constr.lower:
            sign_adjust = -1 if solve_data.objective_sense == minimize else 1
            rhs = ((0 if constr.upper is None else constr.upper) +
                   (0 if constr.lower is None else constr.lower))
            rhs = constr.lower if constr.has_lb() and constr.has_ub() else rhs
            slack_var = target_model.MindtPy_utils.MindtPy_linear_cuts.slack_vars.add(
            )
            target_model.MindtPy_utils.MindtPy_linear_cuts.oa_cuts.add(
                expr=copysign(1, sign_adjust * dual_value) *
                (sum(
                    value(jacs[constr][var]) * (var - value(var))
                    for var in list(EXPR.identify_variables(constr.body))) +
                 value(constr.body) - rhs) - slack_var <= 0)

        else:  # Inequality constraint (possibly two-sided)
            if constr.has_ub() \
               and (linearize_active and abs(constr.uslack()) < config.zero_tolerance) \
                    or (linearize_violated and constr.uslack() < 0) \
                    or (linearize_inactive and constr.uslack() > 0):
                if use_slack_var:
                    slack_var = target_model.MindtPy_utils.MindtPy_linear_cuts.slack_vars.add(
                    )

                target_model.MindtPy_utils.MindtPy_linear_cuts.oa_cuts.add(
                    expr=(sum(
                        value(jacs[constr][var]) * (var - var.value)
                        for var in constr_vars) -
                          (slack_var if use_slack_var else 0) <= constr.upper))

            if constr.has_lb() \
               and (linearize_active and abs(constr.lslack()) < config.zero_tolerance) \
                    or (linearize_violated and constr.lslack() < 0) \
                    or (linearize_inactive and constr.lslack() > 0):
                if use_slack_var:
                    slack_var = target_model.MindtPy_utils.MindtPy_linear_cuts.slack_vars.add(
                    )

                target_model.MindtPy_utils.MindtPy_linear_cuts.oa_cuts.add(
                    expr=(sum(
                        value(jacs[constr][var]) * (var - var.value)
                        for var in constr_vars) +
                          (slack_var if use_slack_var else 0) >= constr.lower))
Example #10
def add_affine_cuts(solve_data, config):
    """
    Adds affine cuts using MCPP; modifies the model to include the affine cuts.

    Parameters
    ----------
    solve_data : MindtPy data container
        Data container that holds solve-instance data.
    config : ConfigBlock
        The specific configurations for the algorithm.
    """

    m = solve_data.mip
    config.logger.info("Adding affine cuts")
    counter = 0

    for constr in m.MindtPy_utils.constraint_list:
        if constr.body.polynomial_degree() in (1, 0):
            continue

        vars_in_constr = list(identify_variables(constr.body))
        if any(var.value is None for var in vars_in_constr):
            continue  # a variable has no values

        # mcpp stuff
        try:
            mc_eqn = mc(constr.body)
        except MCPP_Error as e:
            config.logger.debug("Skipping constraint %s due to MCPP error %s" %
                                (constr.name, str(e)))
            continue  # skip to the next constraint

        ccSlope = mc_eqn.subcc()
        cvSlope = mc_eqn.subcv()
        ccStart = mc_eqn.concave()
        cvStart = mc_eqn.convex()

        # Check whether ccSlope and cvSlope contain NaN or inf; if they do, skip the corresponding cut.
        concave_cut_valid = True
        convex_cut_valid = True
        for var in vars_in_constr:
            if not var.fixed:
                if ccSlope[var] == float('nan') or ccSlope[var] == float(
                        'inf'):
                    concave_cut_valid = False
                if cvSlope[var] == float('nan') or cvSlope[var] == float(
                        'inf'):
                    convex_cut_valid = False
        # Check whether all ccSlope/cvSlope entries are zero; if they are, skip the corresponding cut.
        if not any(list(ccSlope.values())):
            concave_cut_valid = False
        if not any(list(cvSlope.values())):
            convex_cut_valid = False
        if ccStart == float('nan') or ccStart == float('inf'):
            concave_cut_valid = False
        if cvStart == float('nan') or cvStart == float('inf'):
            convex_cut_valid = False
        if not (concave_cut_valid or convex_cut_valid):
            continue

        ub_int = min(constr.upper,
                     mc_eqn.upper()) if constr.has_ub() else mc_eqn.upper()
        lb_int = max(constr.lower,
                     mc_eqn.lower()) if constr.has_lb() else mc_eqn.lower()

        parent_block = constr.parent_block()
        # Create a block on which to put outer approximation cuts.
        # TODO: create it at the beginning.
        aff_utils = parent_block.component('MindtPy_aff')
        if aff_utils is None:
            aff_utils = parent_block.MindtPy_aff = Block(
                doc="Block holding affine constraints")
            aff_utils.MindtPy_aff_cons = ConstraintList()
        aff_cuts = aff_utils.MindtPy_aff_cons
        if concave_cut_valid:
            concave_cut = sum(ccSlope[var] * (var - var.value)
                              for var in vars_in_constr
                              if not var.fixed) + ccStart >= lb_int
            aff_cuts.add(expr=concave_cut)
            counter += 1
        if convex_cut_valid:
            convex_cut = sum(cvSlope[var] * (var - var.value)
                             for var in vars_in_constr
                             if not var.fixed) + cvStart <= ub_int
            aff_cuts.add(expr=convex_cut)
            counter += 1

    config.logger.info("Added %s affine cuts" % counter)
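What mc(constr.body) supplies above is a McCormick-style relaxation of the nonlinear expression around the current point: subcv()/convex() give the slope and intercept of an affine underestimator, and subcc()/concave() those of an affine overestimator, so the two generated cuts bracket the constraint body. As a self-contained illustration of the idea (no MC++ involved, toy bounds), the classic McCormick underestimators of a bilinear term w = x*y on a box can be written by hand:

# McCormick underestimators of w = x*y for x in [xL, xU], y in [yL, yU]:
#   w >= xL*y + x*yL - xL*yL
#   w >= xU*y + x*yU - xU*yU
# (the overestimators use the mixed bound pairings)
xL, xU, yL, yU = 0.0, 2.0, 1.0, 3.0

def mccormick_under(x, y):
    # the tighter (larger) of the two affine underestimators of x*y on the box
    return max(xL * y + x * yL - xL * yL,
               xU * y + x * yU - xU * yU)

x, y = 1.5, 2.0
assert mccormick_under(x, y) <= x * y   # an underestimator never exceeds the true product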
Example #11
def add_ecp_cuts(target_model,
                 solve_data,
                 config,
                 linearize_active=True,
                 linearize_violated=True):
    """
    Linearizes nonlinear constraints. Adds the cuts for the ECP method.

    For nonconvex problems, turn on 'config.add_slack'. Slack variables will
    always be used for nonlinear equality constraints.

    Parameters
    ----------
    target_model : Pyomo model
        The MIP/MILP model to which the ECP cuts are added.
    solve_data : MindtPy data container
        Data container that holds solve-instance data.
    config : ConfigBlock
        The specific configurations for the algorithm.
    linearize_active : bool, optional
        Whether to linearize the active nonlinear constraints, by default True.
    linearize_violated : bool, optional
        Whether to linearize the violated nonlinear constraints, by default True.
    """
    for constr in target_model.MindtPy_utils.constraint_list:

        if constr.body.polynomial_degree() in (0, 1):
            continue

        constr_vars = list(identify_variables(constr.body))
        jacs = solve_data.jacobians

        if constr.has_lb() and constr.has_ub():
            config.logger.warning('constraint {} has both a lower '
                                  'and upper bound.'
                                  '\n'.format(constr))
            continue
        if constr.has_ub():
            try:
                upper_slack = constr.uslack()
            except (ValueError, OverflowError):
                config.logger.warning('constraint {} has caused either a '
                                      'ValueError or OverflowError.'
                                      '\n'.format(constr))
                continue
            if (linearize_active and abs(upper_slack) < config.ecp_tolerance) \
                    or (linearize_violated and upper_slack < 0) \
                    or (config.linearize_inactive and upper_slack > 0):
                if config.add_slack:
                    slack_var = target_model.MindtPy_utils.MindtPy_linear_cuts.slack_vars.add(
                    )

                target_model.MindtPy_utils.MindtPy_linear_cuts.ecp_cuts.add(
                    expr=(
                        sum(
                            value(jacs[constr][var]) * (var - var.value)
                            for var in constr_vars) -
                        (slack_var if config.add_slack else 0) <= upper_slack))

        if constr.has_lb():
            try:
                lower_slack = constr.lslack()
            except (ValueError, OverflowError):
                config.logger.warning('constraint {} has caused either a '
                                      'ValueError or OverflowError.'
                                      '\n'.format(constr))
                continue
            if (linearize_active and abs(lower_slack) < config.ecp_tolerance) \
                    or (linearize_violated and lower_slack < 0) \
                    or (config.linearize_inactive and lower_slack > 0):
                if config.add_slack:
                    slack_var = target_model.MindtPy_utils.MindtPy_linear_cuts.slack_vars.add(
                    )

                target_model.MindtPy_utils.MindtPy_linear_cuts.ecp_cuts.add(
                    expr=(
                        sum(
                            value(jacs[constr][var]) * (var - var.value)
                            for var in constr_vars) +
                        (slack_var if config.add_slack else 0) >= -lower_slack
                    ))
Example #12
def add_oa_cuts(target_model,
                dual_values,
                solve_data,
                config,
                linearize_active=True,
                linearize_violated=True):
    """
    Linearizes nonlinear constraints; modifies the model to include the OA cuts.

    For nonconvex problems, turn on 'config.add_slack'. Slack variables will
    always be used for nonlinear equality constraints.

    Parameters
    ----------
    target_model : Pyomo model
        The MIP/MILP model to which the OA cuts are added.
    dual_values : list
        The value of the duals for each constraint.
    solve_data : MindtPy data container
        Data container that holds solve-instance data.
    config : ConfigBlock
        The specific configurations for the algorithm.
    linearize_active : bool, optional
        Whether to linearize the active nonlinear constraints, by default True.
    linearize_violated : bool, optional
        Whether to linearize the violated nonlinear constraints, by default True.
    """
    for index, constr in enumerate(target_model.MindtPy_utils.constraint_list):
        if constr.body.polynomial_degree() in (0, 1):
            continue

        constr_vars = list(identify_variables(constr.body))
        jacs = solve_data.jacobians

        # Equality constraint (makes the problem nonconvex)
        if constr.has_ub() and constr.has_lb(
        ) and constr.upper == constr.lower and config.use_dual:
            sign_adjust = -1 if solve_data.objective_sense == minimize else 1
            # both bounds exist in this branch, so the right-hand side is the shared bound
            rhs = constr.lower
            if config.add_slack:
                slack_var = target_model.MindtPy_utils.MindtPy_linear_cuts.slack_vars.add(
                )
            target_model.MindtPy_utils.MindtPy_linear_cuts.oa_cuts.add(
                expr=copysign(1, sign_adjust * dual_values[index]) *
                (sum(
                    value(jacs[constr][var]) * (var - value(var))
                    for var in list(EXPR.identify_variables(constr.body))) +
                 value(constr.body) - rhs) -
                (slack_var if config.add_slack else 0) <= 0)

        else:  # Inequality constraint (possibly two-sided)
            if constr.has_ub() \
                and (linearize_active and abs(constr.uslack()) < config.zero_tolerance) \
                    or (linearize_violated and constr.uslack() < 0) \
                    or (config.linearize_inactive and constr.uslack() > 0):
                if config.add_slack:
                    slack_var = target_model.MindtPy_utils.MindtPy_linear_cuts.slack_vars.add(
                    )

                target_model.MindtPy_utils.MindtPy_linear_cuts.oa_cuts.add(
                    expr=(
                        sum(
                            value(jacs[constr][var]) * (var - var.value)
                            for var in constr_vars) + value(constr.body) -
                        (slack_var if config.add_slack else 0) <= constr.upper
                    ))

            if constr.has_lb() \
                and (linearize_active and abs(constr.lslack()) < config.zero_tolerance) \
                    or (linearize_violated and constr.lslack() < 0) \
                    or (config.linearize_inactive and constr.lslack() > 0):
                if config.add_slack:
                    slack_var = target_model.MindtPy_utils.MindtPy_linear_cuts.slack_vars.add(
                    )

                target_model.MindtPy_utils.MindtPy_linear_cuts.oa_cuts.add(
                    expr=(
                        sum(
                            value(jacs[constr][var]) * (var - var.value)
                            for var in constr_vars) + value(constr.body) +
                        (slack_var if config.add_slack else 0) >= constr.lower
                    ))
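The copysign(1, sign_adjust * dual_values[index]) factor in the equality branch is simply a compact way to orient the linearized, relaxed equality according to the sign of its dual and the objective sense: the factor is always +1.0 or -1.0, so a single <= 0 cut covers both orientations. A tiny sketch of the mechanics:

from math import copysign

objective_is_minimize = True
sign_adjust = -1 if objective_is_minimize else 1

for dual in (3.2, -0.7, 0.0):
    direction = copysign(1, sign_adjust * dual)   # always +1.0 or -1.0
    # the cut added is: direction * (linearization of g(x) - rhs) <= 0,
    # i.e. the dual's sign picks which side of the relaxed equality is enforced
    print(dual, direction)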