Example #1
def create_base_cutting_stock(demand, W):

    initial_patterns = dict()

    ## cutting stock base problem
    cs = pyo.ConcreteModel()

    cs.pattern = pyo.VarList(domain=pyo.NonNegativeReals)

    # add initial columns for each
    # demanded width
    for i, width in enumerate(demand):
        cs.pattern.add()
        initial_patterns[i + 1] = {width: int(W // width)}

    # add the demand constraints; each initial column packs as many
    # pieces of a single width onto one stock roll as possible
    cs.demand = pyo.Constraint(demand.keys())
    for i, (width, quantity) in enumerate(demand.items()):
        cs.demand[width] = initial_patterns[i + 1][width] * cs.pattern[
            i + 1] >= quantity

    cs.obj = pyo.Objective(expr=pyo.quicksum(cs.pattern.values()),
                           sense=pyo.minimize)

    cs.dual = pyo.Suffix(direction=pyo.Suffix.IMPORT)

    ## knapsack cut generator
    ks = pyo.ConcreteModel()

    ks.widths = pyo.Var(demand.keys(), within=pyo.NonNegativeIntegers)

    ks.knapsack = pyo.Constraint(expr=pyo.quicksum(width * ks.widths[width]
                                                   for width in demand) <= W)

    # blank objective, set by the dual values of cs
    ks.obj = pyo.Objective(expr=0, sense=pyo.maximize)

    return cs, ks, initial_patterns
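A minimal usage sketch for the returned objects. The demand data, the roll width, and the 'glpk' solver choice below are assumptions for illustration only, not part of the example:

import pyomo.environ as pyo

# illustrative data: width -> required number of pieces, and the stock roll width
demand = {1380: 22, 1520: 25, 1560: 12}
W = 5600

cs, ks, patterns = create_base_cutting_stock(demand, W)

opt = pyo.SolverFactory('glpk')   # any LP solver that reports duals will do
opt.solve(cs)

# the duals of the demand rows are the prices fed to the knapsack pricing problem
prices = {width: cs.dual[cs.demand[width]] for width in demand}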
Example #2
def relax(model, data, use_linear_relaxation=True):
    new_model = model.clone()

    for var in model.component_data_objects(pe.Var,
                                            active=True,
                                            descend_into=True):
        new_var = new_model.find_component(var)
        data.original_to_new_var_map[var] = new_var

    model = new_model

    model.relaxations = pe.Block()
    model.aux_vars = pe.VarList()
    model.aux_cons = pe.ConstraintList()

    for obj in nonrelaxation_component_data_objects(model, ctype=pe.Objective, active=True):
        degree = polynomial_degree(obj.expr)
        if degree is not None:
            if degree <= 1:
                continue

        assert obj.is_minimizing()

        # relaxation_side = RelaxationSide.UNDER
        relaxation_side = RelaxationSide.BOTH

        new_body = relax_expression(model, obj.expr, relaxation_side, data)
        obj._expr = new_body

    for cons in nonrelaxation_component_data_objects(model, ctype=pe.Constraint, active=True):
        relax_constraint(model, cons, data, inplace=True)

    update_relaxation_data(model, data)
    rebuild_relaxations(model, data, use_linear_relaxation)

    return model
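Only the original_to_new_var_map attribute of data is visible in this snippet; a minimal, hypothetical container that would satisfy that use (the helper functions data is later passed to may expect additional state) could look like:

from pyomo.common.collections import ComponentMap

class RelaxationData:
    """Hypothetical holder for the variable mapping populated by relax()."""
    def __init__(self):
        # maps each original Var to its counterpart on the cloned model
        self.original_to_new_var_map = ComponentMap()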
Example #3
def relax(model, descend_into=None, in_place=False, use_fbbt=True):
    if not in_place:
        m = model.clone()
    else:
        m = model

    if descend_into is None:
        descend_into = (pe.Block, Disjunct)

    aux_var_map = dict()
    counter_dict = dict()
    degree_map = ComponentMap()

    for c in m.component_data_objects(ctype=Constraint, active=True, descend_into=descend_into, sort=True):
        body_degree = polynomial_degree(c.body)
        if body_degree is not None:
            if body_degree <= 1:
                continue

        if c.lower is not None and c.upper is not None:
            relaxation_side = RelaxationSide.BOTH
        elif c.lower is not None:
            relaxation_side = RelaxationSide.OVER
        elif c.upper is not None:
            relaxation_side = RelaxationSide.UNDER
        else:
            raise ValueError('Encountered a constraint without a lower or an upper bound: ' + str(c))

        parent_block = c.parent_block()
        relaxation_side_map = ComponentMap()
        relaxation_side_map[c.body] = relaxation_side

        if parent_block in counter_dict:
            counter = counter_dict[parent_block]
        else:
            parent_block.relaxations = pe.Block()
            parent_block.aux_vars = pe.VarList()
            parent_block.aux_cons = pe.ConstraintList()
            counter = RelaxationCounter()
            counter_dict[parent_block] = counter

        new_body = _relax_expr(expr=c.body, aux_var_map=aux_var_map, parent_block=parent_block,
                               relaxation_side_map=relaxation_side_map, counter=counter, degree_map=degree_map)
        lb = c.lower
        ub = c.upper
        parent_block.aux_cons.add(pe.inequality(lb, new_body, ub))
        parent_component = c.parent_component()
        if parent_component.is_indexed():
            del parent_component[c.index()]
        else:
            parent_block.del_component(c)

    for c in m.component_data_objects(ctype=pe.Objective, active=True, descend_into=descend_into, sort=True):
        degree = polynomial_degree(c.expr)
        if degree is not None:
            if degree <= 1:
                continue

        if c.sense == pe.minimize:
            relaxation_side = RelaxationSide.UNDER
        elif c.sense == pe.maximize:
            relaxation_side = RelaxationSide.OVER
        else:
            raise ValueError('Encountered an objective with an unrecognized sense: ' + str(c))

        parent_block = c.parent_block()
        relaxation_side_map = ComponentMap()
        relaxation_side_map[c.expr] = relaxation_side

        if parent_block in counter_dict:
            counter = counter_dict[parent_block]
        else:
            parent_block.relaxations = pe.Block()
            parent_block.aux_vars = pe.VarList()
            parent_block.aux_cons = pe.ConstraintList()
            counter = RelaxationCounter()
            counter_dict[parent_block] = counter

        if not hasattr(parent_block, 'aux_objectives'):
            parent_block.aux_objectives = pe.ObjectiveList()

        new_body = _relax_expr(expr=c.expr, aux_var_map=aux_var_map, parent_block=parent_block,
                               relaxation_side_map=relaxation_side_map, counter=counter, degree_map=degree_map)
        sense = c.sense
        parent_block.aux_objectives.add(new_body, sense=sense)
        parent_component = c.parent_component()
        if parent_component.is_indexed():
            del parent_component[c.index()]
        else:
            parent_block.del_component(c)

    if use_fbbt:
        for _aux_var, relaxation in aux_var_map.values():
            relaxation.rebuild(build_nonlinear_constraint=True)

        fbbt(m, deactivate_satisfied_constraints=True)

        for _aux_var, relaxation in aux_var_map.values():
            relaxation.use_linear_relaxation = True
            relaxation.rebuild()
    else:
        for _aux_var, relaxation in aux_var_map.values():
            relaxation.use_linear_relaxation = True
            relaxation.rebuild()

    return m
Example #4
def relax(model, descend_into=None, in_place=False, use_fbbt=True, fbbt_options=None):
    """
    Create a convex relaxation of the model.

    Parameters
    ----------
    model: pyomo.core.base.block._BlockData or pyomo.core.base.PyomoModel.ConcreteModel
        The model or block to be relaxed
    descend_into: type or tuple of type, optional
        The types of pyomo components that should be checked for constraints to be relaxed. The
        default is (Block, Disjunct).
    in_place: bool, optional
        If False (default=False), model will be cloned, and the clone will be relaxed. 
        If True, then model will be modified in place.
    use_fbbt: bool, optional
        If True (default=True), then FBBT will be used to tighten variable bounds. If False, 
        FBBT will not be used.
    fbbt_options: dict, optional
        The options to pass to the call to fbbt. See pyomo.contrib.fbbt.fbbt.fbbt for details.

    Returns
    -------
    m: pyomo.core.base.block._BlockData or pyomo.core.base.PyomoModel.ConcreteModel
        The relaxed model
    """
    """
    For now, we will use FBBT both before relaxing the model and after relaxing the model. The reason we need to 
    do it before relaxing the model is that the variable bounds will affect the structure of the relaxation. For 
    example, if we need to relax x**3 and x >= 0, then we know x**3 is convex, and we can relax it as a 
    convex, univariate function. However, if x can be positive or negative, then x**3 is neither convex nor concave.
    In this case, we relax it by reformulating it as x * x**2. The hope is that performing FBBT before relaxing 
    the model will help identify things like x >= 0 and therefore x**3 is convex. The correct way to do this is to 
    update the relaxation classes so that the original expression is known, and the best relaxation can be used 
    anytime the variable bounds are updated. For example, suppose the model is relaxed and, only after OBBT is 
    performed, we find out x >= 0. We should be able to easily update the relaxation so that x**3 is then relaxed 
    as a convex univariate function. The reason FBBT needs to be performed after relaxing the model is that 
    we want to make sure that all of the auxiliary variables introduced get tightened bounds. The correct way to
    handle this is to perform FBBT with the original model with suspect, which forms a DAG. Each auxiliary variable
    introduced in the relaxed model corresponds to a node in the DAG. If we use suspect, then we can easily
    update the bounds of the auxiliary variables without performing FBBT a second time.
    """
    if not in_place:
        m = model.clone()
    else:
        m = model

    if fbbt_options is None:
        fbbt_options = dict()

    if use_fbbt:
        fbbt(m, **fbbt_options)

    if descend_into is None:
        descend_into = (pe.Block, Disjunct)

    aux_var_map = dict()
    counter_dict = dict()
    degree_map = ComponentMap()

    for c in nonrelaxation_component_data_objects(m, ctype=Constraint, active=True, descend_into=descend_into, sort=True):
        body_degree = polynomial_degree(c.body)
        if body_degree is not None:
            if body_degree <= 1:
                continue

        if c.lower is not None and c.upper is not None:
            relaxation_side = RelaxationSide.BOTH
        elif c.lower is not None:
            relaxation_side = RelaxationSide.OVER
        elif c.upper is not None:
            relaxation_side = RelaxationSide.UNDER
        else:
            raise ValueError('Encountered a constraint without a lower or an upper bound: ' + str(c))

        parent_block = c.parent_block()
        relaxation_side_map = ComponentMap()
        relaxation_side_map[c.body] = relaxation_side

        if parent_block in counter_dict:
            counter = counter_dict[parent_block]
        else:
            parent_block.relaxations = pe.Block()
            parent_block.aux_vars = pe.VarList()
            parent_block.aux_cons = pe.ConstraintList()
            counter = RelaxationCounter()
            counter_dict[parent_block] = counter

        new_body = _relax_expr(expr=c.body, aux_var_map=aux_var_map, parent_block=parent_block,
                               relaxation_side_map=relaxation_side_map, counter=counter, degree_map=degree_map)
        lb = c.lower
        ub = c.upper
        parent_block.aux_cons.add(pe.inequality(lb, new_body, ub))
        parent_component = c.parent_component()
        if parent_component.is_indexed():
            del parent_component[c.index()]
        else:
            parent_block.del_component(c)

    for c in nonrelaxation_component_data_objects(m, ctype=pe.Objective, active=True, descend_into=descend_into, sort=True):
        degree = polynomial_degree(c.expr)
        if degree is not None:
            if degree <= 1:
                continue

        if c.sense == pe.minimize:
            relaxation_side = RelaxationSide.UNDER
        elif c.sense == pe.maximize:
            relaxation_side = RelaxationSide.OVER
        else:
            raise ValueError('Encountered an objective with an unrecognized sense: ' + str(c))

        parent_block = c.parent_block()
        relaxation_side_map = ComponentMap()
        relaxation_side_map[c.expr] = relaxation_side

        if parent_block in counter_dict:
            counter = counter_dict[parent_block]
        else:
            parent_block.relaxations = pe.Block()
            parent_block.aux_vars = pe.VarList()
            parent_block.aux_cons = pe.ConstraintList()
            counter = RelaxationCounter()
            counter_dict[parent_block] = counter

        if not hasattr(parent_block, 'aux_objectives'):
            parent_block.aux_objectives = pe.ObjectiveList()

        new_body = _relax_expr(expr=c.expr, aux_var_map=aux_var_map, parent_block=parent_block,
                               relaxation_side_map=relaxation_side_map, counter=counter, degree_map=degree_map)
        sense = c.sense
        parent_block.aux_objectives.add(new_body, sense=sense)
        parent_component = c.parent_component()
        if parent_component.is_indexed():
            del parent_component[c.index()]
        else:
            parent_block.del_component(c)

    if use_fbbt:
        for _aux_var, relaxation in aux_var_map.values():
            relaxation.rebuild(build_nonlinear_constraint=True)

        tmp_fbbt_options = dict(fbbt_options)
        tmp_fbbt_options['deactivate_satisfied_constraints'] = False
        fbbt(m, **tmp_fbbt_options)

        for _aux_var, relaxation in aux_var_map.values():
            relaxation.use_linear_relaxation = True
            relaxation.rebuild()
    else:
        for _aux_var, relaxation in aux_var_map.values():
            relaxation.use_linear_relaxation = True
            relaxation.rebuild()

    return m
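A hypothetical call pattern, assuming this relax function and the helpers it relies on are importable; the tiny model just provides a nonconvex bilinear constraint for the relaxation machinery to act on:

import pyomo.environ as pe

m = pe.ConcreteModel()
m.x = pe.Var(bounds=(-1.0, 2.0))
m.y = pe.Var(bounds=(0.5, 3.0))
m.obj = pe.Objective(expr=m.x + m.y)
m.c = pe.Constraint(expr=m.x * m.y >= 1.0)   # degree-2 body, so it gets relaxed

relaxed = relax(m, in_place=False, use_fbbt=True)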
Example #5
    def _initialize_QP_subproblems(self):
        ''' Instantiates the (convex) QP subproblems (eqn. (13) in the Boland
            paper) for each scenario. Does not create/attach an objective.

            Attaches a local_QP_subproblems dict to self. Keys are scenario
            names (or bundle names), values are Pyomo ConcreteModel objects
            corresponding to the QP subproblems. 

            QP subproblems are in their original form, without the x and y
            variables eliminated. Rationale: the solver's pre-solve handles the
            elimination, and bookkeeping is easier (the objective does not need
            to be changed at each inner iteration this way).
        '''
        self.local_QP_subproblems = dict()
        has_init_pts = hasattr(self, 'local_initial_points')
        for (name, model) in self.local_subproblems.items():
            if (self.bundling):
                xr_indices = model.ref_vars.keys()
                nonant_indices = model.nonant_vars.keys()
                leaf_indices = model.leaf_vars.keys()
                if (has_init_pts):
                    raise RuntimeError('Cannot currently specify '
                                       'initial points while using bundles')
            else:
                nonant_indices = model._nonant_indexes.keys()
                leaf_indices = model.leaf_vars.keys()
            ''' Convex comb. coefficients '''
            QP = pyo.ConcreteModel()
            QP.a = pyo.VarList(domain=pyo.NonNegativeReals)
            if (has_init_pts):
                for _ in range(len(self.local_initial_points[name])):
                    QP.a.add()
            else:
                QP.a.add()  # Just one variable (1-based index!) to start
            ''' Other variables '''
            QP.x = pyo.Var(nonant_indices, within=pyo.Reals)
            QP.y = pyo.Var(leaf_indices, within=pyo.Reals)
            if (self.bundling):
                QP.xr = pyo.Var(xr_indices, within=pyo.Reals)
            ''' Non-anticipativity constraint '''
            if (self.bundling):

                def nonant_rule(m, scenario_name, node_name, ix):
                    return m.x[scenario_name, node_name, ix] == \
                            m.xr[node_name, ix]

                QP.na = pyo.Constraint(nonant_indices, rule=nonant_rule)
            ''' (x,y) constraints '''
            if (self.bundling):

                def x_rule(m, node_name, ix):
                    return -m.xr[node_name, ix] + m.a[1] * \
                            model.ref_vars[node_name, ix].value == 0

                def y_rule(m, scenario_name, node_name, ix):
                    return -m.y[scenario_name, node_name, ix] + m.a[1]\
                        * model.leaf_vars[scenario_name,node_name,ix].value == 0

                QP.eqx = pyo.Constraint(xr_indices, rule=x_rule)
            else:
                if (has_init_pts):
                    pts = self.local_initial_points[name]

                    def x_rule(m, node_name, ix):
                        nm = model.nonant_vars[node_name, ix].name
                        return -m.x[node_name, ix] + \
                            pyo.quicksum(m.a[i+1] * pts[i][nm]
                                for i in range(len(pts))) == 0

                    def y_rule(m, node_name, ix):
                        nm = model.leaf_vars[node_name, ix].name
                        return -m.y[node_name,ix] + \
                            pyo.quicksum(m.a[i+1] * pts[i][nm]
                                for i in range(len(pts))) == 0
                else:

                    def x_rule(m, node_name, ix):
                        return -m.x[node_name, ix] + m.a[1] * \
                                model.nonant_vars[node_name, ix].value == 0

                    def y_rule(m, node_name, ix):
                        return -m.y[node_name,ix] + m.a[1] * \
                                model.leaf_vars['LEAF', ix].value == 0

                QP.eqx = pyo.Constraint(nonant_indices, rule=x_rule)

            QP.eqy = pyo.Constraint(leaf_indices, rule=y_rule)
            QP.sum_one = pyo.Constraint(expr=pyo.quicksum(QP.a.values()) == 1)

            self.local_QP_subproblems[name] = QP
Example #6
File: convert.py  Project: whart222/pao
def collect_multilevel_tree(block,
                            var,
                            vidmap={},
                            sortOrder=SortComponents.unsorted,
                            fixed=set(),
                            inequalities=None):
    """
    Traverse the model and generate a tree of the SubModel components
    """
    #
    # Root of the current subtree, defined by the block
    #
    curr = Node(block)
    #
    # Recurse, collecting Submodel components
    #
    fixedvars = fixed | curr.fixedvars
    curr.children = \
        [collect_multilevel_tree(submodel, var, vidmap, fixed=fixedvars, inequalities=inequalities)
         for submodel in block.component_objects(SubModel, active=True, descend_into=True, sort=sortOrder)]
    #
    # Collect objectives and constraints in the current submodel.
    # Note that we do not recurse into SubModel blocks.
    #
    # Objectives
    #
    for odata in block.component_data_objects(pe.Objective,
                                              active=True,
                                              sort=sortOrder,
                                              descend_into=True):
        repn = generate_standard_repn(odata.expr)
        degree = repn.polynomial_degree()
        assert (
            degree is not None
        ), "Objective '%s' has a body that is not linear or quadratic" % odata.name
        if degree == 0:
            continue  # trivial, so skip
        curr.orepn.append((repn, odata.sense))
        if degree == 2:
            curr.linear = False
    #
    # Constraints
    #
    # If we call conversion twice, then we delete the variables from the previous conversion
    #
    block.del_component('zzz_PAO_SlackVariables')
    block.del_component('zzz_PAO_SlackVariables_index')
    block.zzz_PAO_SlackVariables = pe.VarList(domain=pe.NonNegativeReals)
    for cdata in block.component_data_objects(pe.Constraint,
                                              active=True,
                                              sort=sortOrder,
                                              descend_into=True):
        if (not cdata.has_lb()) and (not cdata.has_ub()):
            assert not cdata.equality, "Constraint '%s' is an equality with an infinite right-hand-side" % cdata.name
            # non-binding, so skip
            continue  # pragma: no cover
        repn = generate_standard_repn(cdata.body)
        degree = repn.polynomial_degree()
        assert (
            degree is not None
        ), "Constraint '%s' has a body that is not linear or quadratic " % cdata.name
        if degree == 0:
            if cdata.equality:
                assert pe.value(cdata.body) == pe.value(
                    cdata.lower
                ), "Constraint '%s' is constant but it is not satisfied (equality)" % cdata.name
            else:
                if cdata.lower is not None:
                    assert pe.value(cdata.body) >= pe.value(
                        cdata.lower
                    ), "Constraint '%s' is constant but it is not satisfied (lower-bound)" % cdata.name
                if cdata.upper is not None:
                    assert pe.value(cdata.body) <= pe.value(
                        cdata.upper
                    ), "Constraint '%s' is constant but it is not satisfied (upper-bound)" % cdata.name
            # trivial, so skip
            continue  # pragma: no cover
        else:
            if degree == 2:
                curr.linear = False
            if inequalities:
                if cdata.equality:
                    val = pe.value(cdata.lower)
                    curr.crepn.append((repn, val))
                    curr.crepn.append((negate_repn(repn), -val))
                else:
                    if cdata.lower is None and cdata.upper is None:  #pragma: no cover
                        # unbounded constraint
                        continue
                    if cdata.lower is not None:
                        curr.crepn.append(
                            (negate_repn(repn), -pe.value(cdata.lower)))
                    if cdata.upper is not None:
                        curr.crepn.append((repn, pe.value(cdata.upper)))
            else:
                if cdata.equality:
                    curr.crepn.append((repn, pe.value(cdata.lower)))
                else:
                    if cdata.lower is None and cdata.upper is None:  #pragma: no cover
                        # unbounded constraint
                        continue
                    if cdata.lower is not None:
                        trepn = negate_repn(repn)
                        trepn.linear_coefs.append(1)
                        trepn.linear_vars.append(
                            block.zzz_PAO_SlackVariables.add())
                        curr.crepn.append((trepn, -pe.value(cdata.lower)))
                    if cdata.upper is not None:
                        repn.linear_coefs = list(repn.linear_coefs)
                        repn.linear_vars = list(repn.linear_vars)
                        repn.linear_coefs.append(1)
                        repn.linear_vars.append(
                            block.zzz_PAO_SlackVariables.add())
                        curr.crepn.append((repn, pe.value(cdata.upper)))
    #
    # Collect the variables used by the children
    #
    childvars = set()
    for child in curr.children:
        childvars |= child.unfixedvars
    #
    # Add ids for variables in this block that have not been specified
    # as fixed and which are not used in submodels
    #
    knownvars = curr.fixedvars | childvars
    newvars = []
    for repn in itertools.chain(curr.orepn, curr.crepn):
        for v in repn[0].linear_vars:
            i = id(v)
            if i not in knownvars:
                curr.unfixedvars.add(i)
                var[i] = v
                newvars.append(i)
                knownvars.add(i)
        if True:  # pragma: no cover
            for v, w in repn[0].quadratic_vars:
                i = id(v)
                if i not in knownvars:
                    curr.unfixedvars.add(i)
                    var[i] = v
                    newvars.append(i)
                    knownvars.add(i)
                i = id(w)
                if i not in knownvars:
                    curr.unfixedvars.add(i)
                    var[i] = w
                    newvars.append(i)
                    knownvars.add(i)
    #
    # Categorize the new variables that were found
    #
    if sortOrder == SortComponents.unsorted:
        for i in newvars:
            curr.categorize_variable(i, var[i], vidmap)
    else:
        for k, _, w in sorted(((i, var[i].name, var[i]) for i in newvars),
                              key=lambda arg: arg[1]):
            curr.categorize_variable(k, w, vidmap)
    #
    # Return root of this tree
    #
    return curr
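A hypothetical call pattern (model here stands in for a Pyomo model with a pao SubModel attached; var and vidmap are populated as side effects of the traversal):

var = {}      # id(var_data) -> var_data, filled in by the traversal
vidmap = {}   # variable-id categorization map, filled in by categorize_variable
tree = collect_multilevel_tree(model, var, vidmap=vidmap, inequalities=True)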
Example #7
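The snippets below assume a model m with index sets s and q already declared; a minimal setup sketch (the set contents are illustrative only):

import pyomo.environ as aml

m = aml.ConcreteModel()
m.s = aml.Set(initialize=[1, 2, 3])
m.q = aml.Set(initialize=[1, 2])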
# @Variables_single
m.v = aml.Var(initialize=1.0,
              bounds=(1,4))

# @Variables_single
# @Variables_dict
m.vd = aml.Var(m.s,
               bounds=(None,9))

# @Variables_dict
# @Variables_list
# uses 1-based indexing
def vl_(m, i):
    return (i, None)
m.vl = aml.VarList(bounds=vl_)
for j in m.q:
    m.vl.add()
# @Variables_list

# @Constraints_single
m.c = aml.Constraint(expr=\
    sum(m.vd.values()) <= 9)
# @Constraints_single
# @Constraints_dict
def cd_(m,i,j):
    return m.vd[i] == j
m.cd = aml.Constraint(m.s,
                      m.q,
                      rule=cd_)