def flatten_dae_components(model, time, ctype):
    """
    This function takes in a (hierarchical, block-structured) Pyomo
    model and a `ContinuousSet` and returns two lists of "flattened"
    components. The first is a list of all `_ComponentData` that are not
    indexed by the `ContinuousSet` and the second is a list of
    `Reference` components such that each reference is indexed only by
    the specified `ContinuousSet`. This function is convenient for
    identifying components that are implicitly indexed by the
    `ContinuousSet`, for example, a singleton `Component` living on a
    `Block` that is indexed by the `ContinuousSet`.

    Parameters
    ----------
    model : Concrete Pyomo model

    time : ``pyomo.dae.ContinuousSet``

    ctype : Pyomo Component type

    Returns
    -------
    Two lists : the first contains `_ComponentData` not indexed by `time`;
    the second contains `Reference` components indexed only by `time`.
    """
    assert time.model() is model.model()

    block_queue = [model]
    regular_comps = []
    time_indexed_comps = []
    while block_queue:
        b = block_queue.pop(0)
        b_sets = b.index_set().subsets()
        if time in b_sets:
            for _slice in generate_time_indexed_block_slices(b, time, ctype):
                time_indexed_comps.append(Reference(_slice))
            continue
        for blkdata in b.values():
            block_queue.extend(
                blkdata.component_objects(Block, descend_into=False))
        for blkdata in b.values():
            for v in blkdata.component_objects(
                    SubclassOf(ctype), descend_into=False):
                v_sets = v.index_set().subsets()
                if time in v_sets:
                    for _slice in generate_time_only_slices(v, time):
                        time_indexed_comps.append(Reference(_slice))
                else:
                    regular_comps.extend(v.values())

    return regular_comps, time_indexed_comps
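
# A minimal usage sketch for flatten_dae_components, assuming the Pyomo
# imports available in this module. The model and its component names
# (m.time, m.scalar_var, m.time_var, m.blk) are hypothetical examples, not
# part of this module.
def _example_flatten_dae_components():
    from pyomo.environ import ConcreteModel, Var, Block
    from pyomo.dae import ContinuousSet

    m = ConcreteModel()
    m.time = ContinuousSet(initialize=[0.0, 0.5, 1.0])
    m.scalar_var = Var()
    m.time_var = Var(m.time)

    def blk_rule(b, t):
        # a singleton Var living on a time-indexed Block is implicitly
        # indexed by time
        b.x = Var()
    m.blk = Block(m.time, rule=blk_rule)

    scalar_vars, dae_vars = flatten_dae_components(m, m.time, Var)
    # scalar_vars holds VarData not indexed by m.time (m.scalar_var);
    # dae_vars holds References indexed only by m.time
    # (m.time_var and the m.blk[:].x slice).
    return scalar_vars, dae_vars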
def _transform_block_components(self, block, disjunct, bigM, arg_list,
                                suffix_list):
    # We find any transformed disjunctions that might be here because we
    # need to move their transformation blocks up onto the parent block
    # before we transform anything else on this block. Note that we do this
    # before we create references to local variables because we do not want
    # duplicate references to indicator variables and local variables on
    # nested disjuncts.
    disjunctBlock = disjunct._transformation_block()
    destinationBlock = disjunctBlock.parent_block()
    for obj in block.component_data_objects(
            Disjunction,
            sort=SortComponents.deterministic,
            descend_into=(Block)):
        if obj.algebraic_constraint is None:
            # This could be bad if it's active, since that means it's
            # untransformed, but we'll wait to yell until the next loop.
            continue
        # get this disjunction's relaxation block.
        transBlock = obj.algebraic_constraint().parent_block()

        # move transBlock up to parent component
        self._transfer_transBlock_data(transBlock, destinationBlock)
        # we leave the transformation block because it still has the XOR
        # constraints, which we want to be on the parent disjunct.

    # Find all the variables declared here (including the indicator_var)
    # and add a reference on the transformation block so these will be
    # accessible when the Disjunct is deactivated. We don't descend into
    # Disjuncts because we just moved the references to their local
    # variables up in the previous loop.
    varRefBlock = disjunctBlock.localVarReferences
    for v in block.component_objects(Var, descend_into=Block, active=None):
        varRefBlock.add_component(
            unique_component_name(
                varRefBlock,
                v.getname(fully_qualified=True, name_buffer=NAME_BUFFER)),
            Reference(v))

    # Now look through the component map of block and transform everything
    # we have a handler for. Yell if we don't know how to handle it. (Note
    # that because we only iterate through active components, this means
    # non-ActiveComponent types cannot have handlers.)
    for obj in block.component_objects(active=True, descend_into=False):
        handler = self.handlers.get(obj.ctype, None)
        if not handler:
            if handler is None:
                raise GDP_Error(
                    "No BigM transformation handler registered "
                    "for modeling components of type %s. If your "
                    "disjuncts contain non-GDP Pyomo components that "
                    "require transformation, please transform them first."
                    % obj.ctype)
            continue
        # obj is what we are transforming, we pass disjunct
        # through so that we will have access to the indicator
        # variables down the line.
        handler(obj, disjunct, bigM, arg_list, suffix_list)
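
# A hypothetical sketch of the situation this method handles: a Disjunction
# nested inside a Disjunct. Applying 'gdp.bigm' transforms the inner
# disjunction first, and the code above then moves its transformation block
# up onto the parent block. All model names below are illustrative.
def _example_nested_disjunction_bigm():
    from pyomo.environ import (
        ConcreteModel, Var, Constraint, TransformationFactory)
    from pyomo.gdp import Disjunct, Disjunction

    m = ConcreteModel()
    m.x = Var(bounds=(0, 10))

    m.d1 = Disjunct()
    m.d1.inner_a = Disjunct()
    m.d1.inner_a.c = Constraint(expr=m.x <= 2)
    m.d1.inner_b = Disjunct()
    m.d1.inner_b.c = Constraint(expr=m.x >= 4)
    m.d1.inner = Disjunction(expr=[m.d1.inner_a, m.d1.inner_b])

    m.d2 = Disjunct()
    m.d2.c = Constraint(expr=m.x == 3)
    m.outer = Disjunction(expr=[m.d1, m.d2])

    TransformationFactory('gdp.bigm').apply_to(m)
    return m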
def _transform_block_components(self, block, disjunct, var_substitute_map,
                                zero_substitute_map):
    # As opposed to bigm, in hull the only special thing we need to do for
    # nested Disjunctions is to make sure that we move up local var
    # references and also references to the disaggregated variables so that
    # all will be accessible after we transform this Disjunct. The indicator
    # variables and disaggregated variables of the inner disjunction will
    # need to be disaggregated again, but the transformed constraints will
    # not be. But this way nothing will get double-bigm-ed. (If an
    # untransformed disjunction is lurking here, we will catch it below.)
    disjunctBlock = disjunct._transformation_block()
    destinationBlock = disjunctBlock.parent_block()
    for obj in block.component_data_objects(
            Disjunction,
            sort=SortComponents.deterministic,
            descend_into=(Block)):
        if obj.algebraic_constraint is None:
            # This could be bad if it's active, since that means it's
            # untransformed, but we'll wait to yell until the next loop.
            continue
        # get this disjunction's relaxation block.
        transBlock = obj.algebraic_constraint().parent_block()
        self._transfer_var_references(transBlock, destinationBlock)

    # add references to all local variables on block (including the
    # indicator_var). Note that we do this after we have moved up the
    # transformation blocks for nested disjunctions, so that we don't have
    # duplicate references.
    varRefBlock = disjunctBlock.localVarReferences
    for v in block.component_objects(Var, descend_into=Block, active=None):
        varRefBlock.add_component(
            unique_component_name(
                varRefBlock,
                v.getname(fully_qualified=True, name_buffer=NAME_BUFFER)),
            Reference(v))

    # Look through the component map of block and transform everything we
    # have a handler for. Yell if we don't know how to handle it. (Note
    # that because we only iterate through active components, this means
    # non-ActiveComponent types cannot have handlers.)
    for obj in block.component_objects(active=True, descend_into=False):
        handler = self.handlers.get(obj.ctype, None)
        if not handler:
            if handler is None:
                raise GDP_Error(
                    "No hull transformation handler registered "
                    "for modeling components of type %s. If your "
                    "disjuncts contain non-GDP Pyomo components that "
                    "require transformation, please transform them first."
                    % obj.ctype)
            continue
        # obj is what we are transforming, we pass disjunct
        # through so that we will have access to the indicator
        # variables down the line.
        handler(obj, disjunct, var_substitute_map, zero_substitute_map)
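
# The hull analogue of the bigm sketch above (hypothetical names): with
# 'gdp.hull', local variables and the inner disjunction's disaggregated
# variables are referenced on the outer Disjunct's transformation block so
# they remain reachable, and they are disaggregated again at the outer
# level. Variable bounds are required by the hull reformulation.
def _example_nested_disjunction_hull():
    from pyomo.environ import (
        ConcreteModel, Var, Constraint, TransformationFactory)
    from pyomo.gdp import Disjunct, Disjunction

    m = ConcreteModel()
    m.x = Var(bounds=(0, 8))

    m.outer_a = Disjunct()
    m.outer_a.inner_a = Disjunct()
    m.outer_a.inner_a.c = Constraint(expr=m.x <= 1)
    m.outer_a.inner_b = Disjunct()
    m.outer_a.inner_b.c = Constraint(expr=m.x >= 7)
    m.outer_a.inner = Disjunction(
        expr=[m.outer_a.inner_a, m.outer_a.inner_b])

    m.outer_b = Disjunct()
    m.outer_b.c = Constraint(expr=m.x == 4)
    m.outer = Disjunction(expr=[m.outer_a, m.outer_b])

    TransformationFactory('gdp.hull').apply_to(m)
    return m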
def categorize_variables(model, time):
    assert time.model() is model.model()

    block_queue = [model]
    regular_vars = []
    time_indexed_vars = []
    while block_queue:
        b = block_queue.pop(0)
        b_sets = identify_member_sets(b.index_set())
        if time in b_sets:
            for _slice in generate_time_indexed_block_slices(b, time):
                time_indexed_vars.append(Reference(_slice))
            continue
        block_queue.extend(
            list(b.component_objects(Block, descend_into=False)))
        for v in b.component_objects(SubclassOf(Var), descend_into=False):
            v_sets = identify_member_sets(v.index_set())
            if time in v_sets:
                for _slice in generate_time_only_slices(v, time):
                    time_indexed_vars.append(Reference(_slice))
            else:
                regular_vars.extend(list(v.values()))

    return regular_vars, time_indexed_vars
def flatten_components_along_sets(m, sets, ctype, indices=None):
    """
    This function iterates over components (recursively) contained in a
    block and partitions their data objects into components indexed only
    by the specified sets.

    Args:
        m : Block whose components (and their sub-components) will be
            partitioned
        sets : Possible indexing sets for the returned components
        ctype : Type of component to identify and partition
        indices : indices of sets to use when descending into subblocks

    Returns:
        tuple: The first entry is a list of tuples of Pyomo Sets. The
        second is a list of lists of components, each indexed by the
        corresponding sets in the first entry.

    """
    if indices is None:
        index_map = ComponentMap()
    elif type(indices) is ComponentMap:
        index_map = indices
    else:
        index_map = ComponentMap(zip(sets, indices))
    for s, idx in index_map.items():
        if idx not in s:
            raise ValueError(
                "%s is a bad index for set %s. \nPlease provide an index "
                "that is in the set." % (idx, s.name)
            )
    index_stack = []

    set_of_sets = ComponentSet(sets)
    # Using these two `OrderedDict`s is a workaround because I can't
    # reliably use tuples of components as keys in a `ComponentMap`.
    sets_dict = OrderedDict()
    comps_dict = OrderedDict()
    for index_sets, slice_ in generate_sliced_components(
            m, index_stack, m, set_of_sets, ctype, index_map):
        # Note that index_sets should always be a tuple, never a scalar.

        # TODO: Potentially re-order sets at this point.
        # That way (time, space) would have the same key as (space, time).
        # Then we'd have to somehow "swap indexing sets" when we create
        # the reference below.
        key = tuple(id(c) for c in index_sets)
        if key not in sets_dict:
            if len(key) == 0:
                sets_dict[key] = (UnindexedComponent_set,)
            else:
                sets_dict[key] = index_sets
        if key not in comps_dict:
            comps_dict[key] = []
        if len(key) == 0:
            comps_dict[key].append(slice_)
        else:
            # If the user wants to change these flags, they can access the
            # slice via the `referent` attribute of each reference
            # component.
            slice_.attribute_errors_generate_exceptions = False
            slice_.key_errors_generate_exceptions = False
            comps_dict[key].append(Reference(slice_))

    # list-of-tuples of Sets:
    sets_list = list(sets for sets in sets_dict.values())
    # list-of-lists of components:
    comps_list = list(comps for comps in comps_dict.values())
    # E.g. we return:
    #     (
    #         [(time, space), (time,)],
    #         [[some_component, ...], [other, ...]],
    #     )
    # where [some_component, ...] are indexed by (time, space) and
    # [other, ...] are indexed only by time.
    return sets_list, comps_list
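
# A minimal usage sketch for flatten_components_along_sets, assuming the
# imports available in this module. The model, set, and variable names
# below are hypothetical.
def _example_flatten_components_along_sets():
    from pyomo.environ import ConcreteModel, Set, Var

    m = ConcreteModel()
    m.time = Set(initialize=[0, 1, 2])
    m.space = Set(initialize=[0.0, 0.5, 1.0])
    m.v0 = Var()
    m.v1 = Var(m.time)
    m.v2 = Var(m.time, m.space)

    sets_list, comps_list = flatten_components_along_sets(
        m, (m.time, m.space), Var)
    # sets_list should contain (UnindexedComponent_set,), (m.time,), and
    # (m.time, m.space); comps_list is parallel to sets_list, with the
    # indexed entries holding References indexed only by those sets
    # (e.g. m.v2 appears under (m.time, m.space)).
    return sets_list, comps_list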
def apply_basic_step(disjunctions_or_constraints):
    #
    # Basic steps only apply to XOR'd disjunctions
    #
    disjunctions = list(obj for obj in disjunctions_or_constraints
                        if obj.ctype == Disjunction)
    constraints = list(obj for obj in disjunctions_or_constraints
                       if obj.ctype == Constraint)
    for d in disjunctions:
        if not d.xor:
            raise ValueError(
                "Basic steps can only be applied to XOR'd disjunctions\n\t"
                "(raised by disjunction %s)" % (d.name, ))
        if not d.active:
            logger.warning("Warning: applying basic step to a previously "
                           "deactivated disjunction (%s)" % (d.name, ))

    ans = Block(concrete=True)
    ans.DISJUNCTIONS = Set(initialize=range(len(disjunctions)))
    ans.INDEX = Set(
        dimen=len(disjunctions),
        initialize=_squish_singletons(itertools.product(*tuple(
            range(len(d.disjuncts)) for d in disjunctions))))

    #
    # Form the individual disjuncts for the new basic step
    #
    ans.disjuncts = Disjunct(ans.INDEX)
    for idx in ans.INDEX:
        #
        # Each source disjunct will be copied (cloned) into its own
        # subblock
        #
        ans.disjuncts[idx].src = Block(ans.DISJUNCTIONS)
        for i in ans.DISJUNCTIONS:
            tmp = _clone_all_but_indicator_vars(
                disjunctions[i].disjuncts[
                    idx[i] if isinstance(idx, tuple) else idx])
            for k, v in list(tmp.component_map().items()):
                if v.parent_block() is not tmp:
                    # Skip indicator_var and binary_indicator_var
                    continue
                tmp.del_component(k)
                ans.disjuncts[idx].src[i].add_component(k, v)

        # Copy in the constraints corresponding to the improper disjunctions
        ans.disjuncts[idx].improper_constraints = ConstraintList()
        for constr in constraints:
            if constr.is_indexed():
                for indx in constr:
                    ans.disjuncts[idx].improper_constraints.add(
                        (constr[indx].lower,
                         constr[indx].body,
                         constr[indx].upper))
                    constr[indx].deactivate()
            # need this so that we can take an improper basic step with a
            # ConstraintData
            else:
                ans.disjuncts[idx].improper_constraints.add(
                    (constr.lower, constr.body, constr.upper))
                constr.deactivate()

    #
    # Link the new disjunct indicator_var's to the original
    # indicator_var's. Since only one of the new disjuncts will be
    # selected, each original indicator is true exactly when one of the
    # new disjuncts built from it is selected.
    #
    NAME_BUFFER = {}
    ans.indicator_links = ConstraintList()
    for i in ans.DISJUNCTIONS:
        for j in range(len(disjunctions[i].disjuncts)):
            orig_var = disjunctions[i].disjuncts[j].indicator_var
            orig_binary_var = orig_var.get_associated_binary()
            ans.indicator_links.add(
                orig_binary_var == sum(
                    ans.disjuncts[idx].binary_indicator_var
                    for idx in ans.INDEX
                    if (idx[i] if isinstance(idx, tuple) else idx) == j))
            # and throw on a Reference to original on the block
            for v in (orig_var, orig_binary_var):
                name_base = v.getname(fully_qualified=True,
                                      name_buffer=NAME_BUFFER)
                ans.add_component(unique_component_name(ans, name_base),
                                  Reference(v))

    # Form the new disjunction
    ans.disjunction = Disjunction(expr=[ans.disjuncts[i] for i in ans.INDEX])

    #
    # Deactivate the old disjunctions / disjuncts
    #
    for i in ans.DISJUNCTIONS:
        disjunctions[i].deactivate()
        for d in disjunctions[i].disjuncts:
            d._deactivate_without_fixing_indicator()
    return ans
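
# A hypothetical usage sketch for apply_basic_step: two XOR disjunctions on
# the same variable are intersected into a single equivalent disjunction
# whose disjuncts are the pairwise combinations of the original disjuncts.
# All model names below are illustrative.
def _example_apply_basic_step():
    from pyomo.environ import ConcreteModel, Var, Constraint
    from pyomo.gdp import Disjunct, Disjunction

    m = ConcreteModel()
    m.x = Var(bounds=(0, 10))

    m.d1 = Disjunct()
    m.d1.c = Constraint(expr=m.x <= 2)
    m.d2 = Disjunct()
    m.d2.c = Constraint(expr=m.x >= 8)
    m.disj_a = Disjunction(expr=[m.d1, m.d2])

    m.e1 = Disjunct()
    m.e1.c = Constraint(expr=m.x <= 5)
    m.e2 = Disjunct()
    m.e2.c = Constraint(expr=m.x >= 5)
    m.disj_b = Disjunction(expr=[m.e1, m.e2])

    # The returned Block holds the new disjuncts, the new disjunction, and
    # the indicator-linking constraints; the originals are deactivated.
    m.basic_step = apply_basic_step([m.disj_a, m.disj_b])
    return m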
def _transform_block_components(self, block, disjunct, bigM, arg_list,
                                suffix_list):
    # We find any transformed disjunctions that might be here because we
    # need to move their transformation blocks up onto the parent block
    # before we transform anything else on this block. Note that we do this
    # before we create references to local variables because we do not want
    # duplicate references to indicator variables and local variables on
    # nested disjuncts.
    disjunctBlock = disjunct._transformation_block()
    destinationBlock = disjunctBlock.parent_block()
    for obj in block.component_data_objects(
            Disjunction,
            sort=SortComponents.deterministic,
            descend_into=(Block)):
        if obj.algebraic_constraint is None:
            # This could be bad if it's active, since that means it's
            # untransformed, but we'll wait to yell until the next loop.
            continue
        # get this disjunction's relaxation block.
        transBlock = obj.algebraic_constraint().parent_block()

        # move transBlock up to parent component
        self._transfer_transBlock_data(transBlock, destinationBlock)
        # we leave the transformation block because it still has the XOR
        # constraints, which we want to be on the parent disjunct.

    # Transform any logical constraints here. We need to do this before we
    # create the variable references!
    TransformationFactory('core.logical_to_linear').apply_to(block)

    # We don't know where all the BooleanVars are used, so if there are any
    # that the above transformation didn't transform, we need to do it now,
    # so that the Reference gets moved up. This won't be necessary when the
    # writers are willing to find Vars not in the active subtree.
    for boolean in block.component_data_objects(BooleanVar,
                                                descend_into=Block,
                                                active=None):
        if isinstance(boolean._associated_binary,
                      _DeprecatedImplicitAssociatedBinaryVariable):
            parent_block = boolean.parent_block()
            new_var = Var(domain=Binary)
            parent_block.add_component(
                unique_component_name(parent_block,
                                      boolean.local_name + "_asbinary"),
                new_var)
            boolean.associate_binary_var(new_var)

    # Find all the variables declared here (including the indicator_var)
    # and add a reference on the transformation block so these will be
    # accessible when the Disjunct is deactivated. We don't descend into
    # Disjuncts because we just moved the references to their local
    # variables up in the previous loop.
    varRefBlock = disjunctBlock.localVarReferences
    for v in block.component_objects(Var, descend_into=Block, active=None):
        varRefBlock.add_component(
            unique_component_name(
                varRefBlock, v.getname(fully_qualified=True)),
            Reference(v))

    # Now look through the component map of block and transform everything
    # we have a handler for. Yell if we don't know how to handle it. (Note
    # that because we only iterate through active components, this means
    # non-ActiveComponent types cannot have handlers.)
    for obj in block.component_objects(active=True, descend_into=False):
        handler = self.handlers.get(obj.ctype, None)
        if not handler:
            if handler is None:
                raise GDP_Error(
                    "No BigM transformation handler registered "
                    "for modeling components of type %s. If your "
                    "disjuncts contain non-GDP Pyomo components that "
                    "require transformation, please transform them first."
                    % obj.ctype)
            continue
        # obj is what we are transforming, we pass disjunct
        # through so that we will have access to the indicator
        # variables down the line.
        handler(obj, disjunct, bigM, arg_list, suffix_list)
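
# A hypothetical sketch of the case the logical-constraint handling above is
# for: a Disjunct that contains BooleanVars and a LogicalConstraint. Applying
# 'gdp.bigm' first runs 'core.logical_to_linear' on the Disjunct, then
# associates binaries with any remaining BooleanVars, so that all the
# variable references can be moved up. All names below are illustrative.
def _example_bigm_with_logical_constraints():
    from pyomo.environ import (
        ConcreteModel, Var, BooleanVar, Constraint, LogicalConstraint,
        TransformationFactory)
    from pyomo.gdp import Disjunct, Disjunction

    m = ConcreteModel()
    m.x = Var(bounds=(0, 10))
    m.Y = BooleanVar()

    m.d1 = Disjunct()
    m.d1.c = Constraint(expr=m.x <= 2)
    m.d1.Y_local = BooleanVar()
    m.d1.logic = LogicalConstraint(expr=m.d1.Y_local.implies(m.Y))

    m.d2 = Disjunct()
    m.d2.c = Constraint(expr=m.x >= 8)

    m.disjunction = Disjunction(expr=[m.d1, m.d2])
    TransformationFactory('gdp.bigm').apply_to(m)
    return m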