def _apply_to_impl(self, instance, **kwds):
    """Relax all targeted Disjunctions on ``instance`` using Big-M.

    Reads the transformation config (including the deprecated
    ``default_bigM`` argument), creates a uniquely-named transformation
    block on the instance, transforms each target, and finally
    deactivates indexed Disjunct/Disjunction containers whose members
    have all been relaxed.
    """
    config = self.CONFIG(kwds.pop('options', {}))

    # We will let args override suffixes and estimate as a last
    # resort. More specific args/suffixes override ones anywhere in
    # the tree. Suffixes lower down in the tree override ones higher
    # up.
    if 'default_bigM' in kwds:
        # logger.warn is deprecated; use logger.warning
        logger.warning("DEPRECATED: the 'default_bigM=' argument has been "
                       "replaced by 'bigM='")
        config.bigM = kwds.pop('default_bigM')

    config.set_value(kwds)
    bigM = config.bigM

    # make a transformation block to put transformed disjuncts on
    transBlockName = unique_component_name(
        instance, '_pyomo_gdp_bigm_relaxation')
    transBlock = Block()
    instance.add_component(transBlockName, transBlock)
    transBlock.relaxedDisjuncts = Block(Any)
    transBlock.lbub = Set(initialize=['lb', 'ub'])
    # this is a dictionary for keeping track of IndexedDisjuncts
    # and IndexedDisjunctions so that, at the end of the
    # transformation, we can check that the ones with no active
    # DisjunctDatas/DisjunctionDatas are deactivated.
    transBlock.disjContainers = ComponentSet()

    targets = config.targets
    if targets is None:
        targets = (instance, )
        _HACK_transform_whole_instance = True
    else:
        _HACK_transform_whole_instance = False
    for _t in targets:
        t = _t.find_component(instance)
        if t is None:
            raise GDP_Error(
                "Target %s is not a component on the instance!" % _t)

        if t.type() is Disjunction:
            # whole (possibly indexed) Disjunction vs a single member
            if t.parent_component() is t:
                self._transformDisjunction(t, transBlock, bigM)
            else:
                self._transformDisjunctionData(
                    t, transBlock, bigM, t.index())
        elif t.type() in (Block, Disjunct):
            if t.parent_component() is t:
                self._transformBlock(t, transBlock, bigM)
            else:
                self._transformBlockData(t, transBlock, bigM)
        else:
            raise GDP_Error(
                "Target %s was not a Block, Disjunct, or Disjunction. "
                "It was of type %s and can't be transformed."
                % (t.name, type(t)))

    # Go through our dictionary of indexed things and deactivate
    # the containers that don't have any active guys inside of
    # them. So the invalid component logic will tell us if we
    # missed something getting transformed.
    for obj in transBlock.disjContainers:
        if not obj.active:
            continue
        for i in obj:
            if obj[i].active:
                break
        else:
            # HACK due to active flag implementation.
            #
            # Ideally we would not have to do any of this (an
            # ActiveIndexedComponent would get its active status by
            # querying the active status of all the contained Data
            # objects). As a fallback, we would like to call:
            #
            #     obj._deactivate_without_fixing_indicator()
            #
            # However, the straightforward implementation of that
            # method would have unintended side effects (fixing the
            # contained _DisjunctData's indicator_vars!) due to our
            # class hierarchy. Instead, we will directly call the
            # relevant base class (safe-ish since we are verifying
            # that all the contained _DisjunctionData are
            # deactivated directly above).
            ActiveComponent.deactivate(obj)

    # HACK for backwards compatibility with the older GDP transformations
    #
    # Until the writers are updated to find variables on things
    # other than active blocks, we need to reclassify the Disjuncts
    # as Blocks after transformation so that the writer will pick up
    # all the variables that it needs (in this case, indicator_vars).
    if _HACK_transform_whole_instance:
        HACK_GDP_Disjunct_Reclassifier().apply_to(instance)
def _bigM_relax_disjunct(self, obj, transBlock, bigM, suffix_list):
    """Relax a single DisjunctData with the Big-M reformulation.

    Creates (or reuses) the ``_gdp_transformation_info`` dict on the
    disjunct, allocates a relaxation block on ``transBlock``, records
    the forward/back mappings, transforms the disjunct's components,
    and deactivates the disjunct to mark it as relaxed.
    """
    if hasattr(obj, "_gdp_transformation_info"):
        infodict = obj._gdp_transformation_info
        # If the user has something with our name that is not a dict, we
        # scream. If they have a dict with this name then we are just going
        # to use it...
        if type(infodict) is not dict:
            raise GDP_Error(
                "Disjunct %s contains an attribute named "
                "_gdp_transformation_info. The transformation requires "
                "that it can create this attribute!" % obj.name)
    else:
        infodict = obj._gdp_transformation_info = {}

    # deactivated -> either we've already transformed or user deactivated
    if not obj.active:
        if obj.indicator_var.is_fixed():
            if value(obj.indicator_var) == 0:
                # The user cleanly deactivated the disjunct: there
                # is nothing for us to do here.
                return
            else:
                raise GDP_Error(
                    "The disjunct %s is deactivated, but the "
                    "indicator_var is fixed to %s. This makes no sense."
                    % (obj.name, value(obj.indicator_var)))
        if not infodict.get('relaxed', False):
            raise GDP_Error(
                "The disjunct %s is deactivated, but the "
                "indicator_var is not fixed and the disjunct does not "
                "appear to have been relaxed. This makes no sense."
                % (obj.name, ))

    if 'bigm' in infodict:
        # we've transformed it (with BigM), so don't do it again.
        return

    # add reference to original disjunct to info dict on transformation
    # block
    relaxedDisjuncts = transBlock.relaxedDisjuncts
    relaxationBlock = relaxedDisjuncts[len(relaxedDisjuncts)]
    relaxationBlock._gdp_transformation_info = {
        'src': obj,
        'srcConstraints': ComponentMap(),
    }

    # add reference to transformation block on original disjunct
    assert 'bigm' not in infodict
    infodict['bigm'] = {
        'relaxationBlock': relaxationBlock,
        'relaxedConstraints': ComponentMap()
    }

    # if this is a disjunctData from an indexed disjunct, we are
    # going to want to check at the end that the container is
    # deactivated if everything in it is. So we save it in our
    # dictionary of things to check if it isn't there already.
    disjParent = obj.parent_component()
    if disjParent.is_indexed() and \
       disjParent not in transBlock.disjContainers:
        transBlock.disjContainers.add(disjParent)

    # This is crazy, but if the disjunction has been previously
    # relaxed, the disjunct *could* be deactivated. This is a big
    # deal for CHull, as it uses the component_objects /
    # component_data_objects generators. For BigM, that is OK,
    # because we never use those generators with active=True. I am
    # only noting it here for the future when someone (me?) is
    # comparing the two relaxations.
    #
    # Transform each component within this disjunct
    self._transform_block_components(obj, obj, infodict, bigM, suffix_list)

    # deactivate disjunct so we know we've relaxed it
    obj._deactivate_without_fixing_indicator()
    infodict['relaxed'] = True
def _transformDisjunctionData(self, obj, transBlock, index):
    """Apply the convex-hull reformulation to one DisjunctionData.

    Collects the variables to disaggregate (those appearing in
    multiple disjuncts, or not local to the single disjunct that uses
    them), transforms each disjunct, adds the XOR constraint, and adds
    the disaggregation constraints linking each original variable to
    the sum of its disaggregated copies.
    """
    # Convex hull doesn't work if this is an or constraint. So if
    # xor is false, give up
    if not obj.xor:
        raise GDP_Error("Cannot do convex hull transformation for "
                        "disjunction %s with or constraint. Must be an "
                        "xor!" % obj.name)

    parent_component = obj.parent_component()
    transBlock.disjContainers.add(parent_component)
    orConstraint, disaggregationConstraint \
        = self._getDisjunctionConstraints(parent_component)

    # We first go through and collect all the variables that we
    # are going to disaggregate.
    varOrder_set = ComponentSet()
    varOrder = []
    varsByDisjunct = ComponentMap()
    for disjunct in obj.disjuncts:
        # This is crazy, but if the disjunct has been previously
        # relaxed, the disjunct *could* be deactivated.
        not_active = not disjunct.active
        if not_active:
            disjunct._activate_without_unfixing_indicator()
        try:
            disjunctVars = varsByDisjunct[disjunct] = ComponentSet()
            for cons in disjunct.component_data_objects(
                    Constraint,
                    active=True,
                    sort=SortComponents.deterministic,
                    descend_into=Block):
                # we aren't going to disaggregate fixed
                # variables. This means there is trouble if they are
                # unfixed later...
                for var in EXPR.identify_variables(
                        cons.body, include_fixed=False):
                    # Note the use of a list so that we will
                    # eventually disaggregate the vars in a
                    # deterministic order (the order that we found
                    # them)
                    disjunctVars.add(var)
                    if var not in varOrder_set:
                        varOrder.append(var)
                        varOrder_set.add(var)
        finally:
            # restore the active status we temporarily flipped above
            if not_active:
                disjunct._deactivate_without_fixing_indicator()

    # We will only disaggregate variables that
    #  1) appear in multiple disjuncts, or
    #  2) are not contained in this disjunct, or
    #  3) are not themselves disaggregated variables
    varSet = []
    localVars = ComponentMap((d, []) for d in obj.disjuncts)
    for var in varOrder:
        disjuncts = [d for d in varsByDisjunct if var in varsByDisjunct[d]]
        if len(disjuncts) > 1:
            varSet.append(var)
        elif self._contained_in(var, disjuncts[0]):
            localVars[disjuncts[0]].append(var)
        elif self._contained_in(var, transBlock):
            # There is nothing to do here: these are already
            # disaggregated vars that can/will be forced to 0 when
            # their disjunct is not active.
            pass
        else:
            varSet.append(var)

    # Now that we know who we need to disaggregate, we will do it
    # while we also transform the disjuncts.
    or_expr = 0
    for disjunct in obj.disjuncts:
        or_expr += disjunct.indicator_var
        self._transform_disjunct(disjunct, transBlock, varSet,
                                 localVars[disjunct])
    orConstraint.add(index, (or_expr, 1))

    for i, var in enumerate(varSet):
        disaggregatedExpr = 0
        for disjunct in obj.disjuncts:
            if 'chull' not in disjunct._gdp_transformation_info:
                # an untransformed disjunct is only legal if it was
                # cleanly deactivated (indicator fixed to 0)
                if not disjunct.indicator_var.is_fixed() \
                        or value(disjunct.indicator_var) != 0:
                    raise RuntimeError(
                        "GDP chull: disjunct was not relaxed, but "
                        "does not appear to be correctly deactivated.")
                continue
            disaggregatedVar = disjunct._gdp_transformation_info['chull'][
                'disaggregatedVars'][var]
            disaggregatedExpr += disaggregatedVar

        # build a constraint index compatible with the parent's indexing
        if type(index) is tuple:
            consIdx = index + (i, )
        elif parent_component.is_indexed():
            consIdx = (index, ) + (i, )
        else:
            consIdx = i

        disaggregationConstraint.add(consIdx, var == disaggregatedExpr)
def _transform_disjunct(self, obj, transBlock, varSet, localVars):
    """Relax a single DisjunctData with the convex-hull reformulation.

    Creates the disaggregated variables (for ``varSet``) and their
    indicator-scaled bound constraints, tightens bounds on ``localVars``
    to include 0, builds the substitution maps, transforms the
    disjunct's components, and deactivates the disjunct.
    """
    if hasattr(obj, "_gdp_transformation_info"):
        infodict = obj._gdp_transformation_info
        # If the user has something with our name that is not a dict, we
        # scream. If they have a dict with this name then we are just going
        # to use it...
        if type(infodict) is not dict:
            raise GDP_Error(
                "Disjunct %s contains an attribute named "
                "_gdp_transformation_info. The transformation requires "
                "that it can create this attribute!" % obj.name)
    else:
        infodict = obj._gdp_transformation_info = {}

    # deactivated means either we've already transformed or user
    # deactivated
    if not obj.active:
        if obj.indicator_var.is_fixed():
            if value(obj.indicator_var) == 0:
                # The user cleanly deactivated the disjunct: there
                # is nothing for us to do here.
                return
            else:
                raise GDP_Error(
                    "The disjunct %s is deactivated, but the "
                    "indicator_var is fixed to %s. This makes no sense."
                    % (obj.name, value(obj.indicator_var)))
        if not infodict.get('relaxed', False):
            raise GDP_Error(
                "The disjunct %s is deactivated, but the "
                "indicator_var is not fixed and the disjunct does not "
                "appear to have been relaxed. This makes no sense."
                % (obj.name, ))

    if 'chull' in infodict:
        # we've transformed it (with CHull), so don't do it again.
        return

    # add reference to original disjunct to info dict on
    # transformation block
    relaxedDisjuncts = transBlock.relaxedDisjuncts
    relaxationBlock = relaxedDisjuncts[len(relaxedDisjuncts)]
    relaxationBlockInfo = relaxationBlock._gdp_transformation_info = {
        'src': obj,
        'srcVars': ComponentMap(),
        'srcConstraints': ComponentMap(),
        'boundConstraintToSrcVar': ComponentMap(),
    }
    infodict['chull'] = chull = {
        'relaxationBlock': relaxationBlock,
        'relaxedConstraints': ComponentMap(),
        'disaggregatedVars': ComponentMap(),
        'bigmConstraints': ComponentMap(),
    }

    # if this is a disjunctData from an indexed disjunct, we are
    # going to want to check at the end that the container is
    # deactivated if everything in it is. So we save it in our
    # dictionary of things to check if it isn't there already.
    disjParent = obj.parent_component()
    if disjParent.is_indexed() and \
       disjParent not in transBlock.disjContainers:
        transBlock.disjContainers.add(disjParent)

    # add the disaggregated variables and their bigm constraints
    # to the relaxationBlock
    for var in varSet:
        lb = var.lb
        ub = var.ub
        if lb is None or ub is None:
            raise GDP_Error("Variables that appear in disjuncts must be "
                            "bounded in order to use the chull "
                            "transformation! Missing bound for %s."
                            % (var.name))

        # the disaggregated variable's domain must include 0 so it can
        # vanish when the disjunct is inactive
        disaggregatedVar = Var(within=Reals,
                               bounds=(min(0, lb), max(0, ub)),
                               initialize=var.value)
        # naming conflicts are possible here since this is a bunch
        # of variables from different blocks coming together, so we
        # get a unique name
        disaggregatedVarName = unique_component_name(
            relaxationBlock, var.local_name)
        relaxationBlock.add_component(disaggregatedVarName,
                                      disaggregatedVar)
        chull['disaggregatedVars'][var] = disaggregatedVar
        relaxationBlockInfo['srcVars'][disaggregatedVar] = var

        bigmConstraint = Constraint(transBlock.lbub)
        relaxationBlock.add_component(disaggregatedVarName + "_bounds",
                                      bigmConstraint)
        # bounds of exactly 0 need no constraint (0 <= v and v <= 0 are
        # implied by the variable domain built above)
        if lb:
            bigmConstraint.add(
                'lb', obj.indicator_var * lb <= disaggregatedVar)
        if ub:
            bigmConstraint.add(
                'ub', disaggregatedVar <= obj.indicator_var * ub)
        chull['bigmConstraints'][var] = bigmConstraint
        relaxationBlockInfo['boundConstraintToSrcVar'][
            bigmConstraint] = var

    for var in localVars:
        lb = var.lb
        ub = var.ub
        if lb is None or ub is None:
            raise GDP_Error("Variables that appear in disjuncts must be "
                            "bounded in order to use the chull "
                            "transformation! Missing bound for %s."
                            % (var.name))
        # local vars are not disaggregated, so widen their bounds to
        # include 0 (they must be able to vanish with the disjunct)
        if value(lb) > 0:
            var.setlb(0)
        if value(ub) < 0:
            var.setub(0)

        # naming conflicts are possible here since this is a bunch
        # of variables from different blocks coming together, so we
        # get a unique name
        conName = unique_component_name(relaxationBlock,
                                        var.local_name + "_bounds")
        bigmConstraint = Constraint(transBlock.lbub)
        relaxationBlock.add_component(conName, bigmConstraint)
        bigmConstraint.add('lb', obj.indicator_var * lb <= var)
        bigmConstraint.add('ub', var <= obj.indicator_var * ub)
        chull['bigmConstraints'][var] = bigmConstraint
        relaxationBlockInfo['boundConstraintToSrcVar'][
            bigmConstraint] = var

    var_substitute_map = dict(
        (id(v), newV)
        for v, newV in iteritems(chull['disaggregatedVars']))
    zero_substitute_map = dict(
        (id(v), NumericConstant(0))
        for v, newV in iteritems(chull['disaggregatedVars']))
    zero_substitute_map.update((id(v), NumericConstant(0))
                               for v in localVars)

    # Transform each component within this disjunct
    self._transform_block_components(obj, obj, infodict,
                                     var_substitute_map,
                                     zero_substitute_map)

    # deactivate disjunct so we know we've relaxed it
    obj._deactivate_without_fixing_indicator()
    infodict['relaxed'] = True
def _transform_disjunct(self, obj, transBlock, bigM, arg_list,
                        suffix_list):
    """Relax a single DisjunctData with Big-M (transformation-block API).

    Validates the (de)activation state, allocates a relaxation block,
    wires the ``_transformation_block`` / ``_srcDisjunct`` weakrefs in
    both directions, transforms the disjunct's components, and
    deactivates the disjunct.
    """
    # deactivated -> either we've already transformed or user deactivated
    if not obj.active:
        if obj.indicator_var.is_fixed():
            if value(obj.indicator_var) == 0:
                # The user cleanly deactivated the disjunct: there
                # is nothing for us to do here.
                return
            else:
                raise GDP_Error(
                    "The disjunct '%s' is deactivated, but the "
                    "indicator_var is fixed to %s. This makes no sense."
                    % (obj.name, value(obj.indicator_var)))
        if obj._transformation_block is None:
            raise GDP_Error(
                "The disjunct '%s' is deactivated, but the "
                "indicator_var is not fixed and the disjunct does not "
                "appear to have been relaxed. This makes no sense. "
                "(If the intent is to deactivate the disjunct, fix its "
                "indicator_var to 0.)" % (obj.name, ))

    if obj._transformation_block is not None:
        # we've transformed it, which means this is the second time it's
        # appearing in a Disjunction
        raise GDP_Error(
            "The disjunct '%s' has been transformed, but a disjunction "
            "it appears in has not. Putting the same disjunct in "
            "multiple disjunctions is not supported." % obj.name)

    # add reference to original disjunct on transformation block
    relaxedDisjuncts = transBlock.relaxedDisjuncts
    relaxationBlock = relaxedDisjuncts[len(relaxedDisjuncts)]

    # we will keep a map of constraints (hashable, ha!) to a tuple to
    # indicate what their M value is and where it came from, of the form:
    # ((lower_value, lower_source, lower_key), (upper_value,
    # upper_source, upper_key)), where the first tuple is the information
    # for the lower M, the second tuple is the info for the upper M,
    # source is the Suffix or argument dictionary and None if the value
    # was calculated, and key is the key in the Suffix or argument
    # dictionary, and None if it was calculated. (Note that it is
    # possible the lower or upper is user-specified and the other is not,
    # hence the need to store information for both.)
    relaxationBlock.bigm_src = {}
    relaxationBlock.localVarReferences = Block()
    obj._transformation_block = weakref_ref(relaxationBlock)
    relaxationBlock._srcDisjunct = weakref_ref(obj)

    # This is crazy, but if the disjunction has been previously
    # relaxed, the disjunct *could* be deactivated. This is a big
    # deal for Hull, as it uses the component_objects /
    # component_data_objects generators. For BigM, that is OK,
    # because we never use those generators with active=True. I am
    # only noting it here for the future when someone (me?) is
    # comparing the two relaxations.
    #
    # Transform each component within this disjunct
    self._transform_block_components(obj, obj, bigM, arg_list,
                                     suffix_list)

    # deactivate disjunct to keep the writers happy
    obj._deactivate_without_fixing_indicator()
def _transform_disjunct(self, disjunct, partition, transBlock):
    """Transform one Disjunct for the partition_disjuncts (GDP -> GDP)
    transformation.

    Builds a new Disjunct on ``transBlock``, recursively transforms
    nested Disjunctions first, creates References to the original
    variables, copies LogicalConstraints verbatim, dispatches all other
    components to the registered handlers, and deactivates the
    original. Returns the transformed Disjunct.
    """
    # deactivated -> either we've already transformed or user deactivated
    if not disjunct.active:
        if disjunct.indicator_var.is_fixed():
            if not value(disjunct.indicator_var):
                # The user cleanly deactivated the disjunct: there
                # is nothing for us to do here.
                return
            else:
                raise GDP_Error(
                    "The disjunct '%s' is deactivated, but the "
                    "indicator_var is fixed to %s. This makes no sense."
                    % (disjunct.name, value(disjunct.indicator_var)))
        if disjunct._transformation_block is None:
            raise GDP_Error(
                "The disjunct '%s' is deactivated, but the "
                "indicator_var is not fixed and the disjunct does not "
                "appear to have been relaxed. This makes no sense. "
                "(If the intent is to deactivate the disjunct, fix its "
                "indicator_var to False.)" % (disjunct.name, ))

    if disjunct._transformation_block is not None:
        # we've transformed it, which means this is the second time it's
        # appearing in a Disjunction
        raise GDP_Error(
            "The disjunct '%s' has been transformed, but a disjunction "
            "it appears in has not. Putting the same disjunct in "
            "multiple disjunctions is not supported." % disjunct.name)

    transformed_disjunct = Disjunct()
    disjunct._transformation_block = weakref_ref(transformed_disjunct)
    transBlock.add_component(
        unique_component_name(
            transBlock,
            disjunct.getname(fully_qualified=True,
                             name_buffer=NAME_BUFFER)),
        transformed_disjunct)
    # If the original has an indicator_var fixed to something, fix this
    # one too.
    if disjunct.indicator_var.fixed:
        transformed_disjunct.indicator_var.fix(
            value(disjunct.indicator_var))

    # need to transform inner Disjunctions first (before we complain
    # about active Disjuncts)
    for disjunction in disjunct.component_data_objects(
            Disjunction,
            active=True,
            sort=SortComponents.deterministic,
            descend_into=Block):
        self._transform_disjunctionData(disjunction, disjunction.index(),
                                        None, transformed_disjunct)

    # create references to any variables declared here on the transformed
    # Disjunct (this will include the indicator_var) NOTE that we will
    # not have to do this when #1032 is implemented for the writers. But
    # right now, we are going to deactivate this and hide it from the
    # active subtree, so we need to be safe.
    for var in disjunct.component_objects(Var, descend_into=Block,
                                          active=None):
        transformed_disjunct.add_component(
            unique_component_name(
                transformed_disjunct,
                var.getname(fully_qualified=True,
                            name_buffer=NAME_BUFFER)),
            Reference(var))

    # Since this transformation is GDP -> GDP and it is based on
    # partitioning algebraic expressions, we will copy over
    # LogicalConstraints that may be on the Disjuncts, without
    # transforming them. This is consistent with our handling of nested
    # Disjunctions, which also remain nested, though their algebraic
    # constraints may be transformed. Note that we are not using
    # References because when asked who their parent block is, we would
    # like these constraints to answer that it is the transformed
    # Disjunct.
    logical_constraints = LogicalConstraintList()
    transformed_disjunct.add_component(
        unique_component_name(transformed_disjunct,
                              'logical_constraints'),
        logical_constraints)
    for cons in disjunct.component_data_objects(LogicalConstraint,
                                                descend_into=Block,
                                                active=None):
        # Add a copy of it on the new Disjunct
        logical_constraints.add(cons.expr)

        # deactivate to mark as transformed (so we don't hit it in the
        # loop below)
        cons.deactivate()

    # transform everything else
    for obj in disjunct.component_data_objects(
            active=True,
            sort=SortComponents.deterministic,
            descend_into=Block):
        handler = self.handlers.get(obj.ctype, None)
        if not handler:
            if handler is None:
                raise GDP_Error(
                    "No partition_disjuncts transformation handler "
                    "registered "
                    "for modeling components of type %s. If your "
                    "disjuncts contain non-GDP Pyomo components that "
                    "require transformation, please transform them "
                    "first." % obj.ctype)
            continue
        # we are really only transforming constraints and checking for
        # anything nutty (active Disjuncts, etc) here, so pass through
        # what is necessary for transforming Constraints
        handler(obj, disjunct, transformed_disjunct, transBlock,
                partition)

    disjunct._deactivate_without_fixing_indicator()

    return transformed_disjunct
def _transform_disjunctionData(self, obj, index, transBlock=None):
    """Apply the hull reformulation to one DisjunctionData.

    Determines/creates the transformation block, collects the variables
    to disaggregate (respecting LocalVars Suffixes and the
    ``assume_fixed_vars_permanent`` option), transforms the disjuncts,
    adds the XOR constraint, and adds/maps the disaggregation
    constraints. Deactivates ``obj`` when done.
    """
    if not obj.active:
        return

    # Hull reformulation doesn't work if this is an OR constraint. So if
    # xor is false, give up
    if not obj.xor:
        raise GDP_Error("Cannot do hull reformulation for "
                        "Disjunction '%s' with OR constraint. "
                        "Must be an XOR!" % obj.name)

    if transBlock is None:
        # It's possible that we have already created a transformation
        # block for another disjunctionData from this same container. If
        # that's the case, let's use the same transformation block. (Else
        # it will be really confusing that the XOR constraint goes to
        # that old block but we create a new one here.)
        if obj.parent_component()._algebraic_constraint is not None:
            transBlock = obj.parent_component()._algebraic_constraint().\
                         parent_block()
        else:
            transBlock = self._add_transformation_block(
                obj.parent_block())

    parent_component = obj.parent_component()

    orConstraint = self._add_xor_constraint(parent_component, transBlock)
    disaggregationConstraint = transBlock.disaggregationConstraints
    disaggregationConstraintMap = transBlock._disaggregationConstraintMap

    # Just because it's unlikely this is what someone meant to do...
    if len(obj.disjuncts) == 0:
        raise GDP_Error("Disjunction '%s' is empty. This is "
                        "likely indicative of a modeling error."
                        % obj.getname(fully_qualified=True,
                                      name_buffer=NAME_BUFFER))

    # We first go through and collect all the variables that we
    # are going to disaggregate.
    varOrder_set = ComponentSet()
    varOrder = []
    varsByDisjunct = ComponentMap()
    localVarsByDisjunct = ComponentMap()
    include_fixed_vars = not self._config.assume_fixed_vars_permanent
    for disjunct in obj.disjuncts:
        disjunctVars = varsByDisjunct[disjunct] = ComponentSet()
        for cons in disjunct.component_data_objects(
                Constraint,
                active=True,
                sort=SortComponents.deterministic,
                descend_into=Block):
            # [ESJ 02/14/2020] By default, we disaggregate fixed
            # variables on the philosophy that fixing is not a promise
            # for the future and we are mathematically wrong if we don't
            # transform these correctly and someone later unfixes them
            # and keeps playing with their transformed model. However,
            # the user may have set assume_fixed_vars_permanent to True
            # in which case we will skip them
            for var in EXPR.identify_variables(
                    cons.body, include_fixed=include_fixed_vars):
                # Note the use of a list so that we will
                # eventually disaggregate the vars in a
                # deterministic order (the order that we found
                # them)
                disjunctVars.add(var)
                if var not in varOrder_set:
                    varOrder.append(var)
                    varOrder_set.add(var)

        # check for LocalVars Suffix
        localVarsByDisjunct = self._get_local_var_suffixes(
            disjunct, localVarsByDisjunct)

    # We will disaggregate all variables which are not explicitly
    # declared as being local. Note however, that we do declare our own
    # disaggregated variables as local, so they will not be
    # re-disaggregated.
    varSet = []
    # values of localVarsByDisjunct are ComponentSets, so we need this
    # for determinism (we iterate through the localVars later)
    localVars = []
    for var in varOrder:
        disjuncts = [d for d in varsByDisjunct if var in varsByDisjunct[d]]
        # clearly not local if used in more than one disjunct
        if len(disjuncts) > 1:
            if __debug__ and logger.isEnabledFor(logging.DEBUG):
                logger.debug("Assuming '%s' is not a local var since it is"
                             "used in multiple disjuncts."
                             % var.getname(fully_qualified=True,
                                           name_buffer=NAME_BUFFER))
            varSet.append(var)
        elif localVarsByDisjunct.get(disjuncts[0]) is not None:
            if var in localVarsByDisjunct[disjuncts[0]]:
                localVars.append(var)
            else:
                varSet.append(var)
        else:
            varSet.append(var)

    # Now that we know who we need to disaggregate, we will do it
    # while we also transform the disjuncts.
    or_expr = 0
    for disjunct in obj.disjuncts:
        or_expr += disjunct.indicator_var
        self._transform_disjunct(disjunct, transBlock, varSet, localVars)
    orConstraint.add(index, (or_expr, 1))
    # map the DisjunctionData to its XOR constraint to mark it as
    # transformed
    obj._algebraic_constraint = weakref_ref(orConstraint[index])

    for i, var in enumerate(varSet):
        disaggregatedExpr = 0
        for disjunct in obj.disjuncts:
            if disjunct._transformation_block is None:
                # Because we called _transform_disjunct in the loop above,
                # we know that if this isn't transformed it is because it
                # was cleanly deactivated, and we can just skip it.
                continue

            disaggregatedVar = disjunct._transformation_block().\
                               _disaggregatedVarMap['disaggregatedVar'][
                                   var]
            disaggregatedExpr += disaggregatedVar

        disaggregationConstraint.add((i, index), var == disaggregatedExpr)
        # and update the map so that we can find this later. We index by
        # variable and the particular disjunction because there is a
        # different one for each disjunction
        if disaggregationConstraintMap.get(var) is not None:
            disaggregationConstraintMap[var][
                obj] = disaggregationConstraint[(i, index)]
        else:
            thismap = disaggregationConstraintMap[var] = ComponentMap()
            thismap[obj] = disaggregationConstraint[(i, index)]

    # deactivate for the writers
    obj.deactivate()
def _generate_cuttingplanes(self, instance, instance_rBigM, instance_rHull,
                            var_info, transBlockName):
    """Iteratively solve rBigM and the hull separation problem, adding a
    cut each round until the rBigM objective stops improving.

    Stops early (with a warning) if either subproblem fails to solve
    normally.
    """
    opt = SolverFactory(SOLVER)

    improving = True
    iteration = 0
    prev_obj = float("inf")
    # convergence tolerance on the (absolute or relative) objective
    # improvement between iterations
    epsilon = 0.01

    transBlock = instance.component(transBlockName)
    transBlock_rBigM = instance_rBigM.component(transBlockName)

    # We try to grab the first active objective. If there is more
    # than one, the writer will yell when we try to solve below. If
    # there are 0, we will yell here.
    rBigM_obj = next(
        instance_rBigM.component_data_objects(Objective, active=True),
        None)
    if rBigM_obj is None:
        raise GDP_Error("Cannot apply cutting planes transformation "
                        "without an active objective in the model!")

    while (improving):
        # solve rBigM, solution is xstar
        results = opt.solve(instance_rBigM, tee=stream_solvers)
        if verify_successful_solve(results) is not NORMAL:
            logger.warning("GDP.cuttingplane: Relaxed BigM subproblem "
                           "did not solve normally. Stopping cutting "
                           "plane generation.\n\n%s" % (results, ))
            return

        rBigM_objVal = value(rBigM_obj)
        logger.warning("gdp.cuttingplane: rBigM objective = %s"
                       % (rBigM_objVal, ))

        # copy over xstar
        for x_bigm, x_rbigm, x_hull, x_star in var_info:
            x_star.value = x_rbigm.value
            # initialize the X values
            x_hull.value = x_rbigm.value

        # solve separation problem to get xhat.
        results = opt.solve(instance_rHull, tee=stream_solvers)
        if verify_successful_solve(results) is not NORMAL:
            logger.warning("GDP.cuttingplane: Hull separation subproblem "
                           "did not solve normally. Stopping cutting "
                           "plane generation.\n\n%s" % (results, ))
            return

        self._add_cut(var_info, transBlock, transBlock_rBigM)

        # decide whether or not to keep going: check absolute difference
        # close to 0, relative difference further from 0.
        obj_diff = prev_obj - rBigM_objVal
        improving = math.isinf(obj_diff) or \
            (abs(obj_diff) > epsilon if abs(rBigM_objVal) < 1
             else abs(obj_diff / prev_obj) > epsilon)

        prev_obj = rBigM_objVal
        iteration += 1
def _xform_constraint(self, obj, disjunct, infodict, bigMargs,
                      suffix_list):
    """Move a (possibly indexed) constraint from a disjunct onto its
    relaxation block as Big-M constraints.

    The M value for each ConstraintData is resolved in priority order:
    transformation arguments, then Suffixes, then estimation from
    variable bounds. Raises GDP_Error if a needed M cannot be
    determined.
    """
    # add constraint to the transformation block, we'll transform it
    # there.
    relaxationBlock = infodict['bigm']['relaxationBlock']
    transBlock = relaxationBlock.parent_block()
    # Though rare, it is possible to get naming conflicts here
    # since constraints from all blocks are getting moved onto the
    # same block. So we get a unique name
    name = unique_component_name(relaxationBlock, obj.name)

    if obj.is_indexed():
        try:
            newConstraint = Constraint(obj.index_set(), transBlock.lbub)
        except TypeError:
            # The original constraint may have been indexed by a
            # non-concrete set (like an Any). We will give up on
            # strict index verification and just blindly proceed.
            newConstraint = Constraint(Any)
    else:
        newConstraint = Constraint(transBlock.lbub)
    relaxationBlock.add_component(name, newConstraint)
    # add mapping of original constraint to transformed constraint
    # in transformation info dictionary
    infodict['bigm']['relaxedConstraints'][obj] = newConstraint
    # add mapping of transformed constraint back to original constraint
    # (we know that the info dict is already created because this only
    # got called if we were transforming a disjunct...)
    relaxationBlock._gdp_transformation_info['srcConstraints'][
        newConstraint] = obj

    for i in sorted(iterkeys(obj)):
        c = obj[i]
        if not c.active:
            continue

        # first, we see if an M value was specified in the arguments.
        # (This returns None if not)
        M = self._get_M_from_args(c, bigMargs)

        if __debug__ and logger.isEnabledFor(logging.DEBUG):
            logger.debug("GDP(BigM): The value for M for constraint %s "
                         "from the BigM argument is %s." % (obj.name,
                                                            str(M)))

        # if we didn't get something from args, try suffixes:
        if M is None:
            M = self._get_M_from_suffixes(c, suffix_list)

        if __debug__ and logger.isEnabledFor(logging.DEBUG):
            logger.debug("GDP(BigM): The value for M for constraint %s "
                         "after checking suffixes is %s." % (obj.name,
                                                             str(M)))

        if not isinstance(M, (tuple, list)):
            if M is None:
                M = (None, None)
            else:
                try:
                    M = (-M, M)
                except Exception:
                    # narrowed from a bare except: we still log and
                    # re-raise any real conversion failure
                    logger.error("Error converting scalar M-value %s "
                                 "to (-M,M). Is %s not a numeric type?"
                                 % (M, type(M)))
                    raise
        if len(M) != 2:
            raise GDP_Error("Big-M %s for constraint %s is not of "
                            "length two. "
                            "Expected either a single value or "
                            "tuple or list of length two for M."
                            % (str(M), name))

        # estimate whichever side is still missing
        if c.lower is not None and M[0] is None:
            M = (self._estimate_M(c.body, name)[0] - c.lower, M[1])
        if c.upper is not None and M[1] is None:
            M = (M[0], self._estimate_M(c.body, name)[1] - c.upper)

        if __debug__ and logger.isEnabledFor(logging.DEBUG):
            logger.debug("GDP(BigM): The value for M for constraint %s "
                         "after estimating (if needed) is %s."
                         % (obj.name, str(M)))

        # Handle indices for both SimpleConstraint and IndexedConstraint
        if i.__class__ is tuple:
            i_lb = i + ('lb', )
            i_ub = i + ('ub', )
        elif obj.is_indexed():
            i_lb = (i, 'lb', )
            i_ub = (i, 'ub', )
        else:
            i_lb = 'lb'
            i_ub = 'ub'

        if c.lower is not None:
            if M[0] is None:
                raise GDP_Error("Cannot relax disjunctive constraint %s "
                                "because M is not defined." % name)
            M_expr = M[0] * (1 - disjunct.indicator_var)
            newConstraint.add(i_lb, c.lower <= c.body - M_expr)
        if c.upper is not None:
            if M[1] is None:
                raise GDP_Error("Cannot relax disjunctive constraint %s "
                                "because M is not defined." % name)
            M_expr = M[1] * (1 - disjunct.indicator_var)
            newConstraint.add(i_ub, c.body - M_expr <= c.upper)
def _apply_to(self, instance, **kwds):
    """Reclassify relaxed Disjuncts as Blocks (gdp.reclassify HACK).

    Walks Disjunct containers bottom-up, errors if any active Disjunct was
    never relaxed (with a diagnosis of why), then reclassifies each
    container as a Block and deactivates the constraints inside.
    """
    assert not kwds  # no keywords expected to the transformation
    disjunct_generator = instance.component_objects(
        Disjunct, descend_into=(Block, Disjunct),
        descent_order=TraversalStrategy.PostfixDFS)
    for disjunct_component in disjunct_generator:
        # Check that the disjuncts being reclassified are all relaxed or
        # are not on an active block.
        for disjunct in disjunct_component.values():
            if (disjunct.active and
                    self._disjunct_not_relaxed(disjunct) and
                    self._disjunct_on_active_block(disjunct) and
                    self._disjunct_not_fixed_true(disjunct)):

                # First, do a couple checks in order to give a more
                # useful error message
                disjunction_set = {i for i in
                                   instance.component_data_objects(
                                       Disjunction, descend_into=True,
                                       active=None)}
                active_disjunction_set = {i for i in
                                          instance.component_data_objects(
                                              Disjunction,
                                              descend_into=True,
                                              active=True)}
                disjuncts_in_disjunctions = set()
                for i in disjunction_set:
                    disjuncts_in_disjunctions.update(i.disjuncts)
                disjuncts_in_active_disjunctions = set()
                for i in active_disjunction_set:
                    disjuncts_in_active_disjunctions.update(i.disjuncts)

                if disjunct not in disjuncts_in_disjunctions:
                    raise GDP_Error(
                        'Disjunct "%s" is currently active, '
                        'but was not found in any Disjunctions. '
                        'This is generally an error as the model '
                        'has not been fully relaxed to a '
                        'pure algebraic form.' % (disjunct.name, ))
                elif disjunct not in disjuncts_in_active_disjunctions:
                    raise GDP_Error(
                        'Disjunct "%s" is currently active. While '
                        'it participates in a Disjunction, '
                        'that Disjunction is currently deactivated. '
                        'This is generally an error as the '
                        'model has not been fully relaxed to a pure '
                        'algebraic form. Did you deactivate '
                        'the Disjunction without addressing the '
                        'individual Disjuncts?' % (disjunct.name, ))
                else:
                    raise GDP_Error("""
                    Reclassifying active Disjunct "%s" as a Block.  This
                    is generally an error as it indicates that the model
                    was not completely relaxed before applying the
                    gdp.reclassify transformation""" % (disjunct.name, ))

        # Reclassify this disjunct as a block
        disjunct_component.parent_block().reclassify_component_type(
            disjunct_component, Block)
        # HACK: activate the block, but do not activate the
        # _BlockData objects
        super(ActiveIndexedComponent, disjunct_component).activate()

        # Deactivate all constraints.  Note that we only need to
        # descend into blocks: we will catch disjuncts in the outer
        # loop.
        #
        # Note that we defer this until AFTER we reactivate the
        # block, as the component_objects generator will not
        # return anything when active=True and the block is
        # deactivated.
        for disjunct in disjunct_component._data.values():
            if self._disjunct_not_relaxed(disjunct):
                disjunct._deactivate_without_fixing_indicator()
            else:
                disjunct._activate_without_unfixing_indicator()

            cons_in_disjunct = disjunct.component_objects(
                Constraint, descend_into=Block, active=True)
            for con in cons_in_disjunct:
                con.deactivate()
def _setup_subproblems(self, instance, bigM, tighten_relaxation_callback):
    """Build the bigM and hull subproblems for the cutting-plane loop.

    Creates a hull relaxation clone (the separation problem), applies the
    bigM relaxation to `instance` itself (what the user gets back), relaxes
    integrality on both, and sets up the xstar parameter plus the
    variable-to-variable/parameter mapping between the two models.
    """
    # create transformation block
    transBlockName, transBlock = self._add_transformation_block(instance)
    # We store a list of all vars so that we can efficiently
    # generate maps among the subproblems
    transBlock.all_vars = list(v for v in instance.component_data_objects(
        Var,
        descend_into=(Block, Disjunct),
        sort=SortComponents.deterministic) if not v.is_fixed())

    # we'll store all the cuts we add together
    nm = self._config.cuts_name
    if nm is None:
        cuts_obj = transBlock.cuts = Constraint(NonNegativeIntegers)
    else:
        # check that this really is an available name
        if instance.component(nm) is not None:
            raise GDP_Error("cuts_name was specified as '%s', but this is "
                            "already a component on the instance! Please "
                            "specify a unique name." % nm)
        instance.add_component(nm, Constraint(NonNegativeIntegers))
        cuts_obj = instance.component(nm)

    # get bigM and hull relaxations
    bigMRelaxation = TransformationFactory('gdp.bigm')
    hullRelaxation = TransformationFactory('gdp.hull')
    relaxIntegrality = TransformationFactory('core.relax_integer_vars')

    #
    # Generate the Hull relaxation (used for the separation
    # problem to generate cutting planes)
    #
    tighter_instance = tighten_relaxation_callback(instance)
    instance_rHull = hullRelaxation.create_using(tighter_instance)
    relaxIntegrality.apply_to(instance_rHull,
                              transform_deactivated_blocks=True)

    #
    # Reformulate the instance using the BigM relaxation (this will
    # be the final instance returned to the user)
    #
    bigMRelaxation.apply_to(instance, bigM=bigM)

    #
    # Generate the continuous relaxation of the BigM transformation. We'll
    # restore it at the end.
    #
    relaxIntegrality.apply_to(instance, transform_deactivated_blocks=True)

    #
    # Add the xstar parameter for the Hull problem
    #
    # create_using preserved component names, so the same name finds the
    # clone's transformation block
    transBlock_rHull = instance_rHull.component(transBlockName)
    #
    # this will hold the solution to rbigm each time we solve it. We
    # add it to the transformation block so that we don't have to
    # worry about name conflicts.
    transBlock_rHull.xstar = Param(range(len(transBlock.all_vars)),
                                   mutable=True, default=0, within=Reals)

    # we will add a block that we will deactivate to use to store the
    # extended space cuts. We never need to solve these, but we need them to
    # be constructed for the sake of Fourier-Motzkin Elimination
    extendedSpaceCuts = transBlock_rHull.extendedSpaceCuts = Block()
    extendedSpaceCuts.deactivate()
    extendedSpaceCuts.cuts = Constraint(Any)

    #
    # Generate the mapping between the variables on all the
    # instances and the xstar parameter.  Relies on all_vars having the
    # same deterministic order on both models.
    #
    var_info = [
        (v,  # this is the bigM variable
         transBlock_rHull.all_vars[i],
         transBlock_rHull.xstar[i])
        for i, v in enumerate(transBlock.all_vars)]

    # NOTE: we wait to add the separation objective to the rHull problem
    # because it is best to do it in the first iteration, so that we can
    # skip stale variables.

    return (instance, cuts_obj, instance_rHull, var_info, transBlockName)
def _transform_block_components(self, block, disjunct, bigM, arg_list,
                                suffix_list):
    """Transform everything on `block` (inside `disjunct`) for bigM.

    Moves already-transformed nested Disjunctions' relaxation data up to
    this disjunct's transformation block, converts logical constraints and
    stray BooleanVars to algebraic form, adds Var references so variables
    stay reachable after deactivation, and dispatches every remaining
    active component to its registered handler.
    """
    # We find any transformed disjunctions that might be here because we
    # need to move their transformation blocks up onto the parent block
    # before we transform anything else on this block. Note that we do this
    # before we create references to local variables because we do not want
    # duplicate references to indicator variables and local variables on
    # nested disjuncts.
    disjunctBlock = disjunct._transformation_block()
    destinationBlock = disjunctBlock.parent_block()
    for obj in block.component_data_objects(
            Disjunction,
            sort=SortComponents.deterministic,
            descend_into=(Block)):
        if obj.algebraic_constraint is None:
            # This could be bad if it's active since that means its
            # untransformed, but we'll wait to yell until the next loop
            continue
        # get this disjunction's relaxation block.
        transBlock = obj.algebraic_constraint().parent_block()

        # move transBlock up to parent component
        self._transfer_transBlock_data(transBlock, destinationBlock)
        # we leave the transformation block because it still has the XOR
        # constraints, which we want to be on the parent disjunct.

    # Transform any logical constraints here. We need to do this before we
    # create the variable references!
    TransformationFactory('core.logical_to_linear').apply_to(block)

    # We don't know where all the BooleanVars are used, so if there are any
    # that the above transformation didn't transform, we need to do it now,
    # so that the Reference gets moved up. This won't be necessary when the
    # writers are willing to find Vars not in the active subtree.
    for boolean in block.component_data_objects(BooleanVar,
                                                descend_into=Block,
                                                active=None):
        if isinstance(boolean._associated_binary,
                      _DeprecatedImplicitAssociatedBinaryVariable):
            parent_block = boolean.parent_block()
            new_var = Var(domain=Binary)
            parent_block.add_component(
                unique_component_name(parent_block,
                                      boolean.local_name + "_asbinary"),
                new_var)
            boolean.associate_binary_var(new_var)

    # Find all the variables declared here (including the indicator_var) and
    # add a reference on the transformation block so these will be
    # accessible when the Disjunct is deactivated. We don't descend into
    # Disjuncts because we just moved the references to their local
    # variables up in the previous loop.
    varRefBlock = disjunctBlock.localVarReferences
    for v in block.component_objects(Var, descend_into=Block, active=None):
        varRefBlock.add_component(unique_component_name(
            varRefBlock, v.getname(fully_qualified=True)), Reference(v))

    # Now look through the component map of block and transform everything
    # we have a handler for. Yell if we don't know how to handle it. (Note
    # that because we only iterate through active components, this means
    # non-ActiveComponent types cannot have handlers.)
    for obj in block.component_objects(active=True, descend_into=False):
        handler = self.handlers.get(obj.ctype, None)
        if not handler:
            if handler is None:
                raise GDP_Error(
                    "No BigM transformation handler registered "
                    "for modeling components of type %s. If your "
                    "disjuncts contain non-GDP Pyomo components that "
                    "require transformation, please transform them first."
                    % obj.ctype)
            # handler is registered as False: deliberately skip this type
            continue
        # obj is what we are transforming, we pass disjunct
        # through so that we will have access to the indicator
        # variables down the line.
        handler(obj, disjunct, bigM, arg_list, suffix_list)
def _transform_constraint(self, cons, disjunct, transformed_disjunct,
                          transBlock, partition):
    """Split `cons` additively over `partition` (between-steps transform).

    For each <= side of the constraint, rebuilds the expression piecewise
    over the variable partition, bounds each piece with an auxiliary
    variable (split constraints go on `transBlock`), and replaces the
    original constraint with sum(aux_vars) <= rhs on the transformed
    disjunct. Raises GDP_Error if the expression is not additively
    separable with respect to the partition, a piece is unbounded, or the
    partition misses variables.
    """
    instance = disjunct.model()
    cons_name = cons.getname(fully_qualified=True, name_buffer=NAME_BUFFER)

    # create place on transformed Disjunct for the new constraint and
    # for the auxiliary variables
    transformed_constraint = Constraint(NonNegativeIntegers)
    transformed_disjunct.add_component(
        unique_component_name(transformed_disjunct, cons_name),
        transformed_constraint)
    aux_vars = Var(NonNegativeIntegers, dense=False)
    transformed_disjunct.add_component(
        unique_component_name(transformed_disjunct,
                              cons_name + "_aux_vars"),
        aux_vars)

    # create a place on the transBlock for the split constraints
    split_constraints = Constraint(NonNegativeIntegers)
    transBlock.add_component(
        unique_component_name(transBlock,
                              cons_name + "_split_constraints"),
        split_constraints)

    # this is a list which might have two constraints in it if we had
    # both a lower and upper value.
    leq_constraints = self._get_leq_constraints(cons)
    for (body, rhs) in leq_constraints:
        repn = generate_standard_repn(body, compute_values=True)
        nonlinear_repn = None
        if repn.nonlinear_expr is not None:
            nonlinear_repn = _generate_additively_separable_repn(
                repn.nonlinear_expr)
        split_exprs = []
        split_aux_vars = []
        vars_not_accounted_for = ComponentSet(
            v for v in EXPR.identify_variables(body, include_fixed=False))
        vars_accounted_for = ComponentSet()
        for idx, var_list in enumerate(partition):
            # we are going to recreate the piece of the expression
            # involving the vars in var_list
            split_exprs.append(0)
            expr = split_exprs[-1]
            for i, v in enumerate(repn.linear_vars):
                if v in var_list:
                    expr += repn.linear_coefs[i] * v
                    vars_accounted_for.add(v)
            for i, (v1, v2) in enumerate(repn.quadratic_vars):
                if v1 in var_list:
                    # a quadratic term must fall entirely within one part
                    if v2 not in var_list:
                        raise GDP_Error("Variables '%s' and '%s' are "
                                        "multiplied in Constraint '%s', "
                                        "but they are in different "
                                        "partitions! Please ensure that "
                                        "all the constraints in the "
                                        "disjunction are "
                                        "additively separable with "
                                        "respect to the specified "
                                        "partition." % (v1.name, v2.name,
                                                        cons.name))
                    expr += repn.quadratic_coefs[i] * v1 * v2
                    vars_accounted_for.add(v1)
                    vars_accounted_for.add(v2)
            if nonlinear_repn is not None:
                for i, expr_var_set in enumerate(
                        nonlinear_repn['nonlinear_vars']):
                    # check if v_list is a subset of var_list. If it is
                    # not and there is no intersection, we move on. If
                    # it is not and there is an intersection, we raise
                    # an error: It's not a valid partition. If it is,
                    # then we add this piece of the expression.
                    # subset?
                    if all(v in var_list for v in list(expr_var_set)):
                        expr += nonlinear_repn['nonlinear_exprs'][i]
                        for var in expr_var_set:
                            vars_accounted_for.add(var)
                    # intersection?
                    elif len(ComponentSet(expr_var_set) &
                             var_list) != 0:
                        raise GDP_Error("Variables which appear in the "
                                        "expression %s are in different "
                                        "partitions, but this "
                                        "expression doesn't appear "
                                        "additively separable. Please "
                                        "expand it if it is additively "
                                        "separable or, more likely, "
                                        "ensure that all the "
                                        "constraints in the disjunction "
                                        "are additively separable with "
                                        "respect to the specified "
                                        "partition. If you did not "
                                        "specify a partition, only "
                                        "a value of P, note that to "
                                        "automatically partition the "
                                        "variables, we assume all the "
                                        "expressions are additively "
                                        "separable."
                                        % nonlinear_repn[
                                            'nonlinear_exprs'][i])
            # bound this piece of the expression (method and solver come
            # from the transformation config)
            expr_lb, expr_ub = self._config.compute_bounds_method(
                expr, self._global_constraints,
                self._config.compute_bounds_solver)
            if expr_lb is None or expr_ub is None:
                raise GDP_Error("Expression %s from constraint '%s' "
                                "is unbounded! Please ensure all "
                                "variables that appear "
                                "in the constraint are bounded or "
                                "specify compute_bounds_method="
                                "compute_optimal_bounds"
                                " if the expression is bounded by the "
                                "global constraints."
                                % (expr, cons.name))
            # if the expression was empty wrt the partition, we don't
            # need to bother with any of this. The aux_var doesn't need
            # to exist because it would be 0.
            if type(expr) is not int or expr != 0:
                aux_var = aux_vars[len(aux_vars)]
                aux_var.setlb(expr_lb)
                aux_var.setub(expr_ub)
                split_aux_vars.append(aux_var)
                split_constraints[
                    len(split_constraints)] = expr <= aux_var

        if len(vars_accounted_for) < len(vars_not_accounted_for):
            orphans = vars_not_accounted_for - vars_accounted_for
            orphan_string = ""
            for v in orphans:
                orphan_string += "'%s', " % v.name
            orphan_string = orphan_string[:-2]
            raise GDP_Error("Partition specified for disjunction "
                            "containing Disjunct '%s' does not "
                            "include all the variables that appear "
                            "in the disjunction. The following "
                            "variables are not assigned to any part "
                            "of the partition: %s" % (disjunct.name,
                                                      orphan_string))
        transformed_constraint[
            len(transformed_constraint)] = sum(v for v in
                                               split_aux_vars) <= \
            rhs - repn.constant
    # deactivate the constraint since we've transformed it
    cons.deactivate()
def _apply_to_impl(self, instance, **kwds):
    """Apply the BigM transformation to `instance` (or config.targets).

    Resolves the big-M value cascade (explicit args override suffixes,
    which override estimation; more specific values win), dispatches each
    target to the appropriate Disjunction/Block/Disjunct handler, and
    warns about any unused entries in a bigM argument map.
    """
    config = self.CONFIG(kwds.pop('options', {}))

    # We will let args override suffixes and estimate as a last
    # resort. More specific args/suffixes override ones anywhere in
    # the tree. Suffixes lower down in the tree override ones higher
    # up.
    if 'default_bigM' in kwds:
        # NOTE: logger.warn is a deprecated alias in the stdlib logging
        # module; use logger.warning (consistent with the other
        # transformations in this file).
        logger.warning("DEPRECATED: the 'default_bigM=' argument has been "
                       "replaced by 'bigM='")
        config.bigM = kwds.pop('default_bigM')

    config.set_value(kwds)
    bigM = config.bigM

    targets = config.targets
    if targets is None:
        targets = (instance, )
        _HACK_transform_whole_instance = True
    else:
        _HACK_transform_whole_instance = False
    # We need to check that all the targets are in fact on instance. As we
    # do this, we will use the set below to cache components we know to be
    # in the tree rooted at instance.
    knownBlocks = {}
    for t in targets:
        # check that t is in fact a child of instance
        if not is_child_of(parent=instance, child=t,
                           knownBlocks=knownBlocks):
            raise GDP_Error("Target %s is not a component on instance %s!"
                            % (t.name, instance.name))
        elif t.ctype is Disjunction:
            if t.parent_component() is t:
                self._transform_disjunction(t, bigM)
            else:
                self._transform_disjunctionData( t, bigM, t.index())
        elif t.ctype in (Block, Disjunct):
            if t.parent_component() is t:
                self._transform_block(t, bigM)
            else:
                self._transform_blockData(t, bigM)
        else:
            raise GDP_Error(
                "Target %s was not a Block, Disjunct, or Disjunction. "
                "It was of type %s and can't be transformed."
                % (t.name, type(t)))

    # issue warnings about anything that was in the bigM args dict that we
    # didn't use
    if bigM is not None:
        unused_args = ComponentSet(bigM.keys()) - \
                      ComponentSet(self.used_args.keys())
        if len(unused_args) > 0:
            warning_msg = ("Unused arguments in the bigM map! "
                           "These arguments were not used by the "
                           "transformation:\n")
            for component in unused_args:
                if hasattr(component, 'name'):
                    warning_msg += "\t%s\n" % component.name
                else:
                    warning_msg += "\t%s\n" % component
            logger.warning(warning_msg)

    # HACK for backwards compatibility with the older GDP transformations
    #
    # Until the writers are updated to find variables on things
    # other than active blocks, we need to reclassify the Disjuncts
    # as Blocks after transformation so that the writer will pick up
    # all the variables that it needs (in this case, indicator_vars).
    if _HACK_transform_whole_instance:
        HACK_GDP_Disjunct_Reclassifier().apply_to(instance)
def _transform_disjunct(self, obj, transBlock, varSet, localVars):
    """Hull-transform one Disjunct: disaggregate `varSet`, keep `localVars`.

    Creates a relaxation block with disaggregated copies of the variables
    in `varSet` (bounds (min(0,lb), max(0,ub))) plus their
    lb*indicator <= var <= ub*indicator constraints; local variables are
    bounded in place rather than disaggregated. Then transforms the
    disjunct's contents with the substitution maps and deactivates it.
    Requires every variable to be bounded.
    """
    # deactivated should only come from the user
    if not obj.active:
        if obj.indicator_var.is_fixed():
            if value(obj.indicator_var) == 0:
                # The user cleanly deactivated the disjunct: there
                # is nothing for us to do here.
                return
            else:
                raise GDP_Error(
                    "The disjunct '%s' is deactivated, but the "
                    "indicator_var is fixed to %s. This makes no sense."
                    % (obj.name, value(obj.indicator_var)))
        if obj._transformation_block is None:
            raise GDP_Error(
                "The disjunct '%s' is deactivated, but the "
                "indicator_var is not fixed and the disjunct does not "
                "appear to have been relaxed. This makes no sense. "
                "(If the intent is to deactivate the disjunct, fix its "
                "indicator_var to 0.)" % (obj.name, ))

    if obj._transformation_block is not None:
        # we've transformed it, which means this is the second time it's
        # appearing in a Disjunction
        raise GDP_Error(
            "The disjunct '%s' has been transformed, but a disjunction "
            "it appears in has not. Putting the same disjunct in "
            "multiple disjunctions is not supported." % obj.name)

    # create a relaxation block for this disjunct
    relaxedDisjuncts = transBlock.relaxedDisjuncts
    relaxationBlock = relaxedDisjuncts[len(relaxedDisjuncts)]

    relaxationBlock.localVarReferences = Block()

    # Put the disaggregated variables all on their own block so that we can
    # isolate the name collisions and still have complete control over the
    # names on this block. (This is for peace of mind now, but will matter
    # in the future for adding the binaries corresponding to Boolean
    # indicator vars.)
    relaxationBlock.disaggregatedVars = Block()

    # add the map that will link back and forth between transformed
    # constraints and their originals.
    relaxationBlock._constraintMap = {
        'srcConstraints': ComponentMap(),
        'transformedConstraints': ComponentMap()
    }
    # Map between disaggregated variables for this disjunct and their
    # originals
    relaxationBlock._disaggregatedVarMap = {
        'srcVar': ComponentMap(),
        'disaggregatedVar': ComponentMap(),
    }
    # Map between disaggregated variables and their lb*indicator <= var <=
    # ub*indicator constraints
    relaxationBlock._bigMConstraintMap = ComponentMap()

    # add mappings to source disjunct (so we'll know we've relaxed)
    obj._transformation_block = weakref_ref(relaxationBlock)
    relaxationBlock._srcDisjunct = weakref_ref(obj)

    # add Suffix to the relaxation block that disaggregated variables are
    # local (in case this is nested in another Disjunct)
    local_var_set = None
    parent_disjunct = obj.parent_block()
    while parent_disjunct is not None:
        if parent_disjunct.ctype is Disjunct:
            break
        parent_disjunct = parent_disjunct.parent_block()
    if parent_disjunct is not None:
        localVarSuffix = relaxationBlock.LocalVars = Suffix(
            direction=Suffix.LOCAL)
        local_var_set = localVarSuffix[parent_disjunct] = ComponentSet()

    # add the disaggregated variables and their bigm constraints
    # to the relaxationBlock
    for var in varSet:
        lb = var.lb
        ub = var.ub
        if lb is None or ub is None:
            raise GDP_Error("Variables that appear in disjuncts must be "
                            "bounded in order to use the hull "
                            "transformation! Missing bound for %s."
                            % (var.name))

        # bounds include 0 so the disaggregated var can be 0 when the
        # disjunct is not selected
        disaggregatedVar = Var(within=Reals,
                               bounds=(min(0, lb), max(0, ub)),
                               initialize=var.value)
        # naming conflicts are possible here since this is a bunch
        # of variables from different blocks coming together, so we
        # get a unique name
        disaggregatedVarName = unique_component_name(
            relaxationBlock.disaggregatedVars,
            var.getname(fully_qualified=False, name_buffer=NAME_BUFFER),
        )
        relaxationBlock.disaggregatedVars.add_component(
            disaggregatedVarName, disaggregatedVar)
        # mark this as local because we won't re-disaggregate if this is a
        # nested disjunction
        if local_var_set is not None:
            local_var_set.add(disaggregatedVar)

        # store the mappings from variables to their disaggregated selves on
        # the transformation block.
        relaxationBlock._disaggregatedVarMap['disaggregatedVar'][
            var] = disaggregatedVar
        relaxationBlock._disaggregatedVarMap['srcVar'][
            disaggregatedVar] = var

        bigmConstraint = Constraint(transBlock.lbub)
        relaxationBlock.add_component(disaggregatedVarName + "_bounds",
                                      bigmConstraint)
        # NOTE: a bound of exactly 0 is skipped here (falsy); the
        # disaggregated variable's own bounds already cover that case.
        if lb:
            bigmConstraint.add(
                'lb', obj.indicator_var * lb <= disaggregatedVar)
        if ub:
            bigmConstraint.add(
                'ub', disaggregatedVar <= obj.indicator_var * ub)

        relaxationBlock._bigMConstraintMap[
            disaggregatedVar] = bigmConstraint

    for var in localVars:
        lb = var.lb
        ub = var.ub
        if lb is None or ub is None:
            raise GDP_Error("Variables that appear in disjuncts must be "
                            "bounded in order to use the hull "
                            "transformation! Missing bound for %s."
                            % (var.name))
        # local vars are not disaggregated, but their domain must include 0
        if value(lb) > 0:
            var.setlb(0)
        if value(ub) < 0:
            var.setub(0)

        # map it to itself
        relaxationBlock._disaggregatedVarMap['disaggregatedVar'][var] = var
        relaxationBlock._disaggregatedVarMap['srcVar'][var] = var

        # naming conflicts are possible here since this is a bunch
        # of variables from different blocks coming together, so we
        # get a unique name
        conName = unique_component_name(
            relaxationBlock,
            var.getname(fully_qualified=False,
                        name_buffer=NAME_BUFFER) + "_bounds")
        bigmConstraint = Constraint(transBlock.lbub)
        relaxationBlock.add_component(conName, bigmConstraint)
        if lb:
            bigmConstraint.add('lb', obj.indicator_var * lb <= var)
        if ub:
            bigmConstraint.add('ub', var <= obj.indicator_var * ub)
        relaxationBlock._bigMConstraintMap[var] = bigmConstraint

    var_substitute_map = dict((id(v), newV) for v, newV in iteritems(
        relaxationBlock._disaggregatedVarMap['disaggregatedVar']))
    zero_substitute_map = dict(
        (id(v), ZeroConstant) for v, newV in iteritems(
            relaxationBlock._disaggregatedVarMap['disaggregatedVar']))
    zero_substitute_map.update((id(v), ZeroConstant) for v in localVars)

    # Transform each component within this disjunct
    self._transform_block_components(obj, obj, var_substitute_map,
                                     zero_substitute_map)

    # deactivate disjunct so writers can be happy
    obj._deactivate_without_fixing_indicator()
def _transform_constraint(self, obj, disjunct, bigMargs, arg_list,
                          suffix_list):
    """Relax a (possibly indexed) Constraint on `disjunct` using big-M.

    Mirrors the constraint on the disjunct's transformation block with an
    extra 'lb'/'ub' index position, resolves M via args -> suffixes ->
    estimation (recording the source of each M in `bigm_src`), stores the
    original<->transformed mapping, and deactivates the original.
    """
    # add constraint to the transformation block, we'll transform it there.
    transBlock = disjunct._transformation_block()
    bigm_src = transBlock.bigm_src
    constraintMap = self._get_constraint_map_dict(transBlock)

    disjunctionRelaxationBlock = transBlock.parent_block()
    # Though rare, it is possible to get naming conflicts here
    # since constraints from all blocks are getting moved onto the
    # same block. So we get a unique name
    cons_name = obj.getname(fully_qualified=True, name_buffer=NAME_BUFFER)
    name = unique_component_name(transBlock, cons_name)

    if obj.is_indexed():
        try:
            newConstraint = Constraint(obj.index_set(),
                                       disjunctionRelaxationBlock.lbub)
        # HACK: We get burned by #191 here... When #1319 is merged we
        # can revist this and I think stop catching the AttributeError.
        except (TypeError, AttributeError):
            # The original constraint may have been indexed by a
            # non-concrete set (like an Any). We will give up on
            # strict index verification and just blindly proceed.
            newConstraint = Constraint(Any)
    else:
        newConstraint = Constraint(disjunctionRelaxationBlock.lbub)
    transBlock.add_component(name, newConstraint)
    # add mapping of original constraint to transformed constraint
    constraintMap['srcConstraints'][newConstraint] = obj
    constraintMap['transformedConstraints'][obj] = newConstraint

    for i in sorted(iterkeys(obj)):
        c = obj[i]
        if not c.active:
            continue

        # first, we see if an M value was specified in the arguments.
        # (This returns None if not)
        M = self._get_M_from_args(c, bigMargs, arg_list, bigm_src)

        if __debug__ and logger.isEnabledFor(logging.DEBUG):
            # NOTE(review): _name appears unused; cons_name is what the
            # message interpolates.
            _name = obj.getname(
                fully_qualified=True, name_buffer=NAME_BUFFER)
            logger.debug("GDP(BigM): The value for M for constraint %s "
                         "from the BigM argument is %s." % (cons_name,
                                                            str(M)))

        # if we didn't get something from args, try suffixes:
        if M is None:
            M = self._get_M_from_suffixes(c, suffix_list, bigm_src)

        if __debug__ and logger.isEnabledFor(logging.DEBUG):
            _name = obj.getname(
                fully_qualified=True, name_buffer=NAME_BUFFER)
            logger.debug("GDP(BigM): The value for M for constraint %s "
                         "after checking suffixes is %s." % (cons_name,
                                                             str(M)))

        # Normalize a scalar M into a (lower, upper) pair.
        if not isinstance(M, (tuple, list)):
            if M is None:
                M = (None, None)
            else:
                try:
                    M = (-M, M)
                except:
                    # re-raised below, so the original error is preserved
                    logger.error("Error converting scalar M-value %s "
                                 "to (-M,M). Is %s not a numeric type?"
                                 % (M, type(M)))
                    raise
        if len(M) != 2:
            raise GDP_Error("Big-M %s for constraint %s is not of "
                            "length two. "
                            "Expected either a single value or "
                            "tuple or list of length two for M."
                            % (str(M), name))

        # Last resort: estimate missing M values from the constraint body,
        # recording the estimate so we can report where M came from.
        if c.lower is not None and M[0] is None:
            M = (self._estimate_M(c.body, name)[0] - c.lower, M[1])
            bigm_src[c] = M
        if c.upper is not None and M[1] is None:
            M = (M[0], self._estimate_M(c.body, name)[1] - c.upper)
            bigm_src[c] = M

        if __debug__ and logger.isEnabledFor(logging.DEBUG):
            _name = obj.getname(
                fully_qualified=True, name_buffer=NAME_BUFFER)
            logger.debug("GDP(BigM): The value for M for constraint %s "
                         "after estimating (if needed) is %s."
                         % (cons_name, str(M)))

        # Handle indices for both SimpleConstraint and IndexedConstraint
        if i.__class__ is tuple:
            i_lb = i + ('lb',)
            i_ub = i + ('ub',)
        elif obj.is_indexed():
            i_lb = (i, 'lb',)
            i_ub = (i, 'ub',)
        else:
            i_lb = 'lb'
            i_ub = 'ub'

        # Add the relaxed sides: the M term vanishes when the disjunct's
        # indicator variable is 1, recovering the original constraint.
        if c.lower is not None:
            if M[0] is None:
                raise GDP_Error("Cannot relax disjunctive constraint %s "
                                "because M is not defined." % name)
            M_expr = M[0] * (1 - disjunct.indicator_var)
            newConstraint.add(i_lb, c.lower <= c.body - M_expr)
        if c.upper is not None:
            if M[1] is None:
                raise GDP_Error("Cannot relax disjunctive constraint %s "
                                "because M is not defined." % name)
            M_expr = M[1] * (1 - disjunct.indicator_var)
            newConstraint.add(i_ub, c.body - M_expr <= c.upper)
    # deactivate because we relaxed
    c.deactivate()
def _apply_to(self, instance, **kwds):
    """Apply the (older) convex-hull transformation to `instance`.

    Builds a relaxation block, dispatches each target to the appropriate
    Disjunction/Block/Disjunct handler, then deactivates indexed
    containers whose members are all inactive so the writers' invalid-
    component checks flag anything left untransformed.
    """
    self._config = self.CONFIG(kwds.pop('options', {}))
    self._config.set_value(kwds)

    # make a transformation block
    transBlockName = unique_component_name(
        instance, '_pyomo_gdp_chull_relaxation')
    transBlock = Block()
    instance.add_component(transBlockName, transBlock)
    transBlock.relaxedDisjuncts = Block(Any)
    # chull also needs 'eq' (unlike bigm) for the disaggregation equalities
    transBlock.lbub = Set(initialize=['lb', 'ub', 'eq'])
    transBlock.disjContainers = ComponentSet()

    targets = self._config.targets
    if targets is None:
        targets = (instance, )
        _HACK_transform_whole_instance = True
    else:
        _HACK_transform_whole_instance = False
    for _t in targets:
        t = _t.find_component(instance)
        if t is None:
            raise GDP_Error(
                "Target %s is not a component on the instance!" % _t)

        if t.type() is Disjunction:
            if t.parent_component() is t:
                self._transformDisjunction(t, transBlock)
            else:
                self._transformDisjunctionData(t, transBlock, t.index())
        elif t.type() in (Block, Disjunct):
            if t.parent_component() is t:
                self._transformBlock(t, transBlock)
            else:
                self._transformBlockData(t, transBlock)
        else:
            raise GDP_Error(
                "Target %s was not a Block, Disjunct, or Disjunction. "
                "It was of type %s and can't be transformed"
                % (t.name, type(t)))

    # Go through our dictionary of indexed things and deactivate
    # the containers that don't have any active guys inside of
    # them. So the invalid component logic will tell us if we
    # missed something getting transformed.
    for obj in transBlock.disjContainers:
        if not obj.active:
            continue
        for i in obj:
            if obj[i].active:
                break
        else:
            # HACK due to active flag implementation.
            #
            # Ideally we would not have to do any of this (an
            # ActiveIndexedComponent would get its active status by
            # querying the active status of all the contained Data
            # objects).  As a fallback, we would like to call:
            #
            # obj._deactivate_without_fixing_indicator()
            #
            # However, the straightforward implementation of that
            # method would have unintended side effects (fixing the
            # contained _DisjunctData's indicator_vars!) due to our
            # class hierarchy.  Instead, we will directly call the
            # relevant base class (safe-ish since we are verifying
            # that all the contained _DisjunctionData are
            # deactivated directly above).
            ActiveComponent.deactivate(obj)

    # HACK for backwards compatibility with the older GDP transformations
    #
    # Until the writers are updated to find variables on things
    # other than active blocks, we need to reclassify the Disjuncts
    # as Blocks after transformation so that the writer will pick up
    # all the variables that it needs (in this case, indicator_vars).
    if _HACK_transform_whole_instance:
        HACK_GDP_Disjunct_Reclassifier().apply_to(instance)
def _apply_to_impl(self, instance, **kwds): config = self.CONFIG(kwds.pop('options', {})) # We will let args override suffixes and estimate as a last # resort. More specific args/suffixes override ones anywhere in # the tree. Suffixes lower down in the tree override ones higher # up. if 'default_bigM' in kwds: deprecation_warning("the 'default_bigM=' argument has been " "replaced by 'bigM='", version='5.4') config.bigM = kwds.pop('default_bigM') config.set_value(kwds) bigM = config.bigM self.assume_fixed_vars_permanent = config.assume_fixed_vars_permanent targets = config.targets if targets is None: targets = (instance, ) # We need to check that all the targets are in fact on instance. As we # do this, we will use the set below to cache components we know to be # in the tree rooted at instance. knownBlocks = {} for t in targets: # check that t is in fact a child of instance if not is_child_of(parent=instance, child=t, knownBlocks=knownBlocks): raise GDP_Error( "Target '%s' is not a component on instance '%s'!" % (t.name, instance.name)) elif t.ctype is Disjunction: if t.is_indexed(): self._transform_disjunction(t, bigM) else: self._transform_disjunctionData( t, bigM, t.index()) elif t.ctype in (Block, Disjunct): if t.is_indexed(): self._transform_block(t, bigM) else: self._transform_blockData(t, bigM) else: raise GDP_Error( "Target '%s' was not a Block, Disjunct, or Disjunction. " "It was of type %s and can't be transformed." % (t.name, type(t))) # issue warnings about anything that was in the bigM args dict that we # didn't use if bigM is not None: unused_args = ComponentSet(bigM.keys()) - \ ComponentSet(self.used_args.keys()) if len(unused_args) > 0: warning_msg = ("Unused arguments in the bigM map! " "These arguments were not used by the " "transformation:\n") for component in unused_args: if hasattr(component, 'name'): warning_msg += "\t%s\n" % component.name else: warning_msg += "\t%s\n" % component logger.warning(warning_msg)
def _getDisjunctionConstraints(self, disjunction):
    """Return (orC, disaggregationConstraint) for ``disjunction``.

    Both constraint containers live on the disjunction's parent block and
    are tracked in the parent's ``_gdp_transformation_info`` dict; each is
    created (with a unique component name) on first request and reused on
    subsequent calls.
    """
    # Put the disjunction constraint on its parent block.  We never do
    # this for just a DisjunctionData because we need to know about the
    # index set of its parent component.  So if we called this on a
    # DisjunctionData, we did something wrong.
    assert isinstance(disjunction, Disjunction)
    parent = disjunction.parent_block()

    # Fetch (or create) the bookkeeping dict on the parent block.  If the
    # attribute exists but is not a plain dict, some other code owns that
    # name and we cannot safely use it.
    if not hasattr(parent, "_gdp_transformation_info"):
        infodict = parent._gdp_transformation_info = {}
    else:
        infodict = parent._gdp_transformation_info
        if type(infodict) is not dict:
            raise GDP_Error(
                "Component %s contains an attribute named "
                "_gdp_transformation_info. The transformation requires "
                "that it can create this attribute!" % parent.name)
    # On the off-chance that another GDP transformation went first, the
    # infodict may exist but lack the specific maps we want; setdefault
    # covers both the fresh-dict and partially-populated cases.
    orConstraintMap = infodict.setdefault(
        'disjunction_or_constraint', ComponentMap())
    disaggregationConstraintMap = infodict.setdefault(
        'disjunction_disaggregation_constraints', ComponentMap())

    disaggregationConstraint = disaggregationConstraintMap.get(disjunction)
    if disaggregationConstraint is None:
        # add the disaggregation constraint
        disaggregationConstraint = disaggregationConstraintMap[disjunction] \
            = Constraint(Any)
        parent.add_component(
            unique_component_name(
                parent,
                '_gdp_chull_relaxation_' + disjunction.name +
                '_disaggregation'),
            disaggregationConstraint)

    orC = orConstraintMap.get(disjunction)
    if orC is None:
        # add the XOR (or OR) constraint to the parent block with a unique
        # name.  It's indexed if this is an IndexedDisjunction, not
        # otherwise.
        orC = (Constraint(disjunction.index_set())
               if disjunction.is_indexed() else Constraint())
        parent.add_component(
            unique_component_name(
                parent, '_gdp_chull_relaxation_' + disjunction.name +
                '_xor'),
            orC)
        orConstraintMap[disjunction] = orC

    return orC, disaggregationConstraint
def _transform_constraint(self, obj, disjunct, bigMargs, arg_list,
                          disjunct_suffix_list):
    """Relax constraint ``obj`` (on ``disjunct``) onto the transformation block.

    For each active ConstraintData, M values are resolved in priority
    order: explicit bigM arguments, then BigM suffixes (closest to the
    constraint wins), then estimation from variable bounds.  The relaxed
    lb/ub constraints are added to a new Constraint on the disjunct's
    transformation block, source/transformed mappings are recorded, the
    (lower, upper) M source info is stashed in ``bigm_src``, and the
    original ConstraintData is deactivated.
    """
    # add constraint to the transformation block, we'll transform it there.
    transBlock = disjunct._transformation_block()
    bigm_src = transBlock.bigm_src
    constraintMap = self._get_constraint_map_dict(transBlock)

    disjunctionRelaxationBlock = transBlock.parent_block()
    # Though rare, it is possible to get naming conflicts here
    # since constraints from all blocks are getting moved onto the
    # same block. So we get a unique name
    cons_name = obj.getname(fully_qualified=True, name_buffer=NAME_BUFFER)
    name = unique_component_name(transBlock, cons_name)

    if obj.is_indexed():
        # 'lbub' adds a second index dimension so each original index can
        # yield both a lower- and an upper-bound relaxation.
        newConstraint = Constraint(obj.index_set(),
                                   disjunctionRelaxationBlock.lbub)
        # we map the container of the original to the container of the
        # transformed constraint. Don't do this if obj is a SimpleConstraint
        # because we will treat that like a _ConstraintData and map to a
        # list of transformed _ConstraintDatas
        constraintMap['transformedConstraints'][obj] = newConstraint
    else:
        newConstraint = Constraint(disjunctionRelaxationBlock.lbub)
    transBlock.add_component(name, newConstraint)
    # add mapping of transformed constraint to original constraint
    constraintMap['srcConstraints'][newConstraint] = obj

    # sorted() gives a deterministic transformation order over the index set
    for i in sorted(obj.keys()):
        c = obj[i]
        if not c.active:
            continue

        # (value, source, key) triples for the lower/upper M values;
        # only slot 0 (the value) is consulted here, the rest is
        # provenance recorded in bigm_src below.
        lower = (None, None, None)
        upper = (None, None, None)

        # first, we see if an M value was specified in the arguments.
        # (This returns None if not)
        lower, upper = self._get_M_from_args(c, bigMargs, arg_list, lower,
                                             upper)
        M = (lower[0], upper[0])

        if self._generate_debug_messages:
            _name = obj.getname(fully_qualified=True,
                                name_buffer=NAME_BUFFER)
            logger.debug("GDP(BigM): The value for M for constraint '%s' "
                         "from the BigM argument is %s."
                         % (cons_name, str(M)))

        # if we didn't get something we need from args, try suffixes:
        if (M[0] is None and c.lower is not None) or \
                (M[1] is None and c.upper is not None):
            # first get anything parent to c but below disjunct
            suffix_list = self._get_bigm_suffix_list(
                c.parent_block(), stopping_block=disjunct)
            # prepend that to what we already collected for the disjunct.
            suffix_list.extend(disjunct_suffix_list)
            lower, upper = self._update_M_from_suffixes(c, suffix_list,
                                                        lower, upper)
            M = (lower[0], upper[0])

        if self._generate_debug_messages:
            _name = obj.getname(fully_qualified=True,
                                name_buffer=NAME_BUFFER)
            logger.debug("GDP(BigM): The value for M for constraint '%s' "
                         "after checking suffixes is %s."
                         % (cons_name, str(M)))

        # Last resort: estimate M from the body's bounds, shifted by the
        # constraint bound so that M is the slack allowed when the
        # disjunct is "off".
        if c.lower is not None and M[0] is None:
            M = (self._estimate_M(c.body, name)[0] - c.lower, M[1])
            lower = (M[0], None, None)
        if c.upper is not None and M[1] is None:
            M = (M[0], self._estimate_M(c.body, name)[1] - c.upper)
            upper = (M[1], None, None)

        if self._generate_debug_messages:
            _name = obj.getname(fully_qualified=True,
                                name_buffer=NAME_BUFFER)
            logger.debug("GDP(BigM): The value for M for constraint '%s' "
                         "after estimating (if needed) is %s."
                         % (cons_name, str(M)))

        # save the source information
        bigm_src[c] = (lower, upper)

        # Handle indices for both SimpleConstraint and IndexedConstraint
        if i.__class__ is tuple:
            # multi-dimensional index: extend the tuple with the lb/ub tag
            i_lb = i + ('lb',)
            i_ub = i + ('ub',)
        elif obj.is_indexed():
            # scalar index on an indexed constraint: pair it with the tag
            i_lb = (i, 'lb',)
            i_ub = (i, 'ub',)
        else:
            # SimpleConstraint: the tag alone is the index
            i_lb = 'lb'
            i_ub = 'ub'

        if c.lower is not None:
            if M[0] is None:
                raise GDP_Error("Cannot relax disjunctive constraint '%s' "
                                "because M is not defined." % name)
            # M_expr vanishes when indicator_var == 1, restoring c.lower
            M_expr = M[0] * (1 - disjunct.indicator_var)
            newConstraint.add(i_lb, c.lower <= c.body - M_expr)
            constraintMap[
                'transformedConstraints'][c] = [newConstraint[i_lb]]
            constraintMap['srcConstraints'][newConstraint[i_lb]] = c
        if c.upper is not None:
            if M[1] is None:
                raise GDP_Error("Cannot relax disjunctive constraint '%s' "
                                "because M is not defined." % name)
            M_expr = M[1] * (1 - disjunct.indicator_var)
            newConstraint.add(i_ub, c.body - M_expr <= c.upper)
            # an equality/range constraint already registered its lb
            # transformation above; append rather than overwrite
            transformed = constraintMap['transformedConstraints'].get(c)
            if transformed is not None:
                constraintMap['transformedConstraints'][
                    c].append(newConstraint[i_ub])
            else:
                constraintMap[
                    'transformedConstraints'][c] = [newConstraint[i_ub]]
            constraintMap['srcConstraints'][newConstraint[i_ub]] = c

        # deactivate because we relaxed
        c.deactivate()
def _transform_disjunctionData(self, obj, idx, transBlock=None,
                               transformed_parent_disjunct=None):
    """Transform one DisjunctionData into a partitioned equivalent.

    Resolves the variable partition for ``obj`` (explicit per-disjunction,
    default, or computed by a partitioning-method callback with P
    partitions), transforms each disjunct, ties original and transformed
    indicator variables together with equivalence constraints, and
    replaces ``obj`` with a new Disjunction over the transformed
    disjuncts (deactivating the original).

    Raises GDP_Error for an empty disjunction or when a value of P is
    needed but was not supplied.
    """
    if not obj.active:
        return
    # Just because it's unlikely this is what someone meant to do...
    if len(obj.disjuncts) == 0:
        raise GDP_Error(
            "Disjunction '%s' is empty. This is "
            "likely indicative of a modeling error." %
            obj.getname(fully_qualified=True, name_buffer=NAME_BUFFER))

    # Prefer the transformation block of the transformed parent disjunct
    # (for nested disjunctions); otherwise use the parent block's.
    if transBlock is None and transformed_parent_disjunct is not None:
        transBlock = self._get_transformation_block(
            transformed_parent_disjunct)
    if transBlock is None:
        transBlock = self._get_transformation_block(obj.parent_block())

    variable_partitions = self.variable_partitions
    partition_method = self.partitioning_method

    # was the partition specified for the disjunct?
    partition = variable_partitions.get(obj)
    if partition is None:
        # was there a default partition?
        partition = variable_partitions.get(None)
        if partition is None:
            # If not, see what method to use to calculate one
            method = partition_method.get(obj)
            if method is None:
                # was there a default method?
                method = partition_method.get(None)
            # if all else fails, set it to our default
            method = method if method is not None else arbitrary_partition

            # now figure out P
            if self._config.num_partitions is None:
                # This will just end in failure below. (We're checking here
                # because we don't need a value of P if the partitions were
                # specified for every Disjunction.)
                P = None
            else:
                P = self._config.num_partitions.get(obj)
                if P is None:
                    P = self._config.num_partitions.get(None)
                if P is None:
                    raise GDP_Error("No value for P was given for "
                                    "disjunction "
                                    "%s! Please specify a value of P "
                                    "(number of "
                                    "partitions), if you do not specify the "
                                    "partitions directly." % obj.name)
            # it's this method's job to scream if it can't handle what's
            # here, we can only assume it worked for now, since it's a
            # callback.
            partition = method(obj, P)
    # these have to be ComponentSets
    partition = [ComponentSet(var_list) for var_list in partition]

    transformed_disjuncts = []
    for disjunct in obj.disjuncts:
        transformed_disjunct = self._transform_disjunct(disjunct, partition,
                                                        transBlock)
        if transformed_disjunct is not None:
            transformed_disjuncts.append(transformed_disjunct)
            # These require transformation, but that's okay because we are
            # going to a GDP
            transBlock.indicator_var_equalities[
                len(transBlock.indicator_var_equalities)] = \
                disjunct.indicator_var.equivalent_to(
                    transformed_disjunct.indicator_var)

    # make a new disjunction with the transformed guys (plain copy of the
    # list -- the original element-by-element comprehension was a
    # needlessly slow identity copy)
    transformed_disjunction = Disjunction(expr=list(transformed_disjuncts))
    transBlock.add_component(
        unique_component_name(
            transBlock,
            obj.getname(fully_qualified=True, name_buffer=NAME_BUFFER)),
        transformed_disjunction)
    obj._algebraic_constraint = weakref_ref(transformed_disjunction)

    obj.deactivate()