def test_get_violated_bounds_at_time():
    m = ConcreteModel()
    m.time = Set(initialize=[1, 2, 3])
    m.v = Var(m.time, ['a', 'b', 'c'], initialize=5)
    varlist = [
        Reference(m.v[:, 'a']),
        Reference(m.v[:, 'b']),
        Reference(m.v[:, 'c']),
    ]
    group = NMPCVarGroup(varlist, m.time)
    group.set_lb(0, 0)
    group.set_lb(1, 6)
    group.set_lb(2, 0)
    group.set_ub(0, 4)
    group.set_ub(1, 10)
    group.set_ub(2, 10)

    violated = get_violated_bounds_at_time(group, [1, 2, 3], tolerance=1e-8)
    violated_set = ComponentSet(violated)
    for t in m.time:
        assert m.v[t, 'a'] in violated_set
        assert m.v[t, 'b'] in violated_set

    violated = get_violated_bounds_at_time(group, 2, tolerance=1e-8)
    violated_set = ComponentSet(violated)
    assert m.v[2, 'a'] in violated_set
    assert m.v[2, 'b'] in violated_set
def test_find_slices_in_model():
    # Define m1
    m1 = ConcreteModel()
    m1.time = Set(initialize=[1, 2, 3, 4, 5])
    m1.v1 = Var(m1.time, initialize=1)

    @m1.Block(m1.time)
    def blk(b, t):
        b.v2 = Var(initialize=1)

    # Define m2
    m2 = ConcreteModel()
    m2.time = Set(initialize=[1, 2, 3, 4, 5])
    m2.v1 = Var(m2.time, initialize=2)

    @m2.Block(m2.time)
    def blk(b, t):
        b.v2 = Var(initialize=2)

    ###
    scalar_vars_1, dae_vars_1 = flatten_dae_variables(m1, m1.time)
    scalar_vars_2, dae_vars_2 = flatten_dae_variables(m2, m2.time)
    t0_tgt = m1.time.first()
    group = NMPCVarGroup(dae_vars_1, m1.time)
    categ = VariableCategory.ALGEBRAIC
    locator = ComponentMap([(var[t0_tgt], NMPCVarLocator(categ, group, i))
                            for i, var in enumerate(dae_vars_1)])

    tgt_slices = find_slices_in_model(m1, m1.time, m2, m2.time, locator,
                                      dae_vars_2)

    dae_var_set_1 = ComponentSet(dae_vars_1)
    assert len(dae_var_set_1) == len(tgt_slices)
    assert len(tgt_slices) == len(dae_vars_2)
    for i, _slice in enumerate(tgt_slices):
        assert dae_vars_2[i].name == _slice.name
        assert _slice in dae_var_set_1
def get_derivatives_at(b, time, pts):
    """
    Finds derivatives with respect to time at the points specified.
    No distinction is made between multiple derivatives or mixed partials.

    Args:
        b : Block to search for derivatives
        time : ContinuousSet to look for derivatives with respect to
        pts : Value or list of values in the time set at which to return
              derivatives

    Returns:
        Dictionary mapping time points to lists of derivatives at those
        points
    """
    if not type(pts) is list:
        pts = [pts]
    dvdict = {pt: [] for pt in pts}

    visited = set()
    for var in b.component_objects(Var):
        if id(var) in visited:
            continue
        visited.add(id(var))

        if not isinstance(var, DerivativeVar):
            continue
        if time not in ComponentSet(var.get_continuousset_list()):
            continue

        info = get_index_set_except(var, time)
        non_time_set = info['set_except']
        index_getter = info['index_getter']
        for pt in pts:
            for non_time_index in non_time_set:
                index = index_getter(non_time_index, pt)
                dvdict[pt].append(var[index])

    return dvdict
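
# A minimal usage sketch for get_derivatives_at (not part of the module's
# test suite). It assumes a model built with pyomo.dae; the component names
# (m.comp, m.conc, m.dconc_dt) are hypothetical.
def example_get_derivatives_at():
    from pyomo.environ import ConcreteModel, Set, Var
    from pyomo.dae import ContinuousSet, DerivativeVar

    m = ConcreteModel()
    m.time = ContinuousSet(bounds=(0, 1))
    m.comp = Set(initialize=['a', 'b'])
    m.conc = Var(m.time, m.comp)
    m.dconc_dt = DerivativeVar(m.conc, wrt=m.time)

    # One entry per non-time index of each DerivativeVar is returned for
    # each requested time point.
    derivs = get_derivatives_at(m, m.time, m.time.first())
    assert len(derivs[m.time.first()]) == len(m.comp)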
def is_explicitly_indexed_by(comp, *sets, **kwargs):
    """
    Function for determining whether a Pyomo component is indexed by a
    set or group of sets.

    Args:
        comp : Some Pyomo component, possibly indexed
        sets : Pyomo Sets to check indexing by
        expand_all_set_operators : Whether or not to expand all set operators
                                   in the subsets method

    Returns:
        A bool that is True if comp is directly indexed by every set in sets.
    """
    if not comp.is_indexed():
        return False
    for s in sets:
        if isinstance(s, SetProduct):
            msg = ('Checking for explicit indexing by a SetProduct '
                   'is not supported')
            raise TypeError(msg)

    expand_all_set_operators = kwargs.pop('expand_all_set_operators', False)
    if kwargs:
        keys = kwargs.keys()
        raise ValueError('Unrecognized keyword arguments: %s' % str(keys))

    projected_subsets = comp.index_set().subsets(
        expand_all_set_operators=expand_all_set_operators)
    # Expanding all set operators here can be dangerous because it will not
    # distinguish between operators that contain their operands (e.g. union,
    # where you might consider the component to be indexed by the operands)
    # and operators that don't.
    # Ideally we would check for containment by inclusion and containment
    # by product in one search of the set operators.
    subset_set = ComponentSet(projected_subsets)

    return all([_ in subset_set for _ in sets])
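
# A minimal usage sketch for is_explicitly_indexed_by, assuming the Pyomo
# imports used elsewhere in this module. The component names (m.space, m.v,
# m.w) are hypothetical.
def example_is_explicitly_indexed_by():
    from pyomo.environ import ConcreteModel, Set, Var

    m = ConcreteModel()
    m.time = Set(initialize=[1, 2, 3])
    m.space = Set(initialize=['x', 'y'])
    m.v = Var(m.time, m.space)
    m.w = Var(m.time)

    # m.v is indexed by both sets; m.w only by time.
    assert is_explicitly_indexed_by(m.v, m.time, m.space)
    assert is_explicitly_indexed_by(m.w, m.time)
    assert not is_explicitly_indexed_by(m.w, m.time, m.space)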
def assert_categorization(model):
    init_input_set = ComponentSet([
        model.mixer.S_inlet.flow_vol[0],
        model.mixer.E_inlet.flow_vol[0],
    ])

    init_deriv_list = []
    init_diff_list = []
    init_fixed_list = [
        model.mixer.E_inlet.temperature[0],
        model.mixer.S_inlet.temperature[0],
        model.cstr.control_volume.energy_holdup[0, 'aq'],
    ]

    init_ic_list = [model.cstr.control_volume.volume[0]]

    init_alg_list = [
        model.cstr.control_volume.volume[0],
        model.cstr.outlet.flow_vol[0],
        model.cstr.outlet.temperature[0],
        model.cstr.inlet.flow_vol[0],
        model.cstr.inlet.temperature[0],
        model.mixer.outlet.flow_vol[0],
        model.mixer.outlet.temperature[0],
        model.cstr.control_volume.energy_accumulation[0, 'aq'],
    ]

    for j in model.properties.component_list:
        init_deriv_list.append(
            model.cstr.control_volume.material_accumulation[0, 'aq', j])
        init_diff_list.append(
            model.cstr.control_volume.material_holdup[0, 'aq', j])

        init_fixed_list.append(model.mixer.E_inlet.conc_mol[0, j])
        init_fixed_list.append(model.mixer.S_inlet.conc_mol[0, j])

        init_alg_list.extend([
            model.cstr.control_volume.properties_out[0].flow_mol_comp[j],
            model.cstr.inlet.conc_mol[0, j],
            model.cstr.control_volume.properties_in[0].flow_mol_comp[j],
            model.cstr.control_volume.rate_reaction_generation[0, 'aq', j],
            model.mixer.mixed_state[0].flow_mol_comp[j],
            model.mixer.E_inlet_state[0].flow_mol_comp[j],
            model.mixer.S_inlet_state[0].flow_mol_comp[j],
        ])

        if j != 'Solvent':
            init_alg_list.append(model.cstr.outlet.conc_mol[0, j])
            init_alg_list.append(model.mixer.outlet.conc_mol[0, j])
        else:
            init_fixed_list.append(model.cstr.outlet.conc_mol[0, j])
            init_fixed_list.append(model.mixer.outlet.conc_mol[0, j])

        if j != 'Solvent':
            init_ic_list.append(
                model.cstr.control_volume.material_holdup[0, 'aq', j])

    for r in model.reactions.rate_reaction_idx:
        init_alg_list.extend([
            model.cstr.control_volume.reactions[0].reaction_coef[r],
            model.cstr.control_volume.reactions[0].reaction_rate[r],
            model.cstr.control_volume.rate_reaction_extent[0, r],
        ])

    init_deriv_set = ComponentSet(init_deriv_list)
    init_diff_set = ComponentSet(init_diff_list)
    init_fixed_set = ComponentSet(init_fixed_list)
    init_ic_set = ComponentSet(init_ic_list)
    init_alg_set = ComponentSet(init_alg_list)

    assert model._NMPC_NAMESPACE.input_vars.n_vars == len(init_input_set)
    for v in model._NMPC_NAMESPACE.input_vars:
        assert v[0] in init_input_set

    assert model._NMPC_NAMESPACE.deriv_vars.n_vars == len(init_deriv_set)
    for v in model._NMPC_NAMESPACE.deriv_vars:
        assert v[0] in init_deriv_set

    assert len(model._NMPC_NAMESPACE.diff_vars) == len(init_deriv_set)
    for v in model._NMPC_NAMESPACE.diff_vars:
        assert v[0] in init_diff_set

    assert len(model._NMPC_NAMESPACE.fixed_vars) == len(init_fixed_set)
    for v in model._NMPC_NAMESPACE.fixed_vars:
        assert v[0] in init_fixed_set

    assert len(model._NMPC_NAMESPACE.alg_vars) == len(init_alg_set)
    for v in model._NMPC_NAMESPACE.alg_vars:
        assert v[0] in init_alg_set

    assert len(model._NMPC_NAMESPACE.ic_vars) == len(init_ic_set)
    for v in model._NMPC_NAMESPACE.ic_vars:
        assert v[0] in init_ic_set

    assert len(model._NMPC_NAMESPACE.scalar_vars) == 0

    for var in model._NMPC_NAMESPACE.deriv_vars:
        assert len(var) == len(model._NMPC_NAMESPACE.get_time())
        assert var.index_set() is model._NMPC_NAMESPACE.get_time()
    for var in model._NMPC_NAMESPACE.alg_vars:
        assert len(var) == len(model._NMPC_NAMESPACE.get_time())
        assert var.index_set() is model._NMPC_NAMESPACE.get_time()
def get_index_set_except(comp, *sets):
    """
    Function for getting indices of a component over a product of its
    indexing sets other than those specified. Indices for the specified
    sets can be used to construct indices of the proper dimension for the
    original component via the index_getter function.

    Args:
        comp : Component whose indexing sets are to be manipulated
        sets : Sets to omit from the set_except product

    Returns:
        A dictionary. Maps 'set_except' to a Pyomo Set or SetProduct
        of comp's index set, excluding those in sets. Maps 'index_getter'
        to a function that returns an index of the proper dimension for
        comp, given an element of set_except and a value for each set
        excluded. These values must be provided in the same order their
        Sets were provided in the sets argument.
    """
    n_set = len(sets)
    s_set = ComponentSet(sets)
    try:
        total_s_dim = sum([s.dimen for s in sets])
    except TypeError:
        msg = ('get_index_set_except does not support sets with '
               'dimen == None, including those with inconsistent dimen')
        raise TypeError(msg)

    info = {}

    if not is_explicitly_indexed_by(comp, *sets):
        msg = (comp.name + ' is not indexed by at least one of ' +
               str([s.name for s in sets]))
        raise ValueError(msg)

    index_set = comp.index_set()
    if isinstance(index_set, SetProduct):
        projection_sets = list(index_set.subsets())
        counter = Counter([id(_) for _ in projection_sets])
        for s in sets:
            if counter[id(s)] != 1:
                msg = 'Cannot omit sets that appear multiple times'
                raise ValueError(msg)
        # Need to know the location of each set within comp's index_set.
        # location maps:
        #     location_in_comp_index_set -> location_in_sets
        location = {}
        other_ind_sets = []
        for ind_loc, ind_set in enumerate(projection_sets):
            found_set = False
            for s_loc, s_set in enumerate(sets):
                if ind_set is s_set:
                    location[ind_loc] = s_loc
                    found_set = True
                    break
            if not found_set:
                other_ind_sets.append(ind_set)
    else:
        # If index_set is not a SetProduct, it must be a simple set, and
        # len(sets) == 1 (because comp is indexed by every set in sets).
        # Location in sets and in comp's indexing set are the same.
        location = {0: 0}
        other_ind_sets = []

    if comp.dim() == total_s_dim:
        # comp being indexed by all sets and having this dimension is
        # sufficient to know that comp is only indexed by Sets in *sets.
        # In this case, return the trivial set_except and index_getter.
        # Note: location cannot be constructed without a set tuple, but
        # here len(newvals) should be 1, so location is not needed.
        info['set_except'] = [None]
        # index_getter returns an index corresponding to the values passed
        # to it, re-ordered according to the order of indexing sets in the
        # component.
        info['index_getter'] = (lambda incomplete_index, *newvals:
                newvals[0] if len(newvals) <= 1
                else tuple([newvals[location[i]] for i in location]))
        return info

    # Now may assume other_ind_sets is nonempty.
    if len(other_ind_sets) == 1:
        set_except = other_ind_sets[0]
    elif len(other_ind_sets) >= 2:
        set_except = other_ind_sets[0].cross(*other_ind_sets[1:])
    else:
        raise ValueError('Did not expect this to happen')

    index_getter = (lambda incomplete_index, *newvals:
            _complete_index(location, incomplete_index, *newvals))

    info['set_except'] = set_except
    info['index_getter'] = index_getter
    return info
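
# A minimal usage sketch for get_index_set_except, mirroring how
# get_derivatives_at uses it above. The component names (m.space, m.v) are
# hypothetical; it assumes the Pyomo imports used elsewhere in this module.
def example_get_index_set_except():
    from pyomo.environ import ConcreteModel, Set, Var

    m = ConcreteModel()
    m.time = Set(initialize=[1, 2, 3])
    m.space = Set(initialize=['x', 'y'])
    m.v = Var(m.time, m.space)

    info = get_index_set_except(m.v, m.time)
    set_except = info['set_except']      # indices of m.v with time removed
    index_getter = info['index_getter']  # rebuilds a full index from a value
                                         # of each omitted set

    for non_time_index in set_except:
        # Reconstruct the full (time, space) index at time point 1.
        index = index_getter(non_time_index, 1)
        assert index in m.v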