def add_constraint(self, constraint):
    """
    Register a constraint with the solver.

    Parameters
    ----------
    constraint
        Constraint object providing ``get_parameters()``.

    Raises
    ------
    ValueError
        If the constraint is already registered in its block.
    """
    # Constraints are grouped into blocks keyed by their "responsible class".
    responsible_class = self._get_responsible_class(constraint)
    try:
        block = self._constraints[responsible_class]
    except KeyError:
        # First constraint of this class; the block is created further below,
        # once the parameter dtype is known.
        block = None
    else:
        if constraint in block.constraints:
            raise ValueError("Constraint already registered")
    constraint_parameters = constraint.get_parameters()
    # Link each variable the constraint touches back to the constraint.
    # NOTE(review): a bag (multiset) is used here — presumably the same
    # constraint may reference a variable more than once; confirm.
    for var in self._constraint_variables(constraint_parameters):
        variable_constraints = self._variables.setdefault(
            var, collections_extended.bag()
        )
        variable_constraints.add(constraint)
    dtype, parameter_values = self._constraint_parameters(constraint_parameters)
    if block is None:
        block = _ConstraintBlock(dtype)
        self._constraints[responsible_class] = block
    # All constraints in one block share a single parameter dtype; the
    # per-constraint parameter rows live in a parallel array.
    assert block.parameter_array.dtype == dtype
    block.constraints.append(constraint)
    block.parameter_array.append(parameter_values)
    self._constraint_count += 1
    # Self-check returns True so it can be stripped together with the assert.
    assert self._assert_internal_state()
    self._auto_solve()
def multi_way_partitioning(items, bin_count):
    """
    Greedily divide weighted items equally across bins (multi-way partition
    problem).

    This approximately minimises the difference between the largest and
    smallest sum of weights in a bin: items are handed out heaviest-first,
    each going to the currently lightest bin.

    Parameters
    ----------
    items : iterable((item :: any) : (weight :: number))
        Weighted items
    bin_count : int
        Number of bins

    Returns
    -------
    bins : bag(bin :: frozenbag(item :: any))
        Bins with the items

    References
    ----------
    .. [1] http://stackoverflow.com/a/6855546/1031434 describes the greedy algorithm
    .. [2] http://ijcai.org/Proceedings/09/Papers/096.pdf defines the problem and describes algorithms
    """
    bins = [_Bin() for _ in range(bin_count)]
    heaviest_first = sorted(items, key=lambda pair: pair[1], reverse=True)
    for item, weight in heaviest_first:
        # Always drop the next item into the lightest bin so far.
        lightest = min(bins, key=lambda candidate: candidate.weights_sum)
        lightest.add(item, weight)
    return bag(frozenbag(bin_.items) for bin_ in bins)
def __form_derivative_recursive(me, pp, FF, iFF):
    """
    Return the derivative of the form with respect to the multiset of
    parameters ``pp``, memoising results in the two lookup maps.

    ``iFF`` maps a parameter multiset to its derivative form; ``FF`` is the
    inverse map (derivative form -> parameter multiset).
    """
    # Memoised result available — nothing to compute.
    if pp in iFF:
        return iFF[pp]
    # Peel one parameter off, recurse on the remainder, then differentiate
    # the partial result by the peeled parameter.
    remaining = bag(pp)
    p = remaining.pop()
    remaining = frozenbag(remaining)
    partial = me.__form_derivative_recursive(remaining, FF, iFF)
    dFdpp = me.__derivative_of_form(partial, p)
    # Record the result in both directions of the memo.
    FF[dFdpp] = pp
    iFF[pp] = dFdpp
    return dFdpp
def _assert_internal_state(self):
    """
    Asserts that the inner state of the solver is ok and everything is
    linked where it should.
    """
    self._variables._assert_internal_state()  # I still don't 100% trust IndexedDict :)
    # Union of every constraint reachable via the variable index; each must
    # also be present in its responsible class's block.
    constraints_from_variables = collections_extended.bag()
    for var_constraints in self._variables.values():
        constraints_from_variables |= var_constraints
    for constraint in constraints_from_variables:
        responsible_class = self._get_responsible_class(constraint)
        assert constraint in self._constraints[responsible_class].constraints
    # Walk the blocks and verify the reverse direction: every stored
    # constraint is indexed by its variables and its parameter row matches.
    constraint_count = 0
    for responsible_class, block in self._constraints.items():
        # Constraints and their parameter rows are parallel arrays.
        assert len(block.constraints) == len(block.parameter_array)
        assert len(block.constraints) > 0, "Empty constraint block"
        for i, constraint in enumerate(block.constraints):
            assert constraint in constraints_from_variables
            constraint_count += 1
            constraint_parameters = constraint.get_parameters()
            constraint_variables = self._constraint_variables(constraint_parameters)
            assert len(constraint_variables) > 0
            for v in constraint_variables:
                assert v in self._variables
                assert constraint in self._variables[v]
            # Re-derive the parameter row and compare with the stored one.
            dtype, values = self._constraint_parameters(constraint_parameters)
            assert block.parameter_array.dtype == dtype
            assert tuple(block.parameter_array[i]) == values
    assert self._constraint_count == constraint_count
    # Returns True to allow using this method as `assert self._assert_internal_state()`
    return True
def __init__(self, seed=None):
    """
    Create an empty tile bag and fill it via ``populate_tiles``.

    Parameters
    ----------
    seed : optional
        Passed to ``random.seed``; note this seeds the module-level
        (global) random generator, a deliberate side effect.
    """
    random.seed(seed)
    self._tiles = bag()
    self.populate_tiles()
from collections_extended import bag

# Demo: a bag (multiset) keeps duplicate elements, a set collapses them.
b = bag("bananarama")
s = set("bananarama")

if __name__ == "__main__":
    # The bag tracks multiplicities: removing one "a" leaves the others,
    # removing the only "r" empties that element out.
    for ch in ("a", "r"):
        print(b.count(ch))
        b.remove(ch)
        print(b.count(ch))
        print(ch in b)
    print("")
    # A set has no count(); removing an element always drops it entirely.
    # print(s.count("a"))
    s.remove("a")
    # print(s.count("a"))
    print("a" in s)
    # print(s.count("r"))
    s.remove("r")
    # print(s.count("r"))
    print("r" in s)
# Capture logparser's stdout into a string buffer, restoring the real
# stdout even if parsing raises.
old_stdout = sys.stdout
result = StringIO()  # StringIO is used to redirect stdout to be held into string buffer.
sys.stdout = result
try:
    logParser.logparser()  # Calls logparser function from logParser class.
finally:
    sys.stdout = old_stdout
bufferstring = result.getvalue()  # Parsing into buffer string.
lines = bufferstring.split("\n")  # Splitting parsed string.
print("\nRaw log data was parsed to program buffer.\n")
# Must use multiset/bag which allows for duplicate elements; the class
# itself serves as the default factory (no lambda needed).
dx_raw = collections.defaultdict(collections_extended.bag)
dx_logs = {}
dx_hashed = {}
# Organises raw log input to be mapped by timestamp: the first two
# whitespace-separated words form the timestamp, the rest is the entry.
for line in lines:
    words = line.split()
    timestamp = " ".join(words[:2])
    rest = " ".join(words[2:])
    dx_raw[timestamp].add(rest)
print("Raw log data was mapped to local log dictionary.\n")
def test_one_bin(self):
    """When one bin, return single bin containing all items"""
    actual = multi_way_partitioning([(1, 2), (2, 3)], bin_count=1)
    expected = bag([frozenbag([1, 2])])
    assert actual == expected
def test_one_item(self):
    """When one item, return 1 singleton and x empty bins"""
    actual = multi_way_partitioning([(1, 2)], bin_count=2)
    expected = bag([frozenbag([1]), frozenbag()])
    assert actual == expected
def test_no_items(self):
    """When no items, return empty bins"""
    actual = multi_way_partitioning([], bin_count=2)
    expected = bag([frozenbag(), frozenbag()])
    assert actual == expected