def test_genAST_pred_simple_closure(self):
    # Parse the predicate once.
    string_to_file("#PREDICATE f=closure(r)", file_name)
    ast_str = file_to_AST_str(file_name)
    tree = str_ast_to_python_ast(ast_str)
    # Check several (f, r) pairs: each f must be the closure of its r.
    environment = Environment()
    environment.add_ids_to_frame(["r","f"])
    cases = [
        (frozenset([(1,3),(3,1),(1,1),(3,3)]), frozenset([(1,3),(3,1)])),
        (frozenset([(1,7),(6,2),(8,4),(1,1),(6,6),(8,8),(2,2),(4,4),(7,7)]), frozenset([(1,7),(6,2),(8,4)])),
        (frozenset([(1,7),(6,4),(8,4),(1,1),(6,6),(8,8),(4,4),(7,7)]), frozenset([(1,7),(6,4),(8,4)])),
        (frozenset([(3,1),(1,1),(3,3)]), frozenset([(3,1),(1,1)])),
        (frozenset([(1,3),(1,1),(3,3)]), frozenset([(1,3),(1,1)])),
    ]
    for f_value, r_value in cases:
        environment.set_value("f", f_value)
        environment.set_value("r", r_value)
        assert interpret(tree.children[0], environment)
def make_set_of_realtions(S,T): if PRINT_WARNINGS: print "\033[1m\033[91mWARNING\033[00m: (bruteforce) computing set of relations of %s %s " % (S,T) cartSet = frozenset(((x,y) for x in S for y in T)) res = powerset(cartSet) powerlist = list(res) lst = [frozenset(e) for e in powerlist] return frozenset(lst)
def test_all_ids_known6(self):
    # Parse a membership predicate over a relation literal.
    string_to_file("#PREDICATE {(1,2)}:S<->T", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    # Bind both carrier sets; every identifier in the AST is then known.
    environment = Environment()
    environment.add_ids_to_frame(["S","T"])
    environment.set_value("S", frozenset([1,2,3,4,5]))
    environment.set_value("T", frozenset([1,2,3,4,5]))
    assert all_ids_known(tree, environment)==True
def test_genAST_pred_rel_id(self):
    # Parse: r equals the identity relation on S.
    string_to_file("#PREDICATE r=id(S)", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    environment.add_ids_to_frame(["S","r"])
    environment.set_value("S", frozenset(["a","b","c"]))
    environment.set_value("r", frozenset([("a","a"),("b","b"),("c","c")]))
    assert interpret(tree.children[0], environment)
def test_genAST_pred_seq_append(self):
    # Parse: appending an element of S to a permutation of S
    # yields a sequence over S.
    string_to_file("#PREDICATE s:perm(S) => s<-a:seq(S)", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    environment.add_ids_to_frame(["s","S","a"])
    environment.set_value("S", frozenset(["a","b"]))
    environment.set_value("s", frozenset([(2, 'a'), (1, 'b')]))
    environment.set_value("a", "a")
    assert interpret(tree.children[0], environment)
def test_genAST_pred_fnc_expr(self):
    # Parse the conjunction; interpretation assigns R1 and f.
    string_to_file("#PREDICATE R1 = {(0|->1), (0|->2), (1|->1), (1|->7), (2|->3)} & f= fnc(R1)", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    assert interpret(tree, environment)
    # fnc(R1) must map every domain value to the set of all its images.
    expected = frozenset([(0,frozenset([1,2])),(1,frozenset([1,7])),(2,frozenset([3]))])
    assert environment.get_value("f") == expected
def test_genAST_pred_seq_of_seq(self):
    # Parse: s is a permutation of the set of permutations of S.
    string_to_file("#PREDICATE s:perm(perm(S))", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    # narrow the integer enumeration window to keep the test fast
    environment._min_int = -1
    environment._max_int = 5
    environment.add_ids_to_frame(["S","s"])
    environment.set_value("S", frozenset(["a","b"]))
    environment.set_value("s", frozenset([(2, frozenset([(1, 'a'), (2, 'b')])), (1, frozenset([(2, 'a'), (1, 'b')]))]))
    assert interpret(tree.children[0], environment)
def test_genAST_pred_set_cart(self):
    # Parse: u is a member of the cartesian product S*T.
    string_to_file("#PREDICATE u:S*T", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    environment.add_ids_to_frame(["T","S","u"])
    environment.set_value("S", frozenset(["a","b"]))
    environment.set_value("T", frozenset(["x","y"]))
    environment.set_value("u", ("a","x"))
    assert interpret(tree.children[0], environment)
def test_genAST_pred_set_diff2(self):
    # Parse: C equals the set difference A\B.
    string_to_file("#PREDICATE A={1,2,3,4,5} & B={3,4,5,6,7} & C = A\B", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    environment.add_ids_to_frame(["A","B","C"])
    environment.set_value("A", frozenset([1,2,3,4,5]))
    environment.set_value("B", frozenset([3,4,5,6,7]))
    environment.set_value("C", frozenset([1,2]))
    assert interpret(tree.children[0], environment)
def test_simple_set_pred_not_str_subset(self):
    # Build the AST for A/<<:B (A is not a strict subset of B) by hand.
    predicate = ANotSubsetStrictPredicate()
    predicate.children.append(AIdentifierExpression("A"))
    predicate.children.append(AIdentifierExpression("B"))
    environment = Environment()
    environment.add_ids_to_frame(["A","B"])
    # (value of A, value of B, expected truth of A/<<:B)
    cases = [
        (frozenset(["aa"]), frozenset(["aa","bb"]), False),      # proper subset
        (frozenset(["aa","bb"]), frozenset(["aa"]), True),       # superset
        (frozenset(), frozenset(), True),                        # both empty
        (frozenset(["aa","bb"]), frozenset(["aa","bb"]), True),  # equal sets
    ]
    for a_value, b_value, expected in cases:
        environment.set_value("A", a_value)
        environment.set_value("B", b_value)
        result = interpret(predicate, environment)
        if expected:
            assert result
        else:
            assert not result
def pyB_ext_split(args):
    """External B function: split a string at every occurrence of a separator.

    args[0] is the input string, args[1] the separator. Returns a B
    sequence: a frozenset of (index, substring) pairs with 1-based
    indices. An empty separator or an empty input string yields the
    empty set (kept lenient on purpose instead of raising
    ValueNotInDomainException - see the original commented-out raises).
    """
    b_string0 = args[0]
    sep = args[1]
    # Degenerate inputs: nothing sensible to split.
    if sep == "" or b_string0 == "":
        return frozenset([])
    parts = b_string0.split(sep)
    # Build the B sequence representation: 1-based index/value pairs.
    return frozenset((i + 1, part) for i, part in enumerate(parts))
def test_genAST_pred_rel_repr(self):
    # Parse: f equals the full cartesian product ID*ID.
    string_to_file("#PREDICATE f={aa|->aa, aa|->bb, bb|->bb, bb|->aa} & f=ID*ID", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    environment.add_ids_to_frame(["bb","aa","f","ID"])
    # aa/bb double as identifiers bound to themselves (original XXX note)
    environment.set_value("aa","aa") # XXX
    environment.set_value("bb","bb") # XXX
    environment.set_value("ID", frozenset(["aa","bb"]))
    environment.set_value("f", frozenset([("aa","bb"),("aa","aa"),("bb","aa"),("bb","bb")]))
    assert interpret(tree.children[0], environment)
def test_genAST_pred_exist4(self):
    # Parse: existential quantification over subsets of POW(ID).
    string_to_file("#PREDICATE T<:POW(ID) & #(X).(X<:POW(ID) => X=T )", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    environment.add_ids_to_frame(["ID","T"])
    environment.set_value("ID", frozenset(["a","b"]))
    environment.set_value("T", frozenset([frozenset(["a","b"]),frozenset(["a"]),frozenset(["b"]),frozenset([])]))
    # type-check with ID declared as a carrier set; T's type is inferred
    known_types = [("ID", PowerSetType(SetType("ID")))]
    type_with_known_types(tree.children[0], environment, known_types, ["T"])
    assert interpret(tree.children[0], environment)
def all_values_by_type_RPYTHON(atype, env, node): if PRINT_WARNINGS: print "\033[1m\033[91mWARNING\033[00m:",pretty_print(node), "caused brute force enumeration. MIN_INT:%s MAX_INT:%s" % (env._min_int, env._max_int) if isinstance(atype, IntegerType): L = [] for i in range(env._min_int, env._max_int+1): L.append(W_Integer(i)) return L elif isinstance(atype, BoolType): return [W_Boolean(True), W_Boolean(False)] elif isinstance(atype, StringType): # FIXME:(#ISSUE 21) only some strings are returned here L = [] for s in env.all_strings: L.append(W_String(s)) return L elif isinstance(atype, SetType): type_name = atype.name #print type_name #env.state_space.get_state().print_bstate() value = env.get_value(type_name) assert isinstance(value, frozenset) return value.to_list() elif isinstance(atype, PowerSetType): from enumeration_lazy import generate_powerset if PRINT_WARNINGS: print "\033[1m\033[91mWARNING\033[00m: (bruteforce) computing powerset of %s %s" % (iterable,name) val_list = all_values_by_type_RPYTHON(atype.data, env, node) card = len(val_list) powerlist = [frozenset([])] i = 0 while i!=card: for lst in generate_powerset(frozenset(val_list), card=i+1, skip=0): assert len(lst)==i+1 powerlist.append(frozenset(lst)) i = i+1 #print powerlist return powerlist elif isinstance(atype, CartType): val_domain = all_values_by_type_RPYTHON(atype.left.data, env, node) val_image = all_values_by_type_RPYTHON(atype.right.data, env, node) L = [] for x in val_domain: for y in val_image: L.append(W_Tuple((x,y))) return L string = "Unknown Type / Not Implemented: %s" % atype #print string raise Exception(string)
def test_genAST_not_member(self):
    # Parse: x is not a member of S.
    string_to_file("#PREDICATE x/:S", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    environment.add_ids_to_frame(["x","S"])
    environment.set_value("x", "x")
    # "x" is contained in S: the predicate must fail
    environment.set_value("S", frozenset(["x","y","z"]))
    assert not interpret(tree.children[0], environment)
    # "x" is not contained in S: the predicate must hold
    environment.set_value("S", frozenset(["a","b","c"]))
    assert interpret(tree.children[0], environment)
def SymbolicTransFunction_generator(self):
    # Lazily turn self.relation (a set of (pre, post) pairs) into a
    # transition-function view: one tuple per relation entry, mapping
    # its preimage to the frozenset of ALL images of that preimage.
    # Quadratic scan over the relation.
    # NOTE(review): a preimage occurring k times yields k identical
    # (preimage, images) tuples - presumably collapsed by the consuming
    # set construction; confirm against callers.
    for tup in self.relation:
        image = []
        if USE_RPYTHON_CODE:
            # RPython branch: entries are wrapped W_Tuples (.tvalue)
            preimage = tup.tvalue[0]
            for tup2 in self.relation:
                if tup2.tvalue[0].__eq__(preimage):
                    image.append(tup2.tvalue[1])
            yield W_Tuple((preimage,frozenset(image)))
        else:
            # plain-Python branch: entries are native tuples
            preimage = tup[0]
            for tup2 in self.relation:
                if tup2[0]==preimage:
                    image.append(tup2[1])
            yield tuple([preimage,frozenset(image)])
def make_explicit_set_of_realtion_lists(S,T):
    # Lazily enumerate all relations between S and T by increasing size:
    # first the empty relation, then every relation with one pair, two
    # pairs, ... up to |S|*|T| pairs.
    # card = |S|*|T|
    try:
        card = len(S)*len(T)
    except InfiniteSetLengthException:
        # infinite carrier set: card=0 skips the while loop below, so
        # only the empty relation is yielded
        card = 0
    # empty relation
    yield frozenset([])
    # calc all permutations
    i=0
    while i!=card:
        for lst in _generate_relation(S,T, card=i+1, skip=0):
            assert len(lst)==i+1
            yield frozenset(lst) # removes double entries
        i = i+1
def test_genAST_pred_fun_app2(self):
    # Parse: application of a function whose domain is a cartesian product.
    string_to_file("#PREDICATE f:S*T>->>V & x:S*T & f(x)=y", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    environment.add_ids_to_frame(["S","T","f","V","x","y"])
    environment.set_value("S", frozenset(["x1","x2"]))
    environment.set_value("T", frozenset(["y1","y2"]))
    environment.set_value("V", frozenset(["z1","z2","z3","z4"]))
    environment.set_value("x", ("x1","y1"))
    environment.set_value("f", frozenset([(("x1","y1"),"z1"),(("x2","y2"),"z2"),(("x1","y2"),"z3"),(("x2","y1"),"z4")]))
    # f maps the pair ("x1","y1") to "z1"
    environment.set_value("y", "z1")
    assert interpret(tree.children[0], environment)
def test_genAST_pred_forall3(self):
    # Parse: every member of T is also a member of its superset S.
    string_to_file("#PREDICATE T<:S & S:POW(ID) & !(x).(x:T => x:S)", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    environment.add_ids_to_frame(["S","T","ID"])
    environment.set_value("ID", frozenset(["a","b"]))
    environment.set_value("S", frozenset(["a","b"]))
    environment.set_value("T", frozenset(["a"]))
    # type-check with ID declared as a carrier set
    known_types = [("ID", PowerSetType(SetType("ID")))]
    type_with_known_types(tree.children[0], environment, known_types, ["T","S"])
    assert interpret(tree.children[0], environment)
def test_genAST_pred_total_inj_fun(self):
    # Parse: F equals the set of total injections from S to T.
    string_to_file("#PREDICATE F=S>->T", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    # with |S| = |T| = 2 there are exactly two total injections
    functions = [
        frozenset([("a","x"),("b","y")]),
        frozenset([("a","y"),("b","x")]),
    ]
    environment = Environment()
    environment.add_ids_to_frame(["S","T","F"])
    environment.set_value("S", frozenset(["a","b"]))
    environment.set_value("T", frozenset(["x","y"]))
    environment.set_value("F", frozenset(functions))
    assert interpret(tree.children[0], environment)
def __contains__(self, element):
    # Membership test: an element belongs to this symbolic sequence set
    # only if it is a non-empty function (set of index/value pairs) whose
    # entries form a sequence over the base set self.aset.
    if element == frozenset([]):
        return False
    if not is_a_function(element):
        return False
    return _check_element_in_sequence(element, self.aset)
def enumerate_all(self):
    # Compute (once) and cache the explicit frozenset representation of
    # this symbolic sequence set by exhausting the generator that matches
    # the concrete subclass.
    if not self.explicit_set_computed:
        assert isinstance(self, W_Object)
        assert isinstance(self, SymbolicSet)
        result = []
        # RPython typing constraints made this ugly code necessary:
        # each subclass generator has to be called by its concrete name
        # rather than through one polymorphic method.
        if isinstance(self, SymbolicSequenceSet):
            for e in self.SymbolicSequenceSet_generator():
                result.append(e)
        elif isinstance(self, SymbolicSequence1Set):
            for e in self.SymbolicSequence1Set_generator():
                result.append(e)
        elif isinstance(self, SymbolicISequenceSet):
            for e in self.SymbolicISequenceSet_generator():
                result.append(e)
        elif isinstance(self, SymbolicISequence1Set):
            for e in self.SymbolicISequence1Set_generator():
                result.append(e)
        elif isinstance(self, SymbolicPermutationSet):
            for e in self.SymbolicPermutationSet_generator():
                result.append(e)
        else:
            raise Exception("INTERNAL ERROR: unimplemented sequence enumeration")
        # cache the explicit representation for subsequent calls
        self.explicit_set_repr = frozenset(result)
        self.explicit_set_computed = True
    return self.explicit_set_repr
def test_genAST_pred_rel_dom(self):
    # Parse: S equals the domain of the relation f.
    string_to_file("#PREDICATE S=dom(f)", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    environment.add_ids_to_frame(["S","f"])
    # single-pair relation
    environment.set_value("S", frozenset(["a"]))
    environment.set_value("f", frozenset([("a","x")]))
    assert interpret(tree.children[0], environment)
    # non-functional relation: "1" maps to two different values
    environment.set_value("f", frozenset([("1","x"),("2","y"),("3","z"),("1","y")]))
    environment.set_value("S", frozenset(["1","2","3"]))
    assert interpret(tree.children[0], environment)
def test_genAST_pred_rel_inverse(self):
    # Parse: f equals the inverse relation of r.
    string_to_file("#PREDICATE f=r~", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    environment.add_ids_to_frame(["r","f"])
    environment.set_value("f", frozenset([("1","a"),("42","b"),("777","c")]))
    environment.set_value("r", frozenset([("a","1"),("b","42"),("c","777")]))
    assert interpret(tree.children[0], environment)
    # the inverse of the empty relation is the empty relation
    environment.set_value("f", frozenset([]))
    environment.set_value("r", frozenset([]))
    assert interpret(tree.children[0], environment)
def test_genAST_pred_seq_front(self):
    # Parse: front(s) drops the last element of the sequence s.
    string_to_file("#PREDICATE s=[a,b,c,d,e] & front(s)=t", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    environment.add_ids_to_frame(["a","b","c","d","e","s","t"])
    environment.set_value("s", frozenset([(1, 'a'), (2, 'b'),(3, 'c'), (4, 'd'),(5,'e')]))
    environment.set_value("t", frozenset([(1, 'a'), (2, 'b'),(3, 'c'), (4, 'd')]))
    # bind every sequence-element identifier to its own name
    for element in ["a","b","c","d","e"]:
        environment.set_value(element, element)
    assert interpret(tree.children[0], environment)
def __init__(self):
    # Types of AST-ID-Nodes: Node->type.
    # This map is used by the enumeration
    # and was created and filled by typeit of the module typing.
    self.node_to_type_map = {}
    self.state_space = StateSpace() # statespace
    self.solutions = {} # written by a solution-file
    # constants from config.py
    # for possible modification after module import time (e.g. via tests)
    self._min_int = MIN_INT
    self._max_int = MAX_INT
    self._bmachine_search_dir = BMACHINE_SEARCH_DIR
    self.solution_root = None # predicateparse unit of solution-ast(prob file)
    self.root_mch = None # root B-Machine of the current animation/check
    self.current_mch = None # current Working B-Machine
    self.all_strings = [""] # remember all strings seen (in this or other bmachines). used to enumerate 'STRING'
    # This is a caching-list which contains all operations of all machines
    # It should prevent from intensive lookup while animation and op_call substitutions.
    # Entries: rettype, opname, paratype, backlink:owner_bmch, bool:is_query_op
    self.visible_operations = frozenset([])
    self.operations = {}
    self._all_operation_asts = [] # cache for get_all_visible_op_asts() method
    self.parsed_bmachines = {} # machine name -> parsed machine
    self.init_sets_bmachnes_names = [] # names of all bmachines with set-init done
    self.set_up_bmachines_names = [] # set up constants done
    self.init_bmachines_names = [] # init done
    # animation parameters
    # TODO: MOVE to status object or something like that
    self.set_up_state_on_stack = False
    self.init_state_on_stack = False
    self.set_up_done = False
    self.init_done = False
def test_genAST_pred_forall4(self):
    # Parse: "every subset X of S has cardinality y" - expected to be
    # false, since subsets of S have different cardinalities.
    string_to_file("#PREDICATE S:POW(ID) & !(X,y).(X<:S => card(X)=y)", file_name)
    tree = str_ast_to_python_ast(file_to_AST_str(file_name))
    environment = Environment()
    # narrow the integer enumeration window to keep the test fast
    environment._min_int = -1
    environment._max_int = 5
    environment.add_ids_to_frame(["S","ID"])
    environment.set_value("ID", frozenset(["a","b"]))
    environment.set_value("S", frozenset(["a","b"]))
    known_types = [("ID", PowerSetType(SetType("ID")))]
    type_with_known_types(tree.children[0], environment, known_types, ["S"])
    assert not interpret(tree.children[0], environment)
def enumerate_all(self):
    # Compute (once) and cache the explicit frozenset denoted by this
    # symbolic comprehension set: enumerate candidate bindings for the
    # quantified variables, keep those satisfying the predicate and
    # union the evaluated expression values.
    if not self.explicit_set_computed:
        result = frozenset([])
        varList = self.variable_list
        pred = self.predicate
        expr = self.expression
        env = self.env
        interpret = self.interpret
        node = self.node
        names = [x.idName for x in varList]
        # new scope
        env.push_new_frame(varList)
        domain_generator = self.domain_generator(pred, env, varList)
        for entry in domain_generator:
            # bind one candidate value for every quantified variable
            for name in names:
                value = entry[name]
                env.set_value(name, value)
            try:
                tst = interpret(pred, env)
                if USE_RPYTHON_CODE:
                    # RPython branch: unwrap the W_Boolean result
                    cond = tst.bvalue
                else:
                    cond = tst
                if cond: # test (|= ior)
                    aSet = interpret(expr, env)
                    # force symbolic results into explicit frozensets
                    if isinstance(aSet, SymbolicSet):
                        aSet = aSet.enumerate_all()
                    result = result.union(aSet)
            except ValueNotInDomainException:
                # candidate outside the expression's domain: skip it
                continue
        env.pop_frame()
        self.explicit_set_repr = result
        self.explicit_set_computed = True
    return self.explicit_set_repr
def all_values_by_type(atype, env, node):
    # Brute-force enumeration of every value of the given B type (plain
    # Python version; see all_values_by_type_RPYTHON for the RPython one).
    # 'node' is used for warning output and powerset bookkeeping.
    # Raises Exception for types with no enumeration implemented.
    if PRINT_WARNINGS:
        print "\033[1m\033[91mWARNING\033[00m:",pretty_print(node), "caused brute force enumeration. MIN_INT:%s MAX_INT:%s" % (env._min_int, env._max_int)
    if isinstance(atype, IntegerType):
        # all integers inside the configured enumeration window
        #print env._min_int, env._max_int
        return range(env._min_int, env._max_int+1)
    elif isinstance(atype, BoolType):
        return [True, False]
    elif isinstance(atype, StringType):
        # FIXME:(#ISSUE 21) only some strings are returned here
        return frozenset(env.all_strings)
    elif isinstance(atype, SetType):
        # carrier set: its values live in the current state
        type_name = atype.name
        #print type_name
        #env.state_space.get_state().print_bstate()
        value = env.get_value(type_name)
        assert isinstance(value, frozenset)
        return value
    elif isinstance(atype, PowerSetType):
        # NOTE(review): assumes 'node' has an idName attribute here -
        # confirm for non-identifier AST nodes.
        val_list = all_values_by_type(atype.data, env, node)
        res = powerset(val_list, node.idName)
        powerlist = list(res)
        lst = [frozenset(e) for e in powerlist]
        #print lst
        return lst
    elif isinstance(atype, CartType):
        val_pi = all_values_by_type(atype.left.data, env, node)
        val_i = all_values_by_type(atype.right.data, env, node)
        # TODO: test for relations; seems incomplete
        lst = frozenset([(x,y) for x in val_pi for y in val_i])
        return lst
    elif isinstance(atype, StructType):
        # enumerate each record field independently ...
        value_dict = {}
        for name in atype.dictionary:
            rec_type = atype.dictionary[name]
            values = all_values_by_type(rec_type, env, node)
            value_dict[name]=values
        # ... then build every possible record combination
        res = all_records(value_dict)
        lst = []
        for dic in res:
            rec = []
            for entry in dic:
                rec.append(tuple([entry,dic[entry]]))
            lst.append(frozenset(rec))
        return frozenset(lst)
    string = "Unknown Type / Not Implemented: %s" % atype
    #print string
    raise Exception(string)
def is_a_surj_function_rpython(function, image_set):
    # A function is surjective onto image_set iff its range (the second
    # components of its wrapped pairs, duplicates removed) equals
    # image_set. Explicit loop kept: RPython-targeted code.
    if isinstance(image_set, InfiniteSet):
        # cannot compare a finite range against an infinite image set
        raise EnumerationNotPossibleException("InfiniteSet")
    range_values = []
    for pair in function:
        range_values.append(pair.tvalue[1])
    # frozenset removes duplicate images before the comparison
    return image_set.__eq__(frozenset(range_values))