def test(A_str, B_str):
    """Parse the two regular expressions 'A_str' and 'B_str', build the
    union of the resulting state machines in both argument orders and
    assert that both results are identical, i.e. that 'union' does not
    depend on operand order.
    """
    # Echo the inputs with newline/tab made visible.
    print ("A = " + A_str).replace("\n", "\\n").replace("\t", "\\t")
    print ("B = " + B_str).replace("\n", "\\n").replace("\t", "\\t")
    print "---------------------------"
    A = regex.do(A_str, {}).extract_sm()
    B = regex.do(B_str, {}).extract_sm()
    # Determine lexeme set before union (possible modification)
    ## set0 = lexeme_set.get(A)
    ## set1 = lexeme_set.get(B)
    x = union.do([A, B])
    y = union.do([B, A])
    # Union must be commutative.
    assert identity.do(x, y)
    ## if "SequenceAndLoop" not in sys.argv:
    ##     result = lexeme_set.get(x)
    ##     expectation = set0
    ##     expectation.update(set1)
    ##     print "#result:", lexeme_set.lexeme_set_to_characters(result)
    ##     print "#expect:", lexeme_set.lexeme_set_to_characters(expectation)
    ##     assert result == expectation
    print "union = ", x
    print
def assert_considerations(A, B, result):
    """Set of rules which must hold in case the '\NotBegin' has been applied.
    """
    # The result may only match what 'A' also matches.
    assert superset.do(A, result)
    # The result shares nothing with 'B'.
    assert intersection.do([result, B]).is_Empty()
    # Re-adding the result to 'A' must not change 'A'.
    assert identity.do(union.do([result, A]), A)
    # The result shares nothing with the derived 'is_begin(A, B)' machine,
    # yet together with it reconstructs 'A'.
    assert intersection.do([result, derived.is_begin(A, B)]).is_Empty()
    assert identity.do(union.do([result, derived.is_begin(A, B)]), A)
def __core(Original, Cutter):
    """Compute 'derived.not_begin(Original, Cutter)', print diagnostic
    information about the algebraic relations between the operands and
    the result, and verify them via 'assert_considerations()'.

    RETURNS: The resulting state machine.
    """
    # Echo the inputs with newline/tab made visible.
    print("Original = " + Original).replace("\n", "\\n").replace("\t", "\\t")
    print("Cutter = " + Cutter).replace("\n", "\\n").replace("\t", "\\t")
    orig   = regex.do(Original, {}).extract_sm()
    cutter = regex.do(Cutter, {}).extract_sm()
    #print orig.get_string(NormalizeF=False)
    #print cutter.get_string(NormalizeF=False)
    # ComplementBegin = intersection(P, complement(Q)\Any*)
    result = derived.not_begin(orig, cutter)
    print
    if not result.is_Empty():
        print "superset(Original, result): %s" % superset.do(orig, result)
    if not result.is_Empty():
        tmp = clean(intersection.do([cutter, result]))
        print "intersection(Cutter, result) is None: %s" % tmp.is_Empty()
        tmp = clean(union.do([orig, result]))
        print "union(Original, result) == Original: %s" % identity.do(tmp, orig)
    print
    print "result = ", result.get_string(NormalizeF=True)
    assert_considerations(orig, cutter, result)
    return result
def snap_complement(stream, PatternDict):
    """Snap a complement operator expression '\Co{...}' from 'stream'.

    RETURNS: The complement of the (union of the) given pattern(s).
    """
    pattern_list = snap_curly_bracketed_expression(stream, PatternDict,
                                                   "complement operator", "Co")
    # More than one pattern => take the complement of their union.
    if len(pattern_list) != 1:
        subject = union.do(pattern_list)
    else:
        subject = pattern_list[0]
    return complement.do(subject)
def snap_not_end(stream, PatternDict): sm_list = snap_curly_bracketed_expression(stream, PatternDict, "not-end operator", "NotEnd", MinN=2, MaxN=sys.maxint) if len(sm_list) == 2: return complement_end.do(sm_list[0], sm_list[1]) else: return complement_end.do(sm_list[0], union.do(sm_list[1:]))
def snap_union(stream, PatternDict): pattern_list = snap_curly_bracketed_expression(stream, PatternDict, "union operator", "Union", MinN=2, MaxN=INTEGER_MAX) return union.do(pattern_list)
def do(SM_List):
    """Result: A state machine that matches what is matched by one of the
    state machines but by no other.

    Formula: difference(union(All), intersection(All))
    """
    # What at least one machine matches, minus what all of them match.
    matched_by_any = union.do(SM_List)
    matched_by_all = intersection.do(SM_List)
    return difference.do(matched_by_any, matched_by_all)
def do(SM_List):
    """Result: A state machine that matches what is matched by one of the
    state machines but by no other.

    Formula: difference(union(All), intersection(All))
    """
    # 'union' = matched by at least one; 'intersection' = matched by all.
    return difference.do(union.do(SM_List), intersection.do(SM_List))
def snap_two_or_union_of_more_state_machines(Func, stream, PatternDict, Name, Cmd):
    """Snap a bracketed expression '\Cmd{...}' containing two or more state
    machines and apply the binary operation 'Func'.

    The first machine is the first operand; the second operand is either
    the single remaining machine or the union of all remaining machines.
    """
    sm_list = snap_curly_bracketed_expression(stream, PatternDict, Name, Cmd,
                                              MinN=2, MaxN=INTEGER_MAX)
    first  = sm_list[0]
    second = sm_list[1] if len(sm_list) == 2 else union.do(sm_list[1:])
    return Func(first, second)
def test(A_str):
    """Compute the complement of the given pattern (or ready-made state
    machine) and print checks of the basic complement laws:
    union with the complement is 'all', intersection with it is 'none',
    and double complement reproduces the original.
    """
    print "_____________________________________________________________________"
    if isinstance(A_str, (str, unicode)):
        # A pattern string: echo it with newline/tab made visible, then parse.
        print ("A = " + A_str).replace("\n", "\\n").replace("\t", "\\t")
        sm = regex.do(A_str, {}).sm
    else:
        # 'A_str' is already a state machine.
        sm = A_str
        print "A = ", sm
    result_1st = complement.do(sm)
    print "complement(A):", result_1st
    result_2nd = complement.do(result_1st)
    print
    print "union(A, complement(A)): All =", is_all(union.do([sm, result_1st]))
    print "intersection(A, complement(A)): None =", is_none(intersection.do([sm, result_1st]))
    print "identity(A, complement(complement(A)):", identity.do(sm, result_2nd)
def __core(Original, Cutter):
    """Compute 'complement_end.do(Original, Cutter)' and print diagnostic
    information about the algebraic relations between the operands and
    the result.
    """
    # Echo the inputs with newline/tab made visible.
    print ("Original = " + Original).replace("\n", "\\n").replace("\t", "\\t")
    print ("Cutter = " + Cutter).replace("\n", "\\n").replace("\t", "\\t")
    orig   = regex.do(Original, {}).sm
    cutter = regex.do(Cutter, {}).sm
    #print orig.get_string(NormalizeF=False)
    #print cutter.get_string(NormalizeF=False)
    result = clean(complement_end.do(orig, cutter))
    print
    if not special.is_none(result):
        print "superset(Original, result): %s" % superset.do(orig, result)
    if not special.is_none(result):
        tmp = clean(intersection.do([cutter, result]))
        print "intersection(Cutter, result) is None: %s" % special.is_none(tmp)
        tmp = clean(union.do([orig, result]))
        print "union(Original, result) == Original: %s" % identity.do(tmp, orig)
    print
    print "result = ", result.get_string(NormalizeF=True)
def __core(Original, Cutter):
    """Compute 'complement_end.do(Original, Cutter)' and print diagnostic
    information about the algebraic relations between the operands and
    the result.
    """
    # Echo the inputs with newline/tab made visible.
    print("Original = " + Original).replace("\n", "\\n").replace("\t", "\\t")
    print("Cutter = " + Cutter).replace("\n", "\\n").replace("\t", "\\t")
    orig   = regex.do(Original, {}).sm
    cutter = regex.do(Cutter, {}).sm
    #print orig.get_string(NormalizeF=False)
    #print cutter.get_string(NormalizeF=False)
    result = clean(complement_end.do(orig, cutter))
    print
    if not special.is_none(result):
        print "superset(Original, result): %s" % superset.do(orig, result)
    if not special.is_none(result):
        tmp = clean(intersection.do([cutter, result]))
        print "intersection(Cutter, result) is None: %s" % special.is_none(tmp)
        tmp = clean(union.do([orig, result]))
        print "union(Original, result) == Original: %s" % identity.do(tmp, orig)
    print
    print "result = ", result.get_string(NormalizeF=True)
def test(A_str):
    """Compute the complement of the given pattern (or ready-made state
    machine), print checks of the basic complement laws (union with the
    complement is universal, intersection with it is empty, double
    complement reproduces the original) and assert that there is no
    commonality between a machine and its complement.
    """
    print "_____________________________________________________________________"
    if isinstance(A_str, (str, unicode)):
        # A pattern string: echo it with newline/tab made visible, then parse.
        print("A = " + A_str).replace("\n", "\\n").replace("\t", "\\t")
        sm = regex.do(A_str, {}).extract_sm()
    else:
        # 'A_str' is already a state machine.
        sm = A_str
        print "A = ", sm
    ## print "##sm:", sm.get_string(NormalizeF=False)
    result_1st = complement.do(sm)
    print "complement(A):", result_1st # .get_string(NormalizeF=False)
    result_2nd = complement.do(result_1st)
    ## print "##2nd:", result_2nd.get_string(NormalizeF=False)
    print
    print "union(A, complement(A)): All =", DFA.is_Universal(union.do([sm, result_1st]))
    print "intersection(A, complement(A)): None =", DFA.is_Empty(intersection.do([sm, result_1st]))
    print "identity(A, complement(complement(A)):", identity.do(sm, result_2nd)
    assert not commonality(sm, result_1st)
    assert not commonality(result_1st, result_2nd)
def assert_considerations(A, B, result):
    """Set of rules which must hold after the cut operation based on
    'derived.is_in(A, B)' has been applied.
    """
    # The result may only match what 'A' also matches.
    assert superset.do(A, result)
    # The result shares nothing with 'B'.
    assert intersection.do([result, B]).is_Empty()
    # Re-adding the result to 'A' must not change 'A'.
    assert identity.do(union.do([result, A]), A)
    # The result shares nothing with the derived 'is_in(A, B)' machine,
    # yet together with it reconstructs 'A'.
    assert intersection.do([result, derived.is_in(A, B)]).is_Empty()
    assert identity.do(union.do([result, derived.is_in(A, B)]), A)
def uni(*A):
    """Shorthand: union of all state machines given as arguments."""
    return union.do(list(A))

def itsct(*A):
    """Shorthand: intersection of all state machines given as arguments."""
    return intersection.do(list(A))
def uni(*A):
    """Shorthand: union of all state machines given as arguments."""
    return union.do(list(A))
def snap_union(stream, PatternDict): pattern_list = snap_curly_bracketed_expression(stream, PatternDict, "union operator", "Union", MinN=2, MaxN=sys.maxint) return union.do(pattern_list)