def mk_star_nfa(N):
    """Kleene-star NFA, following Kozen's construction.

    A fresh state IF becomes both the single start state and the single
    final state.  IF epsilon-jumps into N's old start set, N's old final
    states lose final status, and each of them epsilon-loops back to IF.
    """
    fresh = NxtStateStr()
    new_starts = set({fresh})      # new start + final state set
    moves = dict(N["Delta"])       # keep all of N's existing moves
    moves[(fresh, "")] = N["Q0"]   # epsilon jump from IF into N
    # Spin every old final state back to IF on epsilon; if a final state
    # already had epsilon moves, widen the target set rather than clobber.
    for fin in N["F"]:
        prior = N["Delta"].get((fin, ""), set({}))
        moves[(fin, "")] = new_starts | prior
    return mk_nfa(Q=N["Q"] | new_starts,
                  Sigma=N["Sigma"],
                  Delta=moves,
                  Q0=new_starts,
                  F=new_starts)
def mk_eps_nfa():
    """Build the NFA for the epsilon symbol: a single fresh state that is
    simultaneously the start state and the final state, no transitions."""
    print("Making an NFA for the Epsilon Symbol...")
    lone_state = {NxtStateStr()}
    return mk_nfa(Q=lone_state,
                  Sigma=set({}),
                  Delta=dict({}),
                  Q0=lone_state,
                  F=lone_state)
def mk_cat_nfa(N1, N2):
    """Concatenation NFA for L(N1) followed by L(N2).

    Every final state of N1 gets an epsilon move into N2's start-state
    set; N1 keeps its start states and N2 keeps its final states.
    Progress messages trace the construction.
    """
    # Fix: build the printed alphabet string with str.join (linear)
    # instead of the original quadratic `string += a` loop.  The sets
    # are iterated in the same (arbitrary) order, so output is unchanged.
    string = "".join(N1["Sigma"]) + "".join(N2["Sigma"])
    print("The symbols after being concatenated is the string: " + "'" +
          string + "'")
    delta_accum = dict({})
    delta_accum.update(N1["Delta"])
    delta_accum.update(N2["Delta"])
    print("Making a NFA for the concatenated symbols.....")
    # Now, introduce moves from every one of N1's final states
    # to the set of N2's initial states.
    for f in N1["F"]:
        # However, N1's final states may already have epsilon moves to
        # other N1-states!
        # Expand the target of such jumps to include N2's Q0 also!
        if (f, "") in N1["Delta"]:
            delta_accum.update({(f, ""): (N2["Q0"] | N1["Delta"][(f, "")])})
        else:
            delta_accum.update({(f, ""): N2["Q0"]})
    print(
        "Finished making the Delta for the NFA with the concatenation operator..."
    )
    # In syntax-directed translation, it is impossible
    # that N2 and N1 have common states. Check anyhow
    # in case there are bugs elsewhere that cause it.
    # NOTE(review): this only checks overlap of FINAL states, while the
    # comment claims no common states at all — confirm whether
    # N1["Q"] & N2["Q"] was the intended check.
    assert ((N2["F"] & N1["F"]) == set({}))
    return mk_nfa(Q=N1["Q"] | N2["Q"],
                  Sigma=N1["Sigma"] | N2["Sigma"],
                  Delta=delta_accum,
                  Q0=N1["Q0"],
                  F=N2["F"])
def mk_symbol_nfa(a):
    """The NFA for a single re letter: two fresh states and one move on
    the letter `a` from the initial state to the final state."""
    start = NxtStateStr()   # fresh initial state
    final = NxtStateStr()   # fresh final state
    return mk_nfa(Q={start, final},
                  Sigma={a},
                  Delta={(start, a): {final}},
                  Q0={start},
                  F={final})
def mk_eps_nfa():
    """An nfa with exactly one start+final state and no moves — it
    accepts exactly the empty string."""
    only = set({NxtStateStr()})
    return mk_nfa(Q=only, Sigma=set({}), Delta=dict({}), Q0=only, F=only)
def mk_symbol_nfa(a):
    """The NFA for a single re letter."""
    # Announce which letter is being turned into a two-state NFA.
    print("Making a symbol NFA for a single re-letter, which is " + "'" + a + "'....")
    initial = NxtStateStr()    # fresh initial state
    accepting = NxtStateStr()  # fresh final state
    # Exactly one move: initial --a--> accepting.
    return mk_nfa(Q={initial, accepting},
                  Sigma={a},
                  Delta={(initial, a): {accepting}},
                  Q0={initial},
                  F={accepting})
def mk_plus_nfa(N1, N2):
    """Given two NFAs, return their union.

    The two machines simply run side by side: merge their transition
    tables and union their start and final state sets.  The alphabet is
    inferred bottom-up, so the Sigmas must be unioned too.
    """
    merged_delta = {**N1["Delta"], **N2["Delta"]}
    return mk_nfa(Q=N1["Q"] | N2["Q"],
                  Sigma=N1["Sigma"] | N2["Sigma"],
                  Delta=merged_delta,
                  Q0=N1["Q0"] | N2["Q0"],
                  F=N1["F"] | N2["F"])
def mk_cat_nfa(N1, N2):
    """Concatenation NFA: accept L(N1) followed by L(N2).

    N1's start states become the start states; N2's final states become
    the final states; each final state of N1 epsilon-jumps into N2's
    start-state set.
    """
    combined = dict(N1["Delta"])
    combined.update(N2["Delta"])
    # Bridge the two machines: every final state of N1 gains an epsilon
    # move into N2's Q0, merged with any epsilon moves it already had.
    for fin in N1["F"]:
        prior = N1["Delta"].get((fin, ""), set({}))
        combined[(fin, "")] = N2["Q0"] | prior
    # In syntax-directed translation the two machines never share
    # states; assert anyhow in case bugs elsewhere cause overlap.
    assert (N2["F"] & N1["F"]) == set({})
    return mk_nfa(Q=N1["Q"] | N2["Q"],
                  Sigma=N1["Sigma"] | N2["Sigma"],
                  Delta=combined,
                  Q0=N1["Q0"],
                  F=N2["F"])