Example #1
0
def automaton_to_regex(automaton):
    """Convert *automaton* into an equivalent, simplified regular expression.

    The automaton is first determinized, then turned into a GNFA whose
    states are eliminated to produce a raw regex, which is finally parsed
    and simplified.
    """
    dfa = nfa_to_dfa(automaton)

    state_count, transitions = __automaton_to_gnfa(dfa)
    raw_regex = __gnfa_to_regex([*range(state_count)], transitions)
    regex_ast = __get_regex_ast(raw_regex)
    return __simplify_regex(regex_ast)
    def build_automaton(cls, regex, skip_whitespaces=False):
        """Tokenize, parse, and evaluate *regex*, returning a minimized DFA."""
        token_stream = regex_tokenizer(regex, cls.grammar, skip_whitespaces=skip_whitespaces)
        derivation = cls.parser([token.ttype for token in token_stream])
        syntax_tree = evaluate_parse(derivation, token_stream)
        # Evaluate the AST into an NFA, determinize, then minimize.
        deterministic = nfa_to_dfa(syntax_tree.evaluate())
        return automata_minimization(deterministic)
def test_automata_concatenation_recognize(a1, a2, text, recognize):
    """The concatenation of *a1* and *a2* must recognize *text* iff *recognize*."""
    concatenated = nfa_to_dfa(automata_concatenation(a1, a2))
    assert concatenated.recognize(text) == recognize
Example #4
0
def automaton_to_regex(automaton):
    """Build a regular expression equivalent to *automaton* via GNFA state elimination."""
    dfa = nfa_to_dfa(automaton)

    n_states, transitions = __automaton_to_gnfa(dfa)
    state_order = [*range(n_states)]
    return __gnfa_to_regex(state_order, transitions)
def test_automata_union_recognize(a1, a2, text, recognize):
    """The union of *a1* and *a2* must recognize *text* iff *recognize*."""
    merged = nfa_to_dfa(automata_union(a1, a2))
    assert merged.recognize(text) == recognize
def test_nfa_to_dfa_recognize(nfa, text, recognize):
    """Determinizing *nfa* must preserve recognition of *text*."""
    determinized = nfa_to_dfa(nfa)
    assert determinized.recognize(text) == recognize
def test_nfa_to_dfa_finals(nfa, finals):
    """The determinized automaton must have exactly *finals* final states."""
    determinized = nfa_to_dfa(nfa)
    assert len(determinized.finals) == finals
def test_nfa_to_dfa_states(nfa, states):
    """The determinized automaton must have exactly *states* states."""
    determinized = nfa_to_dfa(nfa)
    assert determinized.states == states
def test_automata_closure_recognize(automaton, text, recognize):
    """The Kleene closure of *automaton* must recognize *text* iff *recognize*."""
    starred = nfa_to_dfa(automata_closure(automaton))
    assert starred.recognize(text) == recognize