Example 1
def cyk(self, q):
    """Return True if the grammar generates the sequence q, False otherwise."""
    g = ContextFree.prepare_for_cyk(self.grammar)
    if not q:
        # an empty query stands for the empty word
        q = [EPS]
    try:
        # cyk raises an exception when the sequence cannot be derived
        cyk(g, q)
    except BaseException:
        return False
    return True
Example 2
def test_cnf_grammar_is_equiv():
    lines = ['S a S b S', 'S eps']
    g, symbols = split_cfg(lines)

    tmp_g = ContextFree.transform_to_chomsky_normal_form(g)
    new_g = ContextFree.prepare_for_cyk(tmp_g)
    assert cyk(new_g, 'abab')
    assert cyk(new_g, 'aabb')
    with raises(Exception):
        cyk(new_g, 'aba')
    with raises(Exception):
        cyk(new_g, 'abba')
Example 3
def test_split_cfg_grammar():
    lines = ['S a S b S', 'S eps']
    g, symbols = split_cfg(lines)
    assert len(g.terminals) == 3
    assert len(g.nonterminals) == 1
    assert len(g.rules) == 2

    # Check that the grammar accepts and rejects the right words using the library's CYK parser
    new_g = ContextFree.prepare_for_cyk(g)
    assert cyk(new_g, 'abab')
    assert cyk(new_g, 'aabb')
    with raises(Exception):
        cyk(new_g, 'aba')
    with raises(Exception):
        cyk(new_g, 'abba')
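
The two tests above get their grammar from the project-specific split_cfg helper. For comparison, a roughly equivalent grammar for S -> a S b S | eps can be declared directly with grammpy's own classes. This is only a sketch: it assumes that Grammar, Nonterminal, Rule and EPS are importable from the top-level grammpy package and that a Rule subclass may list several productions in its rules attribute.

from grammpy import Grammar, Nonterminal, Rule, EPS
from grammpy.transforms import ContextFree
from grammpy.parsers import cyk


class S(Nonterminal):
    pass


class SRules(Rule):
    # S -> a S b S | eps
    rules = [([S], ['a', S, 'b', S]),
             ([S], [EPS])]


g = Grammar(terminals=['a', 'b'],
            nonterminals=[S],
            rules=[SRules],
            start_symbol=S)

new_g = ContextFree.prepare_for_cyk(g)
assert cyk(new_g, 'aabb')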
Example 4
def setUp(self):
    self.g = Grammar(terminals=[My],
                     nonterminals=[S, X],
                     rules=[R],
                     start_symbol=S)
    # the second argument requests the transformation in place
    ContextFree.prepare_for_cyk(self.g, True)
Example 5
def to_weak_chomsky_nf(self):
    # transform the stored grammar in place and return it
    ContextFree.prepare_for_cyk(self.grammar, inplace=True)
    return self.grammar
Example 6
    @property  # assumed: get is read as a property below (root.get)
    def get(self):
        yield from self.to_symbols[0].get
        yield from self.to_symbols[2].get


g = Grammar(terminals=list(ascii_lowercase + '()*+'),
            nonterminals=[Symb, Concat, Or, Iterate],
            rules=[
                SymbRule, Bracket, ConcatRewrite, ConcatRule, OrRewrite,
                OrRule, IterateRewrite, IterateRule
            ],
            start_symbol=Or)

if __name__ == '__main__':
    gr = ContextFree.prepare_for_cyk(g)

    while True:
        read = input("Type regex or exit to quit: ").strip()
        if read == "exit":
            break
        if len(read) == 0:
            continue

        root = cyk(gr, read)
        root = InverseContextFree.reverse_cyk_transforms(root)
        root = InverseCommon.splitted_rules(root)
        for form in root.get:
            print(form)

    print("Quiting the application")
Example 7
    right = [LeftBracket, PlusMinus, RightBracket]

    def compute(self):
        # a bracketed expression takes the value of the expression inside the brackets
        self._from_symbols[0].attribute = self._to_symbols[1].attribute


g = Grammar(terminals=[
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, PlusOperator, MinusOperator,
    MultipleOperator, DivideOperator, LeftBracket, RightBracket
],
            nonterminals=[Number, MultipleDivide, PlusMinus],
            rules=[
                NumberDirect, NumberCompute, DivideApplied, MultipleApplied,
                NoDivideMultiple, MinusApplied, PlusApplied, NoPlusMinus,
                BracketsApplied
            ],
            start_symbol=PlusMinus)

if __name__ == '__main__':
    g = ContextFree.prepare_for_cyk(g, inplace=True)

    # tokens for the expression 10 + (5 * 2 + 4) / 2 * 4
    root = cyk(g, [
        1, 0, PlusOperator, LeftBracket, 5, MultipleOperator, 2, PlusOperator,
        4, RightBracket, DivideOperator, 2, MultipleOperator, 4
    ])

    root = InverseContextFree.reverse_cyk_transforms(root)
    root = InverseCommon.splitted_rules(root)

    print(root.attribute)
Example 8
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 01.04.2018 18:59
:Licence GPLv3
Part of lambda-cli

"""
from grammpy.transforms import ContextFree, InverseContextFree, InverseCommon
from grammpy.parsers import cyk
from .lambda_grammar import lambda_grammar
from .lex import lambda_cli_lex


_g = ContextFree.prepare_for_cyk(lambda_grammar)


def parse_from_tokens(input):
    parsed = cyk(_g, input)
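    # undo the CYK preparation in reverse order: the Chomsky-normal-form
    # transformation first, then unit rules, then epsilon rules, and
    # finally the rules that grammpy split internally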
    parsed = InverseContextFree.transform_from_chomsky_normal_form(parsed)
    parsed = InverseContextFree.unit_rules_restore(parsed)
    parsed = InverseContextFree.epsilon_rules_restore(parsed)
    parsed = InverseCommon.splitted_rules(parsed)
    return parsed.get_representation()


def parse(input):
    return parse_from_tokens(lambda_cli_lex(input))


def steps(input):