Example #1
    def setUp(self):
        self.lexer = Lexer(self.CalcTokens)
        self.eacc = Eacc(self.CalcGrammar)

        # Link the handles to the patterns.
        self.eacc.add_handle(self.CalcGrammar.r_plus, self.plus)
        self.eacc.add_handle(self.CalcGrammar.r_minus, self.minus)
        self.eacc.add_handle(self.CalcGrammar.r_div, self.div)
        self.eacc.add_handle(self.CalcGrammar.r_mul, self.mul)
        self.eacc.add_handle(self.CalcGrammar.r_paren, self.paren)
        self.eacc.add_handle(self.CalcGrammar.r_done, self.done)
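This fragment presupposes the CalcTokens spec, the CalcGrammar rules and the handler methods (plus, minus, div, mul, paren, done) that Example #7 shows in full. A hedged sketch of a test method that could follow this setUp, using only calls already shown on this page (the method name and the expression are illustrative):

    def test_expr(self):
        # Hypothetical test: lex, parse, and force evaluation of the tree.
        tokens = self.lexer.feed('1 + 2 * 3')
        ptree = self.eacc.build(tokens)
        ptree = list(ptree)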
Example #2
class TestTokVal(unittest.TestCase):
    class Wordtokens(XSpec):
        t_word = LexTok(r'[a-zA-Z]+', Word)
        t_blank = LexTok(r' +', type=Blank, discard=True)

        root = [t_word, t_blank]

    class WordGrammar(Grammar):
        r_phrase0 = Rule(TokVal('alpha'), TokVal('beta'))
        r_phrase1 = Rule(TokVal('gamma'), TokVal('zeta'))
        r_phrase2 = Rule(TokVal('abc'), TokVal('def'))

        r_sof = Rule(Sof)
        r_eof = Rule(Eof)

        root = [r_phrase1, r_phrase0, r_phrase2, r_sof, r_eof]

    def setUp(self):
        self.lexer = Lexer(self.Wordtokens)
        self.eacc = Eacc(self.WordGrammar)

    def test0(self):
        data = 'alpha beta gamma zeta'
        tokens = self.lexer.feed(data)
        ptree = self.eacc.build(tokens)
        ptree = list(ptree)

    def test1(self):
        data = 'gamma zeta     abc      def     alpha beta '
        tokens = self.lexer.feed(data)
        ptree = self.eacc.build(tokens)
        ptree = list(ptree)

    def test2(self):
        data = 'gamma zeta'
        tokens = self.lexer.feed(data)
        ptree = self.eacc.build(tokens)
        ptree = list(ptree)

    def test3(self):
        data = 'gamma zeta'
        tokens = self.lexer.feed(data)
        ptree = self.eacc.build(tokens)
        ptree = list(ptree)

    def test4(self):
        data = 'gamma zeta abc'
        tokens = self.lexer.feed(data)
        ptree = self.eacc.build(tokens)

        with self.assertRaises(EaccError):
            ptree = list(ptree)
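The test class above omits its imports. Modeled on the imports shown in Example #4, plus the unittest and EaccError names the assertions use, the file header presumably looks roughly like this (the EaccError import path is an assumption):

import unittest

from eacc.eacc import Eacc, Rule, Grammar, TokVal, EaccError
from eacc.lexer import XSpec, Lexer, LexTok
from eacc.token import Blank, Word, Sof, Eof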
Example #3
class TestOps0(unittest.TestCase):
    class ExprTokens(XSpec):
        t_one = LexTok(r'1', One)
        t_two = LexTok(r'2', Two)

        t_three = LexTok(r'3', Three)
        t_four = LexTok(r'4', Four)
        t_five = LexTok(r'5', Five)
        t_blank = LexTok(r' +', Blank, discard=True)

        root = [t_one, t_two, t_three, t_four, t_five, t_blank]

    class ExprGrammar(Grammar):
        r_num = Rule(One, Except(Three), One)

        r_sof = Rule(Sof)
        r_eof = Rule(Eof)

        root = [r_num, r_sof, r_eof]

    def setUp(self):
        self.lexer = Lexer(self.ExprTokens)
        self.eacc = Eacc(self.ExprGrammar)

    def test0(self):
        data = '121 141 141 141'
        tokens = self.lexer.feed(data)
        ptree = self.eacc.build(tokens)
        ptree = list(ptree)
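Example #3 additionally relies on the digit token types One through Five and on Except. A sketch of the assumed imports, again modeled on Example #4 (the module paths for One..Five and Except are assumptions):

import unittest

from eacc.eacc import Eacc, Rule, Grammar, Except
from eacc.lexer import XSpec, Lexer, LexTok
from eacc.token import Blank, Sof, Eof, One, Two, Three, Four, Five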
Example #4
"""

from eacc.eacc import Eacc, Rule, Grammar, TokVal
from eacc.lexer import XSpec, Lexer, LexTok
from eacc.token import Blank, Word, Sof, Eof


class WordTokens(XSpec):
    t_word = LexTok(r'[a-zA-Z]+', Word)
    t_blank = LexTok(r' +', type=Blank, discard=True)

    root = [t_word, t_blank]


class WordGrammar(Grammar):
    r_phrase0 = Rule(TokVal('alpha'), TokVal('beta'))
    r_phrase1 = Rule(TokVal('gamma'), TokVal('zeta'))
    r_sof = Rule(Sof)
    r_eof = Rule(Eof)

    root = [r_phrase1, r_phrase0, r_sof, r_eof]


if __name__ == '__main__':
    data = 'alpha beta gamma zeta'
    lexer = Lexer(WordTokens)
    eacc = Eacc(WordGrammar)
    tokens = lexer.feed(data)
    ptree = eacc.build(tokens)
    print(list(ptree))
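With no handles attached, list(ptree) simply yields the matched pattern nodes. If a semantic action is wanted, a handler can be linked to a rule before the result of eacc.build is consumed, as the other examples on this page do. A purely illustrative sketch:

def phrase0(alpha, beta):
    # Receives the two tokens matched by r_phrase0.
    print('matched:', alpha.val(), beta.val())

eacc.add_handle(WordGrammar.r_phrase0, phrase0)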
Example #5
File: calc.py Project: iogf/eacc
def mul(term, sign, factor):
    return term.val() * factor.val()


def paren(left, expression, right):
    return expression.val()


def done(sof, num, eof):
    print('Result:', num.val())
    return num.val()


if __name__ == '__main__':
    data = '2 * 5 + 10 -(2 * 3 - 10 )+ 30/(1-3+ 4* 10 + (11/1))'
    lexer = Lexer(CalcTokens)
    tokens = lexer.feed(data)
    eacc = Eacc(CalcGrammar)

    # Link the handles to the patterns.
    eacc.add_handle(CalcGrammar.r_plus, plus)

    eacc.add_handle(CalcGrammar.r_minus, minus)
    eacc.add_handle(CalcGrammar.r_div, div)
    eacc.add_handle(CalcGrammar.r_mul, mul)
    eacc.add_handle(CalcGrammar.r_paren, paren)
    eacc.add_handle(CalcGrammar.r_done, done)

    ptree = eacc.build(tokens)
    ptree = list(ptree)
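calc.py also defines the plus, minus and div handlers that the add_handle calls above reference; they are not part of this excerpt. Adapted from the equivalent methods in Example #7, they presumably read:

def plus(expr, sign, term):
    return expr.val() + term.val()


def minus(expr, sign, term):
    return expr.val() - term.val()


def div(term, sign, factor):
    return term.val() / factor.val()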
Example #6
    root = [r_lparen, r_rparen, r_num, r_blank]


class TupleGrammar(Grammar):
    # Times accumulates as many Num tokens as possible (at least one).
    g_num = Times(Num, min=1, type=Num)

    # Then we trigger that pattern in this rule.
    r_paren = Rule(LP, g_num, RP, type=Num)
    r_done = Rule(Sof, Num, Eof)

    root = [r_paren, r_done]


def done(sof, expr, eof):
    print('Result:', expr)


if __name__ == '__main__':
    print('Example 1')
    data = '(1 (1 1) ((((1)))))'

    lexer = Lexer(TupleTokens)
    tokens = lexer.feed(data)
    eacc = Eacc(TupleGrammar)

    eacc.add_handle(TupleGrammar.r_done, done)

    ptree = eacc.build(tokens)
    ptree = list(ptree)
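The TupleTokens spec is cut off in this excerpt; only its root list survives at the top. Judging from the names it references and from the token specs in the other examples (and assuming LP, RP, Num and Blank are imported from eacc.token as elsewhere), it presumably looks roughly like this:

class TupleTokens(XSpec):
    r_lparen = LexTok(r'\(', LP)
    r_rparen = LexTok(r'\)', RP)
    r_num = LexTok(r'[0-9]+', Num, float)
    r_blank = LexTok(r' +', Blank, discard=True)

    root = [r_lparen, r_rparen, r_num, r_blank]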
Example #7
class TestRule(unittest.TestCase):
    class CalcTokens(XSpec):
        t_plus = LexTok(r'\+', Plus)
        t_minus = LexTok(r'\-', Minus)

        t_lparen = LexTok(r'\(', LP)
        t_rparen = LexTok(r'\)', RP)
        t_mul = LexTok(r'\*', Mul)
        t_div = LexTok(r'\/', Div)

        t_num = LexTok(r'[0-9]+', Num, float)
        t_blank = LexTok(r' +', Blank, discard=True)

        root = [
            t_plus, t_minus, t_lparen, t_num, t_blank, t_rparen, t_mul, t_div
        ]

    class CalcGrammar(Grammar):
        r_paren = Rule(LP, Num, RP, type=Num)
        r_div = Rule(Num, Div, Num, type=Num)
        r_mul = Rule(Num, Mul, Num, type=Num)
        o_div = Rule(Div)
        o_mul = Rule(Mul)

        r_plus = Rule(Num, Plus, Num, type=Num, up=(o_mul, o_div))
        r_minus = Rule(Num, Minus, Num, type=Num, up=(o_mul, o_div))

        r_done = Rule(Sof, Num, Eof)
        root = [r_paren, r_plus, r_minus, r_mul, r_div, r_done]

    def plus(self, expr, sign, term):
        return expr.val() + term.val()

    def minus(self, expr, sign, term):
        return expr.val() - term.val()

    def div(self, term, sign, factor):
        return term.val() / factor.val()

    def mul(self, term, sign, factor):
        return term.val() * factor.val()

    def paren(self, left, expression, right):
        return expression.val()

    def done(self, sof, num, eof):
        print('Result:', num.val())
        return num.val()

    def setUp(self):
        self.lexer = Lexer(self.CalcTokens)
        self.eacc = Eacc(self.CalcGrammar)

        # Link the handles to the patterns.
        self.eacc.add_handle(self.CalcGrammar.r_plus, self.plus)
        self.eacc.add_handle(self.CalcGrammar.r_minus, self.minus)
        self.eacc.add_handle(self.CalcGrammar.r_div, self.div)
        self.eacc.add_handle(self.CalcGrammar.r_mul, self.mul)
        self.eacc.add_handle(self.CalcGrammar.r_paren, self.paren)
        self.eacc.add_handle(self.CalcGrammar.r_done, self.done)

    def test0(self):
        data = '1+2/3*(3*2 - 1) /(1-1-2-3-1+2)*3/ (1 - 2)*10'
        tokens = self.lexer.feed(data)
        ptree = self.eacc.build(tokens)
        ptree = list(ptree)
        print('Expr:', data)
        self.assertEqual(ptree[-1].val(), eval(data))

    def test1(self):
        data = '(1+2/3*(3*2 - 1)) + ((1 - 2)*10)'
        tokens = self.lexer.feed(data)
        ptree = self.eacc.build(tokens)
        ptree = list(ptree)
        print('Expr:', data)
        self.assertEqual(ptree[-1].val(), eval(data))

    def test2(self):
        data = '((1+2/3*(3*2 - 1)) + ((1 - 2)*10))'
        tokens = self.lexer.feed(data)
        ptree = self.eacc.build(tokens)
        ptree = list(ptree)
        print('Expr:', data)
        self.assertEqual(ptree[-1].val(), eval(data))

    def test3(self):
        data = '(1/2) * (3/4) * (5/2/3/5/2*1)/((((((1))))))'
        tokens = self.lexer.feed(data)
        ptree = self.eacc.build(tokens)
        ptree = list(ptree)
        print('Expr:', data)
        self.assertEqual(ptree[-1].val(), eval(data))

    def test4(self):
        data = '(1/2) * (3/4) * (5 2)'
        tokens = self.lexer.feed(data)
        print('Expr:', data)
        ptree = self.eacc.build(tokens)

        with self.assertRaises(EaccError):
            ptree = list(ptree)

    def test5(self):
        data = '(1/2) * 3/4) * (512)'
        tokens = self.lexer.feed(data)
        print('Expr:', data)
        ptree = self.eacc.build(tokens)

        with self.assertRaises(EaccError):
            ptree = list(ptree)

    def test6(self):
        data = '1+2*2/2 - 2/2 - 2*2/2+1'
        tokens = self.lexer.feed(data)
        ptree = self.eacc.build(tokens)
        ptree = list(ptree)
        print('Expr:', data)
        self.assertEqual(ptree[-1].val(), eval(data))
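As with the other test classes, the imports are not included in this excerpt. Modeled on Example #4, the header presumably brings in unittest, the lexer and grammar classes, EaccError and the operator token types (the module paths for EaccError and the operator tokens are assumptions):

import unittest

from eacc.eacc import Eacc, Rule, Grammar, EaccError
from eacc.lexer import XSpec, Lexer, LexTok
from eacc.token import Plus, Minus, LP, RP, Mul, Div, Num, Blank, Sof, Eof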
Example #8
    def setUp(self):
        self.lexer = Lexer(self.ExprTokens)
        self.eacc = Eacc(self.ExprGrammar)
Example #9
    def setUp(self):
        self.lexer = Lexer(self.Wordtokens)
        self.eacc = Eacc(self.WordGrammar)
Example #10
    t_four = LexTok(r'4', Four)
    t_five = LexTok(r'5', Five)
    t_blank = LexTok(r' +', Blank, discard=True)

    root = [t_one, t_two, t_three, t_four, t_five, t_blank]


class ExprGrammar(Grammar):
    r_one = Rule(One, Except(Three), One)
    r_sof = Rule(Sof)
    r_eof = Rule(Eof)
    root = [r_one, r_sof, r_eof]


if __name__ == '__main__':
    print('Example 1')
    lexer = Lexer(ExprTokens)
    eacc = Eacc(ExprGrammar)
    data = '121 141'

    tokens = lexer.feed(data)
    ptree = eacc.build(tokens)
    ptree = list(ptree)
    print(ptree)

    print('\nExample 2')
    data = '1 2 1 1 3 1'  # Will fail.
    tokens = lexer.feed(data)
    ptree = eacc.build(tokens)
    ptree = list(ptree)
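The second run is expected to fail, since the '3' hits the Except(Three) guard in r_one. Example #2's test4 wraps this kind of failure in assertRaises(EaccError); a standalone variant could catch the error instead of letting it propagate. A minimal sketch (the EaccError import path is an assumption):

from eacc.eacc import EaccError  # assumed import path

tokens = lexer.feed('1 2 1 1 3 1')
try:
    ptree = list(eacc.build(tokens))
except EaccError:
    print('No valid parse for: 1 2 1 1 3 1')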