Example 1
0
 def __init__(self, kws, sigfilter, opt_timescale):
     """ Build the VCD header grammar and generate an LR parser for it.

     Args:
         kws: extra keyword terminals to register with the grammar.
         sigfilter: container of fully qualified signal names to keep.
         opt_timescale: the timescale requested by the caller.
     """
     identity = lambda l: l
     g = Grammar()
     g.add_terminals(
         ['ID', 'NUMBER', 'STRING', 'TIMEUNIT', EPS, EOF] + kws)
     g.add_production('input', ['exp_list'])
     g.add_one_or_more('exp', 'exp_list')
     g.add_production('exp', ['$SCOPE', 'scopetype', 'ID', '$END'],
                      self.handle_start_module)
     # All scope kinds are treated alike; just pass the token through.
     for scope_kind in ('MODULE', 'TASK', 'BEGIN'):
         g.add_production('scopetype', [scope_kind], identity)
     g.add_production('exp', ['$UPSCOPE', '$END'], self.handle_end_module)
     g.add_production('exp', ['$TIMESCALE', 'TIMEUNIT', '$END'],
                      self.handle_timescale)
     g.add_production('exp',
                      ['$VAR', 'type', 'NUMBER', 'code', 'name', '$END'],
                      self.handle_vardecl)
     g.add_production('code', ['ID'], identity)
     g.add_production('code', ['NUMBER'], self.handle_number)
     g.add_production('name', ['ID'], identity)
     g.add_production('name', ['ID', 'ID'], self.handle_index)
     for var_kind in ('WIRE', 'REG', 'INTEGER', 'TRIREG'):
         g.add_production('type', [var_kind], identity)
     g.start_symbol = 'input'
     self.p = LrParserBuilder(g).generate_parser()
     # Stack of scope names forming the current hierarchical prefix.
     self.sigprefix = []
     self.sigfilter = sigfilter
     self.timescale = opt_timescale
Example 2
0
    def test_parser(self):
        """ Check the generated table sizes, then run the parser. """
        stream = ['(', '(', ')', ')', '(', ')']
        # Construct the parser from the fixture grammar:
        parser = LrParserBuilder(self.g).generate_parser()
        self.assertEqual(5, len(parser.goto_table))
        self.assertEqual(19, len(parser.action_table))
        # Drive the parser with the token stream:
        parser.parse(gen_tokens(stream))
Example 3
0
 def test_empty(self):
     """ Test empty token stream """
     grammar = Grammar()
     grammar.add_terminals([','])
     grammar.add_production('input', [','])
     grammar.start_symbol = 'input'
     parser = LrParserBuilder(grammar).generate_parser()
     # An empty stream cannot satisfy the single production:
     with self.assertRaises(ParserException):
         parser.parse(gen_tokens([]))
Example 4
0
 def test_eps(self):
     """ Test epsilon terminal """
     grammar = Grammar()
     grammar.add_terminals(['a', 'b'])
     # 'optional_a' derives either a single 'a' or nothing:
     grammar.add_production('input', ['optional_a', 'b'])
     grammar.add_production('optional_a', ['a'])
     grammar.add_production('optional_a', [])
     grammar.start_symbol = 'input'
     parser = LrParserBuilder(grammar).generate_parser()
     # A lone 'b' must parse via the empty alternative:
     parser.parse(gen_tokens(['b']))
Example 5
0
 def test_shift_reduce_conflict(self):
     """ Must be handled automatically by doing shift """
     grammar = Grammar()
     grammar.add_terminals([EOF, 'if', 'then', 'else', 'ass'])
     # Classic dangling-else ambiguity:
     grammar.add_production('if_stmt', ['if', 'then', 'stmt'])
     grammar.add_production(
         'if_stmt', ['if', 'then', 'stmt', 'else', 'stmt'])
     grammar.add_production('stmt', ['if_stmt'])
     grammar.add_production('stmt', ['ass'])
     grammar.start_symbol = 'stmt'
     parser = LrParserBuilder(grammar).generate_parser()
     # The 'else' could attach to either 'if'; shifting resolves it:
     parser.parse(gen_tokens(
         ['if', 'then', 'if', 'then', 'ass', 'else', 'ass']))
Example 6
0
    def test_cb(self):
        """ Test callback of one rule and order or parameters """
        self.cb_called = False

        def on_goal(a, c, b):
            # Arguments must arrive in production order: 'a', 'c', 'b'.
            self.cb_called = True
            self.assertEqual('a', a.val)
            self.assertEqual('b', b.val)
            self.assertEqual('c', c.val)

        grammar = Grammar()
        grammar.add_terminals(['a', 'b', 'c'])
        grammar.add_production('goal', ['a', 'c', 'b'], on_goal)
        grammar.start_symbol = 'goal'
        parser = LrParserBuilder(grammar).generate_parser()
        parser.parse(gen_tokens(['a', 'c', 'b']))
        self.assertTrue(self.cb_called)
Example 7
0
 def test_simple_grammar(self):
     """ Exercise a small arithmetic expression grammar end to end. """
     # Define the grammar:
     grammar = Grammar()
     grammar.add_terminals(['identifier', '(', ')', '+', '*'])
     grammar.add_production('input', ['expression'])
     grammar.add_production('expression', ['term'])
     grammar.add_production('expression', ['expression', '+', 'term'])
     grammar.add_production('term', ['factor'])
     grammar.add_production('term', ['term', '*', 'factor'])
     grammar.add_production('factor', ['(', 'expression', ')'])
     grammar.add_production('factor', ['identifier'])
     grammar.start_symbol = 'input'
     # Build the parser and feed it "id + id + id":
     parser = LrParserBuilder(grammar).generate_parser()
     parser.parse(gen_tokens(
         ['identifier', '+', 'identifier', '+', 'identifier']))
Example 8
0
 def test_redefine_terminal(self):
     """ Test correct behavior when a terminal is redefined """
     grammar = Grammar()
     grammar.add_terminals([EOF, 'b', 'c'])
     grammar.add_production('goal', ['a'])
     # 'b' is already a terminal, so it may not head a production:
     with self.assertRaises(ParserGenerationException):
         grammar.add_production('b', ['c'])
     grammar.add_production('a', ['c'])
     grammar.start_symbol = 'goal'
     LrParserBuilder(grammar).generate_parser()
Example 9
0
 def test_undefined_terminal(self):
     """ Test correct behavior when a terminal is undefined """
     grammar = Grammar()
     grammar.add_terminals(['b'])
     grammar.add_production('goal', ['a'])
     grammar.add_production('a', ['b'])
     # 'c' is neither declared as a terminal nor defined by a rule:
     grammar.add_production('a', ['c'])
     grammar.start_symbol = 'goal'
     with self.assertRaises(ParserGenerationException):
         LrParserBuilder(grammar).generate_parser()
Example 10
0
 def test_reduce_reduce_conflict(self):
     """ Check if a reduce-reduce conflict is detected """
     grammar = Grammar()
     grammar.add_terminals(['id'])
     # Both 'b' and 'c' reduce from 'id', so the parser cannot choose:
     grammar.add_production('goal', ['a'])
     grammar.add_production('a', ['b'])
     grammar.add_production('a', ['c'])
     grammar.add_production('b', ['id'])
     grammar.add_production('c', ['id'])
     grammar.start_symbol = 'goal'
     with self.assertRaises(ParserGenerationException):
         LrParserBuilder(grammar).generate_parser()
Example 11
0
 def test_init_item_set(self):
     """ The initial item set must contain the full closure. """
     p0, p1, p2, p3, p4 = self.g.productions
     s0 = LrParserBuilder(self.g).initial_item_set()
     # 9 items in total, with the goal rule included!
     self.assertEqual(9, len(s0))
     self.assertIn(Item(p0, 0, EOF), s0)
     for production in (p1, p2, p3, p4):
         for lookahead in (EOF, '('):
             self.assertIn(Item(production, 0, lookahead), s0)
Example 12
0
 def test_eps_sequence(self):
     """ Test epsilon terminal for use in sequences """
     grammar = Grammar()
     grammar.add_terminals(['a'])
     # Left-recursive list of 'a's that may also be empty:
     grammar.add_production('aas', [])
     grammar.add_production('aas', ['aas', 'a'])
     grammar.start_symbol = 'aas'
     parser = LrParserBuilder(grammar).generate_parser()
     # Both a non-empty and an empty sequence must be accepted:
     parser.parse(gen_tokens(['a', 'a', 'a']))
     parser.parse(gen_tokens([]))
Example 13
0
    def test_closure(self):
        """ Closure of the start item must pull in every other item. """
        p0, p1, p2, p3, p4 = self.g.productions
        kernel = set()
        kernel.add(Item(p0, 0, EOF))
        # Only the one kernel rule so far:
        self.assertEqual(1, len(kernel))
        self.assertIn(Item(p0, 0, EOF), kernel)

        # Invoke closure on set:
        closed = LrParserBuilder(self.g).closure(kernel)
        self.assertIn(Item(p0, 0, EOF), closed)
        for production in (p1, p2, p3, p4):
            for lookahead in (EOF, '('):
                self.assertIn(Item(production, 0, lookahead), closed)
Example 14
0
 def test_eps2(self):
     """ Optional label prefix before an instruction. """
     grammar = Grammar()
     grammar.add_terminals(['id', ':'])
     grammar.add_production('input', ['opt_lab', 'ins', 'op1'])
     grammar.add_production('input', ['ins', 'op1'])
     grammar.add_production('opt_lab', ['id', ':'])
     grammar.add_production('ins', ['id'])
     grammar.add_production('op1', ['id'])
     grammar.start_symbol = 'input'
     parser = LrParserBuilder(grammar).generate_parser()
     # With a label, i.e. "lab_0: inc rax":
     parser.parse(gen_tokens(['id', ':', 'id', 'id']))
     # Without a label, i.e. "inc rax":
     parser.parse(gen_tokens(['id', 'id']))
Example 15
0
 def test_canonical(self):
     """ Generating the canonical collection for the fixture grammar. """
     builder = LrParserBuilder(self.g)
     initial = builder.initial_item_set()
     states, transitions, _ = builder.gen_canonical_set(initial)
     # Must result in 12 sets:
     self.assertEqual(12, len(states))
Example 16
0
 def test_first_set(self):
     """ FIRST of a terminal is itself; FIRST of these rules is '('. """
     builder = LrParserBuilder(self.g)
     for terminal in ['(', ')', EOF, 'EPS']:
         self.assertEqual({terminal}, builder.first[terminal])
     for nonterminal in ['list', 'pair', 'goal']:
         self.assertEqual({'('}, builder.first[nonterminal])
Example 17
0
class VcdParser:
    """ Parse the header of a VCD (Value Change Dump) file.

    Builds a grammar for the $scope / $var / $timescale / $upscope
    declarations, generates an LR parser for it, and records information
    about the signals whose fully qualified name appears in *sigfilter*.
    Results are accumulated in the module-level ``data`` / ``code2sig`` /
    ``timemul`` globals.
    """

    def __init__(self, kws, sigfilter, opt_timescale):
        """ Create the grammar and generate the parser.

        Args:
            kws: extra keyword terminals (e.g. '$SCOPE', 'MODULE', ...).
            sigfilter: fully qualified signal names to record.
            opt_timescale: the timescale requested by the caller.
        """
        toks = ['ID', 'NUMBER', 'STRING', 'TIMEUNIT', EPS, EOF] + kws
        g = Grammar()
        g.add_terminals(toks)
        g.add_production('input', ['exp_list'])
        g.add_one_or_more('exp', 'exp_list')
        g.add_production('exp', ['$SCOPE', 'scopetype', 'ID', '$END'],
                         self.handle_start_module)
        g.add_production('scopetype', ['MODULE'], lambda l: l)
        g.add_production('scopetype', ['TASK'], lambda l: l)
        g.add_production('scopetype', ['BEGIN'], lambda l: l)
        g.add_production('exp', ['$UPSCOPE', '$END'], self.handle_end_module)
        g.add_production('exp', ['$TIMESCALE', 'TIMEUNIT', '$END'],
                         self.handle_timescale)
        g.add_production('exp',
                         ['$VAR', 'type', 'NUMBER', 'code', 'name', '$END'],
                         self.handle_vardecl)
        g.add_production('code', ['ID'], lambda l: l)
        g.add_production('code', ['NUMBER'], self.handle_number)
        g.add_production('name', ['ID'], lambda l: l)
        g.add_production('name', ['ID', 'ID'], self.handle_index)
        g.add_production('type', ['WIRE'], lambda l: l)
        g.add_production('type', ['REG'], lambda l: l)
        g.add_production('type', ['INTEGER'], lambda l: l)
        g.add_production('type', ['TRIREG'], lambda l: l)
        g.start_symbol = 'input'
        self.p = LrParserBuilder(g).generate_parser()
        # Stack of scope names forming the current hierarchical prefix.
        self.sigprefix = []
        self.sigfilter = sigfilter
        self.timescale = opt_timescale

    def parse(self, lexer):
        """ Feed the token stream from *lexer* to the generated parser. """
        self.p.parse(lexer)

    def handle_index(self, name, index):
        """ Merge an index token into the signal name token and return it. """
        name.val += index.val
        return name

    def handle_number(self, num):
        """ Normalize a numeric identifier code to its string form. """
        num.val = str(num.val)
        return num

    def handle_start_module(self, scope_tag, module_tag, name, end_tag):
        """ Push the entered scope's name onto the hierarchy stack. """
        self.sigprefix.append(name.val)

    def handle_end_module(self, scope_tag, end_tag):
        """ Pop the current scope when leaving it ($upscope). """
        self.sigprefix.pop()

    def handle_timescale(self, time_tag, timeunit, end_tag):
        """ Record the ratio between the file's and the requested timescale. """
        global timemul
        timemul = timeunit.val / timeunit2val(self.timescale)

    def handle_vardecl(self, var_tag, typeinfo, width, key, signame, end_tag):
        """ Record a $var declaration if its full name passes the filter. """
        global data, code2sig
        path = '.'.join(self.sigprefix)
        name = path + '.' + signame.val
        if name in self.sigfilter:
            # Nets are grouped by identifier code; several hierarchical
            # names may share one code (aliases of the same net).
            if key.val not in data:
                data[key.val] = {}
            if 'nets' not in data[key.val]:
                data[key.val]['nets'] = []
            sig_data = {
                'type': typeinfo.val,
                'name': signame.val,
                'size': width.val,
                'hier': path,
            }
            data[key.val]['nets'].append(sig_data)
            code2sig[name] = key.val