Example #1
 # Assumes imports along the lines of:
 #   import pvl.lexer as Lexer
 #   from pvl.grammar import OmniGrammar
 def get_tokens(s):
     # Collect every token the lexer yields for the string s.
     tokens = list()
     lex = Lexer.lexer(s, g=OmniGrammar())
     for t in lex:
         # print(f'yields: {t}')
         tokens.append(t)
     return tokens
Example #2
    def test_send(self):
        s = "One Two Three"
        tokens = Lexer.lexer(s)
        for t in tokens:
            if t == "Two":
                tokens.send(t)  # push the Token back into the generator
                break

        # The pushed-back token is yielded again by the next call to next().
        self.assertEqual("Two", next(tokens))
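
The test above relies on the lexer generator treating a value passed in via send() as a push-back: the sent Token is yielded again on the following next() call. A minimal, self-contained sketch of that pattern (not pvl's actual implementation, and assuming, as in the test, that the value sent back is the token that was just yielded):

    def pushback_lexer(s):
        # Yield whitespace-separated words; a word handed back via send()
        # is yielded again on the next iteration.
        for token in s.split():
            while token is not None:
                sent = yield token
                if sent is not None:
                    # The send() call itself consumes one yield, so answer it
                    # with None and leave the current token to be re-yielded.
                    yield None
                else:
                    token = None  # plain next(): advance to the next word

    tokens = pushback_lexer("One Two Three")
    for t in tokens:
        if t == "Two":
            tokens.send(t)  # push "Two" back
            break
    print(next(tokens))  # prints: Two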
Example #3
 # Assumes imports along the lines of:
 #   import pvl.lexer as Lexer
 #   from pvl.grammar import PVLGrammar
 def get_tokens(s):
     tokens = list()
     g = PVLGrammar()
     # Recognize both /* ... */ block comments and # comments that run to end of line.
     g.comments = (('/*', '*/'), ('#', '\n'))
     lex = Lexer.lexer(s, g=g)
     for t in lex:
         # print(f'yields: {t}')
         tokens.append(t)
     return tokens
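
A quick, hedged way to exercise this helper, assuming the imports sketched in the comments above (the sample string is illustrative, and the exact tokens yielded depend on pvl's lexer rules):

    sample = "a = b # line comment\n/* block comment */ c = d"
    for tok in get_tokens(sample):
        print(repr(tok))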
Example #4
    def test_lexer_recurse(self):
        def foo(tokens):
            two = list()
            for t in tokens:
                if t == "f":
                    break
                two.append(t)
            return two

        lex = Lexer.lexer("a b c d e f g h")
        one = list()
        for t in lex:
            if t == "c":
                # foo() iterates the same generator, so it picks up
                # right after "c" and stops when it hits "f".
                two = foo(lex)
            else:
                one.append(t)

        # "c" and "f" are consumed by the checks above but appended by neither loop.
        self.assertEqual(["a", "b", "g", "h"], one)
        self.assertEqual(["d", "e"], two)
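
Nothing in this handoff is specific to pvl: any Python iterator passed to a helper function resumes exactly where the caller stopped. The same split can be sketched with a plain list (the names here are illustrative, not part of pvl):

    def split_stream(items, start, stop):
        it = iter(items)
        outer, inner = [], []
        for x in it:
            if x == start:
                # Nested loop over the same iterator, like foo() above:
                # it continues with the element after `start`.
                for y in it:
                    if y == stop:
                        break
                    inner.append(y)
            else:
                outer.append(x)
        return outer, inner

    print(split_stream("a b c d e f g h".split(), "c", "f"))
    # (['a', 'b', 'g', 'h'], ['d', 'e'])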