def get_tokens(s):
    """Lex *s* with an OmniGrammar and return every yielded token as a list."""
    return [token for token in Lexer.lexer(s, g=OmniGrammar())]
def test_send(self):
    """send() should hand a token back to the lexer so next() re-yields it."""
    tokens = Lexer.lexer("One Two Three")
    for token in tokens:
        if token != "Two":
            continue
        # Return the Token to the generator, then confirm it comes back out.
        tokens.send(token)
        break
    self.assertEqual("Two", next(tokens))
def get_tokens(s):
    """Lex *s* with a PVLGrammar configured for /* */ and # comments."""
    grammar = PVLGrammar()
    grammar.comments = (('/*', '*/'), ('#', '\n'))
    return [token for token in Lexer.lexer(s, g=grammar)]
def get_tokens(s):
    """Lex *s* with a PVLGrammar configured for /* */ and # comments."""
    grammar = PVLGrammar()
    grammar.comments = (("/*", "*/"), ("#", "\n"))
    return [token for token in Lexer.lexer(s, g=grammar)]
def test_lexer_recurse(self):
    """A nested consumer of the same lexer generator advances the shared state."""

    def consume_until_f(tokens):
        # Collect tokens from the shared generator until "f" is seen.
        collected = []
        for token in tokens:
            if token == "f":
                break
            collected.append(token)
        return collected

    lex = Lexer.lexer("a b c d e f g h")
    outer = []
    for token in lex:
        if token == "c":
            inner = consume_until_f(lex)
        else:
            outer.append(token)
    self.assertEqual(["a", "b", "g", "h"], outer)
    self.assertEqual(["d", "e"], inner)
def test_lexer_recurse(self):
    """A nested consumer of the same lexer generator advances the shared state."""

    def consume_until_f(tokens):
        # Collect tokens from the shared generator until 'f' is seen.
        collected = []
        for token in tokens:
            if token == 'f':
                break
            collected.append(token)
        return collected

    lex = Lexer.lexer('a b c d e f g h')
    outer = []
    for token in lex:
        if token == 'c':
            inner = consume_until_f(lex)
        else:
            outer.append(token)
    self.assertEqual(['a', 'b', 'g', 'h'], outer)
    self.assertEqual(['d', 'e'], inner)