Example #1
def get_result(self, test):
    # Collect the string form of every token, one per line, until EOF.
    result = ''
    tokenizer = Tokenizer(text=test)
    while (token := tokenizer.next()).type != TokenType.EOF:
        result += str(token) + '\n'
    return result[:-1]  # drop the trailing newline
Example #2
def print_tokens(self):
    # Print every token read from the file until EOF is reached.
    tokenizer = Tokenizer(self.file)
    while (token := tokenizer.next()).type != TokenType.EOF:
        print(token)
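
Both examples assume a Tokenizer whose next() method returns token objects exposing a type attribute, and a TokenType enum with an EOF member. Below is a minimal sketch of that assumed interface, just enough to run Example #1 against a plain string; the token kinds and tokenization rules are illustrative assumptions, not part of the original code (Example #2 additionally passes a file, which this sketch does not handle).

import re
from enum import Enum, auto


class TokenType(Enum):
    # Token kinds are an assumption for illustration; only EOF is required
    # by the examples above.
    NUMBER = auto()
    IDENT = auto()
    EOF = auto()


class Token:
    def __init__(self, type, value):
        self.type = type
        self.value = value

    def __str__(self):
        return f'{self.type.name}({self.value!r})'


class Tokenizer:
    # Splits the input into runs of digits and word characters; anything
    # else is skipped. This is a stand-in for whatever grammar the real
    # tokenizer implements.
    _pattern = re.compile(r'\d+|\w+')

    def __init__(self, text=''):
        self._tokens = iter(self._pattern.findall(text))

    def next(self):
        for value in self._tokens:
            kind = TokenType.NUMBER if value.isdigit() else TokenType.IDENT
            return Token(kind, value)
        return Token(TokenType.EOF, None)

With this sketch, get_result applied to the string 'foo 42' would produce two lines, IDENT('foo') and NUMBER('42'), and the loop in both examples terminates once next() starts returning the EOF token.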