def get_result(self, test):
    """Tokenize *test* and return the tokens, one per line.

    Each token (excluding the terminating EOF token) is rendered via
    ``str(token)``; the lines are joined with ``'\n'`` and the result has
    no trailing newline. Returns ``''`` when the input yields no tokens.

    Fixes vs. original: removes a dangling ``try:`` that had no matching
    ``except``/``finally`` (a SyntaxError as written), and replaces the
    quadratic ``result += ...`` / ``result[:-1]`` pattern with
    list-append + ``str.join``, which produces the identical value.
    """
    tokenizer = Tokenizer(text=test)
    rendered = []
    # Walrus loop: pull tokens until the tokenizer reports EOF.
    while (token := tokenizer.next()).type != TokenType.EOF:
        rendered.append(str(token))
    return "\n".join(rendered)
def print_tokens(self):
    """Print every token produced from ``self.file``, one per line.

    Iterates the tokenizer until it yields a token whose type is
    ``TokenType.EOF``; the EOF token itself is not printed.
    """
    tokenizer = Tokenizer(self.file)
    # Prime-and-advance loop (equivalent to the walrus-condition form).
    token = tokenizer.next()
    while token.type != TokenType.EOF:
        print(token)
        token = tokenizer.next()