def get_result(self, test):
    """Tokenize *test* and return its tokens, one per line.

    Reads tokens until EOF. On a lexing ``Error`` returns the error
    message instead, matching the other ``get_result`` variants in
    this file.

    :param test: source text to tokenize.
    :return: newline-joined token strings, or the error message.
    """
    tokenizer = Tokenizer(text=test)
    # Collect into a list and join once instead of repeated string
    # concatenation (avoids the trailing-'\n' trim and quadratic +=).
    lines = []
    try:
        while (token := tokenizer.next()).type != TokenType.EOF:
            lines.append(str(token))
        return '\n'.join(lines)
    # The original line was missing any except clause (a SyntaxError);
    # restored to mirror the sibling get_result implementations.
    except Error as e:
        return e.message
class CLI:
    """Minimal interactive shell that echoes the tokens of each input line."""

    def __init__(self):
        # One tokenizer instance reused for the whole session.
        self.tokenizer = Tokenizer()

    def run(self):
        """Prompt forever, printing each instruction's tokens space-separated."""
        while True:
            instruction = input("> ")
            self.tokenizer.tokenize(instruction)
            for token in self.tokenizer.tokens:
                print(token, end=' ')
            print("")
def get_result(self, test):
    """Run semantic analysis over *test*.

    Returns the ``Error`` message when analysis fails; returns ``None``
    (implicitly) on success, exactly like the original.
    """
    analyzer = SemanticAnalyzer(Parser(Tokenizer(text=test)), False)
    try:
        analyzer.analyze()
    except Error as e:
        return e.message
def get_result(self, test):
    """Parse *test* and return its AST rendered as a string.

    On a parse (or rendering) ``Error``, returns the error message.
    """
    parser = Parser(Tokenizer(text=test))
    try:
        # Both parsing and tree rendering stay inside the try so an
        # Error from either path yields its message, as before.
        ast = parser.parse()
        return AstVizGen(ast).return_str_tree()
    except Error as e:
        return e.message
def get_result(self, test):
    """Interpret *test* and return the computed result as a string.

    On an ``Error`` raised during evaluation, returns the error message.
    """
    interpreter = Interpreter(Parser(Tokenizer(text=test)))
    try:
        return str(interpreter.calc())
    except Error as e:
        return e.message
def print_tokens(self):
    """Print every token produced from ``self.file``, stopping at EOF."""
    tokenizer = Tokenizer(self.file)
    while True:
        token = tokenizer.next()
        if token.type == TokenType.EOF:
            break
        print(token)
def check_semantic(self):
    """Run the semantic analyzer over ``self.file``.

    The second SemanticAnalyzer argument is ``True`` here (vs. ``False``
    in the string-returning variant) — presumably a verbose/strict flag;
    confirm against SemanticAnalyzer's definition.
    """
    analyzer = SemanticAnalyzer(Parser(Tokenizer(self.file)), True)
    analyzer.analyze()
def print_ast(self):
    """Parse ``self.file`` and emit a visualization of its AST."""
    parser = Parser(Tokenizer(self.file))
    AstVizGen(parser.parse()).generate()
def calculate(self):
    """Interpret ``self.file`` and print the computed result."""
    interpreter = Interpreter(Parser(Tokenizer(self.file)))
    print(interpreter.calc())
def __init__(self):
    # Tokenizer created with no initial text; input is presumably supplied
    # later (e.g. via tokenize()) — confirm against Tokenizer's API.
    self.tokenizer = Tokenizer()