Example #1
 def get_result(self, test):
     result = ''
     tokenizer = Tokenizer(text=test)
     try:
         # Collect one token per line until EOF.
         while (token := tokenizer.next()).type != TokenType.EOF:
             result += str(token) + '\n'
         # Strip the trailing newline.
         return result[:-1]
     except Error as e:
         return e.message
Example #2
 def get_result(self, test):
     tokenizer = Tokenizer(text=test)
     parser = Parser(tokenizer)
     semantics = SemanticAnalyzer(parser, False)
     try:
         # Run semantic analysis; any failure is raised as an Error.
         semantics.analyze()
     except Error as e:
         return e.message
Example #3
 def get_result(self, test):
     tokenizer = Tokenizer(text=test)
     parser = Parser(tokenizer)
     try:
         # Parse the input and render the resulting AST as a string.
         tree = parser.parse()
         return AstVizGen(tree).return_str_tree()
     except Error as e:
         return e.message
Example #4
 def get_result(self, test):
     tokenizer = Tokenizer(text=test)
     parser = Parser(tokenizer)
     interpreter = Interpreter(parser)
     try:
         # Evaluate the input and return the result as a string.
         return str(interpreter.calc())
     except Error as e:
         return e.message
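
Examples #2-#4 differ only in which stage they exercise; the surrounding harness logic (build the front end, run one stage, map an Error to e.message) repeats in each. A sketch of factoring that out, using only names that appear in the examples above (run_stage itself and the lambda wiring are illustrative additions, not from the original code):

 def run_stage(test, stage):
     # Shared harness: build the front end, run one stage over it,
     # and convert a raised Error into its message. 'stage' is any
     # callable taking the Parser; this helper is an illustration,
     # not part of the original project.
     parser = Parser(Tokenizer(text=test))
     try:
         return str(stage(parser))
     except Error as e:
         return e.message

 # Reproduces Example #4's behaviour:
 result = run_stage('1 + 2', lambda p: Interpreter(p).calc())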
Example #5
 def print_tokens(self):
     # Print every token in the source file until EOF.
     tokenizer = Tokenizer(self.file)
     while (token := tokenizer.next()).type != TokenType.EOF:
         print(token)
Example #6
 def check_semantic(self):
     tokenizer = Tokenizer(self.file)
     parser = Parser(tokenizer)
     # No try/except here: semantic errors propagate to the caller.
     semantic = SemanticAnalyzer(parser, True)
     semantic.analyze()
Example #7
 def print_ast(self):
     tokenizer = Tokenizer(self.file)
     parser = Parser(tokenizer)
     tree = parser.parse()
     # Emit a visualization of the parsed AST.
     AstVizGen(tree).generate()
Example #8
 def calculate(self):
     tokenizer = Tokenizer(self.file)
     parser = Parser(tokenizer)
     interpreter = Interpreter(parser)
     # Run the interpreter over the file and print the result.
     print(interpreter.calc())
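
Taken together, the examples describe one pipeline: Tokenizer feeds Parser, which in turn feeds SemanticAnalyzer, AstVizGen, or Interpreter. A minimal end-to-end sketch under that assumption; the class names and Error type come from the examples above, while the input string and the fresh front end per stage (each example builds its own) are illustrative:

 def run_source(source):
     try:
         # Check the program first...
         SemanticAnalyzer(Parser(Tokenizer(text=source)), False).analyze()
         # ...then evaluate it with a fresh front end, since each
         # stage above consumes its own Tokenizer/Parser pair.
         return str(Interpreter(Parser(Tokenizer(text=source))).calc())
     except Error as e:
         return e.message

 print(run_source('1 + 2 * 3'))   # hypothetical input program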