Example #1
 def get_result(self, test):
     result = ''
     tokenizer = Tokenizer(text=test)
     try:
         # Emit one token per line until end of input.
         while (token := tokenizer.next()).type != TokenType.EOF:
             result += str(token) + '\n'
         return result[:-1]  # drop the trailing newline
     except Error as e:
         return e.message
Example #2
class CLI:
    def __init__(self):
        self.tokenizer = Tokenizer()

    def run(self):
        # Read-tokenize-print loop: echo each input line back as tokens.
        while True:
            instruction = input("> ")
            self.tokenizer.tokenize(instruction)
            for token in self.tokenizer.tokens:
                print(token, end=' ')
            print()
Example #3
 def get_result(self, test):
     tokenizer = Tokenizer(text=test)
     parser = Parser(tokenizer)
     semantics = SemanticAnalyzer(parser, False)
     try:
         # analyze() raises an Error for semantically invalid input.
         semantics.analyze()
     except Error as e:
         return e.message
Example #4
 def get_result(self, test):
     tokenizer = Tokenizer(text=test)
     parser = Parser(tokenizer)
     try:
         # Parse the input and render its AST as a string.
         tree = parser.parse()
         return AstVizGen(tree).return_str_tree()
     except Error as e:
         return e.message
Example #5
 def get_result(self, test):
     tokenizer = Tokenizer(text=test)
     parser = Parser(tokenizer)
     interpreter = Interpreter(parser)
     try:
         # Evaluate the parsed expression and return the result as text.
         return str(interpreter.calc())
     except Error as e:
         return e.message
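Examples #1 and #3 through #5 all catch Error and read a message attribute from it. The project's actual exception class is not shown here; a minimal sketch consistent with that usage:

class Error(Exception):
    # Sketch only: the real class likely records position and context too.
    def __init__(self, message):
        super().__init__(message)
        self.message = message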
Example #6
 def print_tokens(self):
     tokenizer = Tokenizer(self.file)
     # Print one token per line until end of input.
     while (token := tokenizer.next()).type != TokenType.EOF:
         print(token)
Example #7
 def check_semantic(self):
     tokenizer = Tokenizer(self.file)
     parser = Parser(tokenizer)
     semantic = SemanticAnalyzer(parser, True)
     # Raises an Error if the program is semantically invalid.
     semantic.analyze()
Example #8
 def print_ast(self):
     tokenizer = Tokenizer(self.file)
     parser = Parser(tokenizer)
     tree = parser.parse()
     # Generate a visualization of the parsed AST.
     AstVizGen(tree).generate()
Example #9
 def calculate(self):
     tokenizer = Tokenizer(self.file)
     parser = Parser(tokenizer)
     interpreter = Interpreter(parser)
     # Evaluate the file's contents and print the result.
     print(interpreter.calc())
Example #10
 def __init__(self):
     self.tokenizer = Tokenizer()
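Taken together, the snippets describe one pipeline: Tokenizer feeds Parser, and the parse tree is consumed by SemanticAnalyzer, AstVizGen, or Interpreter. A hypothetical end-to-end driver in the style of Examples #2 and #5 (the class names come from the snippets above; the repl() function itself is an assumption, not part of the project):

def repl():
    # Interactive loop: tokenize, parse, and evaluate each input line.
    while True:
        line = input("> ")
        tokenizer = Tokenizer(text=line)
        parser = Parser(tokenizer)
        interpreter = Interpreter(parser)
        try:
            print(interpreter.calc())
        except Error as e:
            print(e.message)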