import sys

from parser import MyParser
from check_and_eval import typecheck_expression, evaluate_expression
from term_defs import Seq

if __name__ == "__main__":
    parser = MyParser()
    print("Parser setup finished")

    # Each command-line argument is a source file: read it, parse it, then
    # typecheck and evaluate every top-level expression it contains.
    for arg in sys.argv[1:]:
        print("Interpreting file: ", str(arg))
        with open(arg, 'r') as src:
            contents = src.read()
        print(contents)

        ast = parser.parse(contents)
        print(ast)

        for expr in ast:
            print("")
            expr_type = typecheck_expression(expr, dict())
            print("Expression:")
            print("   ", expr)
            print("")
            print("Typecheck:")
            print("   ", expr_type)
            print("")
            result = evaluate_expression(expr)
            print("Evaluation:")
            print("   ", result)
            print("")
from lexer import MyLexer
from parser import MyParser

if __name__ == "__main__":
    lexer = MyLexer()
    parser = MyParser()

    # Simple read-tokenize-parse-print loop; empty input lines are skipped.
    while True:
        line = input('our language > ')
        if not line:
            continue
        tokens = lexer.tokenize(line)
        ast = parser.parse(tokens)
        print(ast)
_mylexer = MyLexer()
_myparser = MyParser()

if len(sys.argv) > 1:
    _file = sys.argv[1]
    # FIX: use a context manager so the file handle is always closed
    # (the original left the handle open).
    with open(_file, encoding="utf-8") as _src:
        _cool_program = _src.read()

    # FIX: narrowed the bare `except:` clauses to `except Exception:`.
    # The lexer/parser are expected to record their problems in their
    # `.errors` lists, which are checked below; any raised exception is
    # therefore deliberately suppressed here, not silently lost.
    try:
        _mylexer_result = _mylexer.tokenize(_cool_program)
    except Exception:
        pass
    if _mylexer.errors:
        print(_mylexer.errors[0])
        exit(1)

    try:
        myAst = _myparser.parse(_cool_program)
    except Exception:
        pass
    if _myparser.errors:
        print(_myparser.errors[0])
        exit(1)

    # SemanticTODO
    semantic_analyzer = MySemanticAnalyzer(myAst)
    context, scope = semantic_analyzer.analyze()
    # FIX: only exit with failure when semantic errors actually exist;
    # as written, exit(1) ran unconditionally after the loop, so even a
    # clean program terminated with a failure status. All errors are now
    # reported before exiting.
    if semantic_analyzer.errors:
        for e in semantic_analyzer.errors:
            print(e)
        exit(1)