def scan_file(fname, test_name):
    """Run one end-to-end compiler test over the source file *fname*.

    Lexes and parses the file, prints the semantic-analysis tables
    (declarations, variable usage, errors), generates LLVM IR, compiles
    it with llc/gcc, and executes the resulting binary.

    Relies on module-level state: ``dir`` (test directory -- NOTE(review):
    shadows the ``dir`` builtin; presumably set elsewhere in this module),
    ``scanner``, ``declarations``, ``usage``, ``errors``, and the global
    ``ast``, which is reset to None at the end.
    """
    print("-----------------------------Running " + test_name + "-----------------------------")
    # Read the file
    with open(os.path.join(dir, fname)) as f:
        code = f.read()
    # Lex the source code
    tokens = scanner.input(code)
    # Initialize the code generator
    codegen = CodeGen()
    parser = ProjectParser()
    global ast
    ast = parser.input(tokens)
    ast.eval()
    print("--------------------Declarations--------------------")
    for scope, item, line, item_type in declarations:
        print("%s: declare \"%s\", %s %s" % (line, item, scope, str(item_type)))
    print("--------------------Variable Usage--------------------")
    for scope, item, use_line, item_type, declared_line in usage:
        print("%s: use \"%s\", %s %s declared on %s" % (use_line, item, scope, str(item_type), declared_line))
    print("--------------------Errors--------------------")
    for item in errors:
        print(item)
    print("--------------------Generating IR--------------------")
    ast.ir_eval(codegen.module, codegen.builder, codegen.printf)
    codegen.create_ir()
    # BUGFIX: the original used fname[0:-4], which silently mangles the
    # name for any extension that is not exactly 3 characters long.
    save_name = os.path.splitext(fname)[0]
    codegen.save_ir(save_name + ".ll")
    print("--------------------Compiling IR--------------------")
    # NOTE(review): os.system with an interpolated filename is shell-injection
    # prone; acceptable for trusted test fixtures, but prefer
    # subprocess.run([...], shell=False) if fname can be arbitrary.
    os.system("llc -filetype=obj %s.ll" % save_name)
    os.system("gcc %s.o -static -o output" % save_name)
    print("--------------------Standard Output--------------------")
    os.system("./output")
    # Clear the global AST so the next test starts from a clean slate.
    ast = None
def main(filename, output='output'):
    """Compile *filename* and write the generated LLVM IR to ``<output>.ll``.

    Pipeline: read source -> lex -> build parser wired to the code
    generator's module/builder/printf -> parse & eval the AST -> emit IR.
    """
    with open(filename) as src:
        source_text = src.read()

    # Tokenize the raw source.
    token_stream = Lexer().get_lexer().lex(source_text)

    # The parser needs the codegen's LLVM handles up front.
    codegen = CodeGen()
    grammar = Parser(codegen.module, codegen.builder, codegen.printf)
    grammar.parse()

    # Evaluating the parsed AST populates the LLVM module.
    grammar.get_parser().parse(token_stream).eval()

    codegen.create_ir()
    codegen.save_ir("{}.ll".format(output))
from lexer import Lexer
from parser import Parser
from codegen import CodeGen
import os
import sys

# Entry point: compile the file given as the first CLI argument and write
# the generated LLVM IR next to it as <basename>.ll.
# BUGFIX: `print sys.argv[1]` was Python 2 statement syntax (a SyntaxError
# under Python 3, which the rest of this file targets), and
# `sys.argv[1].split(".")` raised/mis-split for names containing more than
# one dot; os.path.splitext handles both correctly.
source_path = sys.argv[1]
print(source_path)
fname = os.path.splitext(source_path)[0]

with open(source_path) as f:
    text_input = f.read()

lexer = Lexer().get_lexer()
tokens = lexer.lex(text_input)

codegen = CodeGen()
module = codegen.module
builder = codegen.builder
printf = codegen.printf

pg = Parser(module, builder, printf)
pg.parse()
parser = pg.get_parser()
parser.parse(tokens).eval()

codegen.create_ir()
codegen.save_ir(fname + ".ll")