def main(argv):
    """Compile the C source file named by ``argv[1]``.

    Pipeline: lex/parse -> parse-tree DOT dump -> AST construction ->
    semantic validation -> optimisation -> AST DOT dump -> code generation.
    An optional ``argv[2]`` is forwarded to ``CodeGenerator`` (output
    target).  Returns early (``None``) on syntax or semantic errors.
    """
    inputFile = FileStream(argv[1])
    lexer = CLexer(inputFile)
    stream = CommonTokenStream(lexer)
    parser = CParser(stream)
    tree = parser.prog()
    # ANTLR has already reported syntax errors on stderr; just stop.
    if parser.getNumberOfSyntaxErrors():
        return

    # Visualise parse tree
    parseTreeDotGen = ParseTreeDotGenerator()
    parseTreeDotGen.generateDOT(parser, tree, "output/parse_tree.gv",
                                render=False)

    # Build AST
    astBuilder = ASTBuilder()
    AST = astBuilder.visit(tree)

    # Semantic validation
    semanticValidator = SemanticValidator()
    AST.accept(semanticValidator)

    # Abort on semantic errors, printing each one.
    if semanticValidator.errors:
        for error in semanticValidator.errors:
            print("ERROR: " + error)
        return

    # Code optimiser (reuses the validator's populated symbol table)
    optimiser = Optimiser(semanticValidator.symbolTable)
    AST.accept(optimiser)

    # Print warnings, if any (loop is a no-op when the list is empty)
    for warning in optimiser.warnings:
        print("WARNING: " + warning)

    # Visualise AST
    # NOTE(review): the class itself is passed here, unlike the
    # accept(instance) calls above — confirm DotGraphBuilder is meant
    # to be used uninstantiated.
    dotGraph = AST.visit(DotGraphBuilder)
    dotGraph.render("output/ast.gv", view=False)

    # Code generator; optional argv[2] selects the output file.
    if len(argv) >= 3:
        codeGenerator = CodeGenerator(optimiser.symbolTable, argv[2])
    else:
        codeGenerator = CodeGenerator(optimiser.symbolTable)
    AST.accept(codeGenerator)
def semanticAnalyse(self, file):
    """Parse *file* (relative to this module), run semantic validation,
    and return the list of semantic errors found (empty when valid)."""
    sourcePath = os.path.dirname(os.path.abspath(__file__)) + "/" + file
    tokenStream = CommonTokenStream(CLexer(FileStream(sourcePath)))
    parseTree = CParser(tokenStream).prog()

    ast = ASTBuilder().visit(parseTree)

    validator = SemanticValidator()
    ast.accept(validator)
    return validator.errors
def semanticAnalyse(self, file):
    """Parse *file* (relative to this module), validate and optimise it.

    Returns a 3-tuple: (optimiser warnings, deep copy of the AST before
    optimisation, the optimised AST).
    """
    sourcePath = os.path.dirname(os.path.abspath(__file__)) + "/" + file
    tokenStream = CommonTokenStream(CLexer(FileStream(sourcePath)))
    parseTree = CParser(tokenStream).prog()

    ast = ASTBuilder().visit(parseTree)
    # Snapshot the unoptimised tree so callers can diff before/after.
    pristineAst = deepcopy(ast)

    validator = SemanticValidator()
    ast.accept(validator)

    optimiser = Optimiser(validator.symbolTable)
    ast.accept(optimiser)

    return optimiser.warnings, pristineAst, ast