def main():
    """Compile ``example.gg``: lex, parse, emit LLVM IR to disk, and run it."""
    # Load the whole source program into memory.
    with open('example.gg', 'r') as src:
        program_text = src.read()

    # Lexical analysis: turn raw text into a token stream.
    token_stream = Lexer().get_lexer().lex(program_text)

    # Build the parser around a shared code generator, then parse.
    # Parsing drives code generation via the returned AST's generate().
    codegen = CodeGen()
    parser_gen = Parser(codegen)
    parser_gen.parse()
    lr_parser = parser_gen.get_parser()
    lr_parser.parse(token_stream, state=ParserState()).generate()

    # Finalize the module, persist the IR, and execute it.
    codegen.create_ir()
    codegen.save_ir('output/output.ll')
    print(codegen.run(False))
if (i > 0) { print("-> Call User Defined Function !"); userDefined(); } else { print(); print("Input value equal to or less than 0 !"); } } main(); """ lexer = Lexer().build() # Build the lexer using LexerGenerator tokens: LexerStream try: tokens = lexer.lex(call_declared_functions) # Stream the input to analysis the lexical syntax tokenType = map(lambda x: x.gettokentype(), copy(tokens)) tokenName = map(lambda x: x.getstr(), copy(tokens)) pprint(list(copy(tokens))) # pprint(list(copy(tokenType))) # pprint(list(copy(tokenName))) except (BaseException, Exception): traceback.print_exc() finally: print("Finish lexical analysis !") SymbolTable = ParserState() syntaxRoot: Node semanticRoot = Node("main") try: syntaxRoot = Node("main", Parser(syntax=True).build().parse(copy(tokens), state=SymbolTable)) # Get syntax tree !
from compiler.lexer import Lexer
from compiler.parser import Parser, ParserState
from compiler.JSONparsedTree import Node, write
from compiler.codegen import CodeGen
from rply.lexer import LexerStream
from copy import copy
from pprint import pprint
import traceback
import json

# Read the whole source program up front.
# Fix: use a context manager so the file handle is closed deterministically
# (the original `open(...).read()` leaked the handle).
with open('input.code') as source_file:
    input_file = source_file.read()

# --- Lexical analysis --------------------------------------------------
lexer = Lexer().build()
tokens: LexerStream
try:
    tokens = lexer.lex(input_file)
    # LexerStream is single-pass: every debug dump iterates a copy so the
    # original stream stays consumable by the parser later in the script.
    tokenType = map(lambda x: x.gettokentype(), copy(tokens))
    tokenName = map(lambda x: x.getstr(), copy(tokens))
    pprint(list(copy(tokens)))
except (BaseException, Exception):
    # Deliberate best-effort: log any lexing failure and keep going so the
    # compile log below is always printed.
    traceback.print_exc()
finally:
    print("\n\nCompile log:")

# --- Code-generation scaffolding ---------------------------------------
codegen = CodeGen()
module = codegen.module
builder = codegen.builder
printf = codegen.printf

# Symbol table shared with the parser; the syntax-tree root is assigned
# further down the script (past this chunk).
SymbolTable = ParserState()
syntaxRoot: Node
from compiler.lexer import Lexer

if __name__ == '__main__':
    # Tokenize a fixed sample string and dump the resulting tokens.
    sample_lexer = Lexer('hello world')
    print(sample_lexer.lex())