Example #1
    def compile_code(code, output_type="exe", compiler_opts=CompilerOptions()):
        """
        supported_output_types = [
            "exe",
            "ll",
            "wasm",
            "ast",
            "sema",
            "lowered_ast",
            "tokens",
        ]
        """

        if output_type == "tokens":
            tokens = Lexer(code, compiler_opts).lex()
            result = json_dumps(tokens)

        elif output_type == "ast":
            ast = Parser.from_code(code, compiler_opts).parse()
            result = json_dumps(ast)

        elif output_type == "sema":
            tokens = Lexer(code, compiler_opts).lex()
            ast = Parser(tokens, compiler_opts).parse()
            semantic_info = SemanticAnalyzer(ast, tokens,
                                             compiler_opts).analyze()
            result = json_dumps(semantic_info)

        elif output_type == "ll":
            compiler_opts.target_code = "llvm"
            tokens = Lexer(code, compiler_opts).lex()
            ast = Parser(tokens, compiler_opts).parse()
            semantic_info = SemanticAnalyzer(ast, tokens,
                                             compiler_opts).analyze()
            llvm = LLVMCodegen(ast, semantic_info).generate()
            result = llvm.dumps()

        elif output_type == "wasm":
            compiler_opts.target_code = "wasm"
            tokens = Lexer(code, compiler_opts).lex()
            ast = Parser(tokens, compiler_opts).parse()
            semantic_info = SemanticAnalyzer(ast, tokens,
                                             compiler_opts).analyze()
            # no wasm backend is invoked here; this branch currently dumps
            # the semantic info, just like the "sema" branch
            result = json_dumps(semantic_info)

        else:
            click.echo("Unimplemented Output Type!")
            return

        click.echo(result)
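
A minimal call-site sketch for compile_code (the snippet does not show one); the source string is made up, and the other names come from the code above:

    # Hypothetical usage; Lexer, Parser and CompilerOptions are assumed to be
    # importable exactly as used inside compile_code above.
    source = 'print("hello")'
    compile_code(source, output_type="tokens")  # echoes the token stream as JSON
    compile_code(source, output_type="ll")      # echoes textual LLVM IR
    compile_code(source, output_type="exe")     # listed in the docstring but not
                                                # handled: prints "Unimplemented
                                                # Output Type!"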
Example #2
    def compile(self):
        tknizer = Tokenizer(self.input_code)
        parser = Parser(tknizer.run())
        transformer = Transformer(parser.run())
        code_generator = CodeGenerator(transformer.run())

        return code_generator.run()
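
A hypothetical driver for this method; the enclosing class is not shown, so the Compiler name and its input_code constructor argument are assumptions:

    # Assumed wrapper: a class that stores the source as self.input_code
    # and exposes the compile() method above.
    compiler = Compiler("let x = 1 + 2;")
    print(compiler.compile())  # Tokenizer -> Parser -> Transformer -> CodeGenerator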
Example #3
 def test_parser_on_inputfile(self):
     path = self.__get_path("example3.nl")
     with open(path, "r") as f:
         parser = Parser(inFile=f, debug=0)
     expected_nodes = Node("compilation_unit", [
         Node("external_declaration", [
             Node("declaration", [
                 Node("declaration_specifier", [Node("type")]),
                 Node("init_declarator_list", [
                     Node("init_declarator", [
                         Node("declarator", [Node("x")]),
                         Node("="),
                         Node("assignment_expression", [
                             Node("math_expression", [
                                 Node("postfix_expression", [
                                     Node("primary_expression", [Node("4")])
                                 ])
                             ])
                         ])
                     ])
                 ]),
                 Node(";")
             ])
         ])
     ])
     nodes = parser.getNodes()
     self.assertEqual(nodes, expected_nodes)
Example #4
def main():
    parser = Parser()
    with open('../doc/programs/gcd.txt') as f:
        parser.load_program(f.read())
        pcodes = parser.analyze()
        if pcodes:
            interpreter = Interpreter()
            interpreter.interpret(parser.pcode.get())
Example #5
 def compile(chunk, chunk_name):
     parser = Parser()
     lexer = Lexer(chunk, chunk_name)
     ast = parser.parse_block(lexer)
     # print(ast)
     proto = Codegen.gen_proto(ast)
     # proto.print_code()
     LuaCompiler.set_source(proto, chunk_name)
     return proto
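
A sketch of a call to this compile helper; the file name and the @-prefixed chunk name are assumptions in the style of Lua source names:

    # Hypothetical usage: compile a source chunk into a function prototype.
    with open("main.lua") as f:
        proto = compile(f.read(), "@main.lua")
    # proto carries the generated code, with its source name attached via
    # LuaCompiler.set_source above.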
Example #6
def run_typecheck_test(test, compiler: Compiler) -> bool:
    astparser = Parser()
    ast = compiler.parse(test, astparser)
    if len(astparser.errors) > 0:
        return False
    tc = TypeChecker()
    compiler.visit(ast, tc)
    ast_json = ast.toJSON()
    with test.with_suffix(".py.ast.typed").open("r") as f:
        correct_json = json.load(f)
        return ast_equals(ast_json, correct_json)
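
A hypothetical harness around run_typecheck_test; the tests/typecheck directory is an assumption, but the pathlib.Path test paths match the .with_suffix() call above:

    from pathlib import Path

    # Assumed layout: each *.py case has a recorded *.py.ast.typed file beside it.
    compiler = Compiler()
    for test in sorted(Path("tests/typecheck").glob("*.py")):
        ok = run_typecheck_test(test, compiler)
        print(f"{test.name}: {'PASS' if ok else 'FAIL'}")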
Example #7
def api_parser():
    s = StringIO()
    t = StringIO()
    with redirect_stdout(s), redirect_stderr(t):
        parser = Parser()
        program = request.form['code'].strip()
        parser.load_program(program)
        parser.analyze()
    if t.getvalue() != '':
        return t.getvalue()
    else:
        return s.getvalue()
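
The handler reads request.form['code'], which points to a Flask-style app; a hypothetical registration (the app object and route path are assumptions) could look like:

    from flask import Flask

    app = Flask(__name__)
    # POST the source under the form field "code"; the response is the parser's
    # captured stdout, or its stderr if anything was written there.
    app.add_url_rule("/api/parser", "api_parser", api_parser, methods=["POST"])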
Example #8
def run():
    lexer = Lexer('primeiro_portugolo.ptgl')
    parser = Parser(lexer)
    # token = lexer.next_token()
    #
    # while token and token.tag != Tag.END_OF_FILE:
    #     print(str(token))
    #     token = lexer.next_token()
    #
    # print("\n\n\nSymbol Table:")
    # lexer.print_symbol_table()

    parser.compilador()
Example #9
    def __init__(self, path, file):
        # collect all results
        results = []
        # we know exactly the structure of the document
        with open(os.path.join(path, file), "r") as f:
            # the first line holds the dimensions and the counts of
            # definitions and programs
            self.rows, self.cols, self.d, self.e = f.readline().split(" ")
            self.rows = int(self.rows)
            self.cols = int(self.cols)

            # read the labyrinth
            labyrinth = []
            for i in range(self.rows):
                # rstrip removes the trailing newline
                labyrinth.append(list(f.readline().rstrip()))
            self.labyrinth = labyrinth

            # create symbol table and save procedure definitions
            proc_defs = SymbolTable()
            for i in range(int(self.d)):
                p = Parser(f.readline().rstrip())
                name, program = p.proc_def()
                simplify(program)
                proc_defs.add_definition(name, program)

            # run and interpret different programs
            for i in range(int(self.e)):
                x, y, o = f.readline().rstrip().split(" ")
                p = Parser(f.readline().rstrip())
                ast = p.program()
                simplify(ast)
                # -1 since the indexes start at 1
                results.append(
                    self.interpret_labyrinth_for_program(
                        labyrinth,
                        int(x) - 1,
                        int(y) - 1, o, ast, proc_defs))
        self.results = results
Example #10
    def compile(self, data):
        machine = self.find_machine(self.options)

        ast = Parser(data, errors=self.errors).run()
        VarCheck(ast, machine.builtins, errors=self.errors).run()
        Flatten(ast, errors=self.errors).run()
        Reduce(ast, errors=self.errors).run()
        TailRecursion(ast, errors=self.errors).run()
        Inline(ast, errors=self.errors).run()
        for f in ast.symbol_table.symbols.values():
            RegisterAllocation(f.cfg, errors=self.errors).run()

        lines = Linearise(ast, errors=self.errors).run()
        output = Render(lines, machine, errors=self.errors).run()
        return output
Example #11
def main():
    with open('example.gg', 'r') as f:
        text_input = f.read()

    lexer = Lexer().get_lexer()
    tokens = lexer.lex(text_input)

    cg = CodeGen()
    pg = Parser(cg)
    pg.parse()
    parser = pg.get_parser()
    parser.parse(tokens, state=ParserState()).generate()

    cg.create_ir()
    cg.save_ir('output/output.ll')
    print(cg.run(False))
Example #12
def run_parse_test(test, compiler: Compiler, bad=True) -> bool:
    # if bad=True, then test cases prefixed with bad are expected to fail
    astparser = Parser()
    ast = compiler.parse(test, astparser)
    # check that parsing error exists
    if bad and test.name.startswith("bad"):
        return len(astparser.errors) > 0
    if len(astparser.errors) > 0:
        return False
    ast_json = ast.toJSON()
    try:
        with test.with_suffix(".py.ast").open("r") as f:
            correct_json = json.load(f)
            return ast_equals(ast_json, correct_json)
    except FileNotFoundError:
        with test.with_suffix(".py.ast.typed").open("r") as f:
            correct_json = json.load(f)
            return ast_equals(ast_json, correct_json)
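
A matching harness sketch for these parse tests; the tests/parse directory is an assumption:

    from pathlib import Path

    # Assumed layout: "bad*" cases must produce parse errors; the rest must
    # match their recorded .py.ast (or .py.ast.typed) JSON.
    compiler = Compiler()
    for test in sorted(Path("tests/parse").glob("*.py")):
        print(f"{test.name}: {'PASS' if run_parse_test(test, compiler) else 'FAIL'}")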
Example #13
    def _compile_script(
            self,
            input_stream: io.IOBase,
            interpreter: typing.Optional[Interpreter] = None,
            source_location: typing.Optional[SourceLocation] = None,
    ) -> typing.Tuple[SourceLocation, Executable]:
        scanner = Scanner(input_stream)
        parser = Parser(scanner)
        bytecode_generator = BytecodeGenerator(parser)
        source_location2 = scanner.get_source_location()

        try:
            executable = bytecode_generator.get_executable()
        except CompilerError as error:
            message = "" if interpreter is None \
                         else self._get_stack_trace_string(interpreter, source_location)
            message += "compilation error: " + str(error)
            raise SystemExit(message)

        return source_location2, executable
Example #14
def compile_il():
	with open("test.il", "r") as f:
		file_contents = f.read()

	tokens = lexer.lex(file_contents)

	parser = Parser(tokens)
	ast = parser.parse()
	funcs = parser.get_func_list()
	#print_ast(ast)

	cg = CodeGen(ast, funcs)
	bytecode = cg.generate()  # "bytecode" rather than "bytes", which shadows the built-in

	with open("test.ix", "wb") as output:
		output.write(bytecode)
Example #15
 def _test_parse_impl(self, buf, expected_exprs):
     lexer = Lexer(buf)
     tokens = list()
     while True:
         tok = lexer.lex_token()
         tokens.append(tok)
         if tok.type == TokenType.EOF:
             break
     self.parser = Parser(tokens)
     while True:
         expr = self.parser.parse_top_level_expr()
         if expr is None:
             break
         self.exprs.append(expr)
     for e in self.exprs:
         print(e)
     self.assertEqual(len(self.exprs), len(expected_exprs))
     for e, exp in zip(self.exprs, expected_exprs):
         self.assertEqual(e, exp)
Example #16
    def compile(self, source):
        scanner = Scanner(source)

        parser = Parser(scanner)
        program = parser.parse()

        tree_to_json = TreeToJson()
        obj = program.accept(tree_to_json)
        print(obj)

        logger.ACTIVE = True
        logger.DEBUG = False

        # scope_analyzer = ScopeAnalyzer()

        # kept for the ScopeAnalyzer pass that is currently commented out
        program_copy = copy.deepcopy(program)

        # program_copy.accept(scope_analyzer)

        linear_generator = LinearGenerator()
        code = program.accept(linear_generator)

        return code
Example #17
debug_enabled = False
files = sys.argv[1:]

if files and files[0] == '--debug':
    debug_enabled = True
    files = files[1:]

generated = ""
for filename in files:
    with open(filename) as f:
        tokenizer = Tokenizer(f.read())
    tokens = tokenizer.tokenize()
    if debug_enabled:
        print('>>> parsed tokens:\n%s\n' % list(map(lambda x: x.value, tokens)))

    parser = Parser(tokens)
    tree = parser.parse()
    if debug_enabled:
        print('>>> parse tree:\n%s\n' % tree)

    generator = Generator(tree)
    generated = generated + '\n' + generator.generate(tree)

if debug_enabled:
    print('>>> generated code:\n%s' % generated)
    exit()

RUNTIME = """
function add(x, y) { return x + y; }
function subtract(x, y) { return x - y; }
function multiply(x, y) { return x * y; }
Example #18
    traceback.print_exc()
finally:
    print("\n\nCompile log:")

codegen = CodeGen()
module = codegen.module
builder = codegen.builder
printf = codegen.printf

SymbolTable = ParserState()
syntaxRoot: Node
semanticRoot = Node("main")
has_errors = False
try:
    Parser(module, builder,
           printf).build().parse(copy(tokens),
                                 state=SymbolTable).eval(semanticRoot)
except BaseException as e:
    # traceback.print_exc()
    print('Error occurred: %s' % e)
    has_errors = True
finally:
    write(semanticRoot, "SemanticAnalyzer")

    codegen.create_ir()
    codegen.save_ir("output.ll")

    if not has_errors:
        print('Compile complete without errors')
    else:
        print('Compile complete with errors!')
Example #19
import argparse

from graphviz import Digraph

import compiler.tree_printer
from compiler.lexer import Lexer
from compiler.names import Scope
from compiler.parser import Parser

lexer = Lexer()
lexer.build()
parser = Parser(lexer.tokens)
parser.build()
scope = Scope()


def print_tokens(code):
    lexer.lexer.input(code)
    tok = lexer.lexer.token()
    while tok:
        print(tok)
        tok = lexer.lexer.token()


def run(code, opt, ast_file_name=None, repl_mode=False):
    if repl_mode and not code.endswith(";"):
        code += ";"

    res = parser.parse(lexer, code)

    if res is not None:
Example #20
from compiler.lexer import Lexer
from compiler.parser import Parser
from compiler.codegen import LLVMCodegenVisitor

code = '''
def add(a: int, b: int) -> int:
    return a + b
'''

tokens = Lexer(code).lex()
ast = Parser(tokens).parse()
module = LLVMCodegenVisitor(ast).start_visit()

print(module)
Example #21
def test_parser(chunk, chunkname):
    parser = Parser()
    lexer = Lexer(chunk, chunkname)
    ast = parser.parse_block(lexer)
    print(ast)
Example #22
        print "compile options:"
        print "  no_reuse      : prevent register resuse"
        print "  no_concurrent : prevent concurrency"
        print
        print "tool options:"
        print "  iverilog      : compiles using the icarus verilog compiler"
        print "  run           : runs compiled code, used with ghdl or modelsimoptions"
        sys.exit(-1)

    #parse command line
    input_file = sys.argv[-1]
    reuse = "no_reuse" not in sys.argv

    try:
        # realloc is False because we want to do loop unrolling, so that
        # optimisation doesn't make sense here
        parser = Parser(input_file, False)
        process = parser.parse_process()

        # just for testing
        if True:
            unrollTypeForForStatement = Unroller.getUnrollTypeOfForStatement(
                process.main.statement.statements[1])

            if unrollTypeForForStatement == Unroller.EnumForStatementUnrollType.A:
                unrollFactor = 4

                tempForStatement = process.main.statement.statements[1]

                Unroller.unrollForStatementTypeA(unrollFactor,
                                                 tempForStatement)
Example #23
"""

lexer = Lexer().build()  # Build the lexer using LexerGenerator
tokens: LexerStream
try:
    tokens = lexer.lex(call_declared_functions)  # Stream the input to analyse its lexical structure
    tokenType = map(lambda x: x.gettokentype(), copy(tokens))
    tokenName = map(lambda x: x.getstr(), copy(tokens))
    pprint(list(copy(tokens)))
    # pprint(list(copy(tokenType)))
    # pprint(list(copy(tokenName)))
except BaseException:
    traceback.print_exc()
finally:
    print("Finish lexical analysis !")

SymbolTable = ParserState()
syntaxRoot: Node
semanticRoot = Node("main")
try:
    syntaxRoot = Node("main", Parser(syntax=True).build().parse(copy(tokens), state=SymbolTable))  # Get syntax tree !
    Parser().build().parse(copy(tokens), state=SymbolTable).eval(semanticRoot)  # Get semantic tree !
except BaseException:
    traceback.print_exc()
finally:
    write(syntaxRoot, "SyntaxAnalyzer")
    write(semanticRoot, "SemanticAnalyzer")
    print("------------------------------Declared Variables & Functions are:------------------------------")
    pprint(SymbolTable.variables)
    pprint(SymbolTable.functions)
Example #24
def main():
    parser = argparse.ArgumentParser(description='Chocopy frontend')
    parser.add_argument('-t',
                        dest='typecheck',
                        action='store_false',
                        help='do not typecheck the AST')
    parser.add_argument('-o',
                        dest='output',
                        action='store_false',
                        help="output AST to stdout instead of to a JSON file")
    parser.add_argument('--test-all',
                        dest='testall',
                        action='store_true',
                        help="run all test cases")
    parser.add_argument('--test-parse',
                        dest='testparse',
                        action='store_true',
                        help="run parser test cases")
    parser.add_argument('--test-tc',
                        dest='testtc',
                        action='store_true',
                        help="run typechecker test cases")
    parser.add_argument('infile', nargs='?', type=str, default=None)
    parser.add_argument('outfile', nargs='?', type=str, default=None)
    args = parser.parse_args()

    compiler = Compiler()

    if args.testall:
        run_all_tests(compiler)
        return

    if args.testparse:
        run_parse_tests(compiler)
        return

    if args.testtc:
        run_typecheck_tests(compiler)
        return

    infile = args.infile
    outfile = args.outfile
    if args.infile is None:
        print("Error: must specify input file")
        parser.print_help()
        return

    if args.outfile is None:
        if args.typecheck:
            outfile = infile + ".ast.typed"
        else:
            outfile = infile + ".ast"

    astparser = Parser()
    tree = compiler.parse(infile, astparser)

    if len(astparser.errors) > 0:
        for e in astparser.errors:
            print(e)
    elif args.typecheck:
        tc = TypeChecker()
        compiler.visit(tree, tc)
        if len(tc.errors) > 0:
            for e in tc.errors:
                print(e)

    if args.output:
        ast_json = tree.toJSON()
        with open(outfile, "w") as f:
            json.dump(ast_json, f)
    else:
        if isinstance(tree, Node):
            print(json.dumps(tree.toJSON()))
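
Assuming this driver lives in a file such as main.py (the name is made up), the flag handling above supports invocations like:

    # python main.py program.py       -> writes program.py.ast.typed
    # python main.py -t program.py    -> parse only, writes program.py.ast
    # python main.py -o program.py    -> print the AST JSON to stdout
    # python main.py --test-all       -> run all bundled test suites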