def test_and_expr_function_parses_and_expression_successfully():
    """and_expr must handle a unary operand, chained `&`, and mixed operators."""
    # TODO: More complex examples

    def parse(code):
        return Parser.from_code(code).and_expr()

    assert parse("5²") == UnaryExpr(Integer(0), Operator(1))
    # chained `&` groups left-to-right: (0xff & 3) & 4
    assert parse("0xff&3&4") == BinaryExpr(
        BinaryExpr(Integer(0), Operator(1), Integer(2)), Operator(3), Integer(4)
    )
    # the shift on the right stays nested under the `&`
    assert parse("5&5>>3") == BinaryExpr(
        Integer(0), Operator(1), BinaryExpr(Integer(2), Operator(3), Integer(4))
    )
    assert parse("5²<<-4&-5-+3") == BinaryExpr(
        BinaryExpr(
            UnaryExpr(Integer(0), Operator(1)),
            Operator(2),
            UnaryExpr(Integer(4), Operator(3)),
        ),
        Operator(5),
        BinaryExpr(
            UnaryExpr(Integer(7), Operator(6)),
            Operator(8),
            UnaryExpr(Integer(10), Operator(9)),
        ),
    )
def test_parser_on_inputfile(self):
    """Parse example3.nl and check the parser yields a node tree.

    NOTE(review): previously this test built `expected_nodes` and only
    printed the result without asserting anything. It now at least asserts a
    result was produced; comparing against `expected_nodes` is still TODO
    until Node equality semantics are confirmed.
    """
    path = self.__get_path("example3.nl")
    with open(path, "r") as f:
        parser = Parser(inFile=f, debug=0)
        # Hand-built tree — presumably the expected shape for example3.nl
        # (a declaration initializing `x` to `4`); verify against the file.
        expected_nodes = Node("compilation_unit", [
            Node("external_declaration", [
                Node("declaration", [
                    Node("declaration_specifier", [Node("type")]),
                    Node("init_declarator_list", [
                        Node("init_declarator", [
                            Node("declarator", [Node("x")]),
                            Node("="),
                            Node("assignment_expression", [
                                Node("math_expression", [
                                    Node("postfix_expression", [
                                        Node("primary_expression", [Node("4")])
                                    ])
                                ])
                            ])
                        ])
                    ]),
                    Node(";")
                ])
            ])
        ])
        nodes = parser.getNodes()
        print(nodes)
        self.assertIsNotNone(nodes)
def test_or_expr_function_parses_or_expression_successfully():
    """or_expr must handle a unary operand, chained `|`, and mixed operators."""
    # TODO: More complex examples

    def parse(code):
        return Parser.from_code(code).or_expr()

    assert parse("5²") == UnaryExpr(Integer(0), Operator(1))
    # chained `|` groups left-to-right: (0xff | 3) | 4
    assert parse("0xff|3|4") == BinaryExpr(
        BinaryExpr(Integer(0), Operator(1), Integer(2)), Operator(3), Integer(4)
    )
    assert parse("5|5||3") == BinaryExpr(
        Integer(0), Operator(1), BinaryExpr(Integer(2), Operator(3), Integer(4))
    )
    assert parse("5²||-4|-5<<+3") == BinaryExpr(
        BinaryExpr(
            UnaryExpr(Integer(0), Operator(1)),
            Operator(2),
            UnaryExpr(Integer(4), Operator(3)),
        ),
        Operator(5),
        BinaryExpr(
            UnaryExpr(Integer(7), Operator(6)),
            Operator(8),
            UnaryExpr(Integer(10), Operator(9)),
        ),
    )
def compile(self):
    """Run the full pipeline: tokenize -> parse -> transform -> generate code."""
    stage_output = Tokenizer(self.input_code).run()
    stage_output = Parser(stage_output).run()
    stage_output = Transformer(stage_output).run()
    return CodeGenerator(stage_output).run()
def test_shift_expr_function_parses_shift_expression_successfully():
    """shift_expr must handle a unary operand, chained shifts, and mixed operators."""
    # TODO: More complex examples

    def parse(code):
        return Parser.from_code(code).shift_expr()

    assert parse("5²") == UnaryExpr(Integer(0), Operator(1))
    # chained shifts group left-to-right: (0xff >> 3) << 4
    assert parse("0xff>>3<<4") == BinaryExpr(
        BinaryExpr(Integer(0), Operator(1), Integer(2)), Operator(3), Integer(4)
    )
    # the multiplication on the right stays nested under the shift
    assert parse("5>>5*3") == BinaryExpr(
        Integer(0), Operator(1), BinaryExpr(Integer(2), Operator(3), Integer(4))
    )
    assert parse("5²+-4<<-5*+3") == BinaryExpr(
        BinaryExpr(
            UnaryExpr(Integer(0), Operator(1)),
            Operator(2),
            UnaryExpr(Integer(4), Operator(3)),
        ),
        Operator(5),
        BinaryExpr(
            UnaryExpr(Integer(7), Operator(6)),
            Operator(8),
            UnaryExpr(Integer(10), Operator(9)),
        ),
    )
def main():
    """Load the GCD example program, analyze it, and interpret the p-code."""
    parser = Parser()
    with open('../doc/programs/gcd.txt') as source:
        parser.load_program(source.read())
    # Only interpret when analysis produced p-codes.
    if parser.analyze():
        Interpreter().interpret(parser.pcode.get())
def test_compile(program, grammar, start_sym, out):
    """Drive the LL compiler over *program*; print a traceback on failure.

    Fixes: the bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    narrowed to `except Exception`. Print converted to function form for
    consistency with the Python 3 code elsewhere in this file.
    """
    from compiler.parser import Parser
    try:
        p = Parser(program, grammar, start_sym, out)
        p.ll_compiler()
    except Exception:
        print(traceback.format_exc())
def test_program(self):
    """Write a tiny program to the input file, parse it, then clean up.

    Fixes: removed the unused local `t1 = None`.
    """
    with open(self.input_file, 'w') as f:
        f.write('BEGIN A := BB + 314 + A; END$')
    with open(self.input_file, 'r') as f:
        p = Parser(f)
        # Exercises the parser; success == no exception raised.
        p.program()
    os.remove(self.input_file)
def test_newline_parses_newlines_successfully():
    """LF, CRLF, and lone CR must each parse as a single Newline token."""
    for code in ("\n", "\r\n", "\r"):
        assert Parser.from_code(code).newline() == Newline(0)
def interpret_calculation(self, calculation, expected=0):
    """Run `var i = <calculation>` through the pipeline and check the final `i`."""
    tokens = Tokenizer().tokenize('var i = ' + calculation)
    ast = Parser().parse(tokens)
    interpreter = Interpreter()
    interpreter.interpret(ast)
    self.assertEqual(interpreter.vars['i'], expected)
def compile(chunk, chunk_name):
    """Compile a Lua chunk to a function prototype tagged with its source name."""
    parser = Parser()
    ast = parser.parse_block(Lexer(chunk, chunk_name))
    # print(ast)
    proto = Codegen.gen_proto(ast)
    # proto.print_code()
    LuaCompiler.set_source(proto, chunk_name)
    return proto
def test_prefixed_string_parses_prefixed_string_literals_successfully():
    """Single-, double-, and triple-quoted prefixed strings must all parse."""
    sources = (
        "u'hello\t there'",
        'rf" This is a new world"',
        "r'''\n This is a new world\n'''",
        'rf"""\n This is a new world\n"""',
    )
    for src in sources:
        assert Parser.from_code(src).prefixed_string() == PrefixedString(0)
def test_byte_string_parses_byte_string_literals_successfully():
    """Single-, double-, and triple-quoted byte strings must all parse."""
    sources = (
        "b'hello\t there'",
        'rb" This is a new world"',
        "b'''\n This is a new world\n'''",
        'rb"""\n This is a new world\n"""',
    )
    for src in sources:
        assert Parser.from_code(src).byte_string() == ByteString(0)
def test_not_test_function_parses_not_test_successfully():
    """not_test on a plain expression should fall through to the inner parser.

    Fixes: removed a leftover debug `print` of the intermediate results.
    """
    # TODO: More complex examples; result1-result3 still need expected trees.
    result0 = Parser.from_code("5²").not_test()
    # These currently only exercise the parser (no assertions yet — TODO).
    result1 = Parser.from_code("0xff&3&4").not_test()
    result2 = Parser.from_code("5&5>>3").not_test()
    result3 = Parser.from_code("5²<<-4&-5-+3").not_test()
    assert result0 == UnaryExpr(Integer(0), Operator(1))
def test_integer_parses_integer_literals_successfully():
    """Decimal (with underscores), zero-padded, binary, octal and hex literals."""
    for literal in ("5_000", "0001", "0b11_00", "0o217", "0xffEE_210"):
        assert Parser.from_code(literal).integer() == Integer(0)
def test_imag_float_parses_imaginary_float_literals_successfully():
    """Imaginary float literals in various forms (underscores, exponents)."""
    sources = (
        ".05im",
        "0.0_55im",
        "1_00.00_50im",
        "1.e-5_00im",
        "1_00.1_00e-1_00im",
    )
    for src in sources:
        assert Parser.from_code(src).imag_float() == ImagFloat(0)
def optimize(self, code, expected):
    """Optimizing `code` must produce the same AST as parsing `expected`."""
    def build_ast(src):
        return Parser().parse(Tokenizer().tokenize('var i = ' + src))

    actual_ast = build_ast(code)
    expected_ast = build_ast(expected)
    Optimizer().optimize(actual_ast)
    self.assertEqual(actual_ast, expected_ast)
def run():
    """Run the compiler front end over 'primeiro_portugolo.ptgl'."""
    parser = Parser(Lexer('primeiro_portugolo.ptgl'))
    parser.compilador()
def test_parser(program):
    """Parse *program*; report lexical/syntax errors, otherwise dump the output.

    Fixes: the `while True: p.system_goal(); break` loop ran exactly once —
    replaced with a plain call. Print statements converted to function form
    for consistency with the Python 3 code elsewhere in this file.
    """
    from compiler.parser import Parser
    p = Parser(program)
    try:
        p.system_goal()
    except (SyntaxError, LexicalError) as e:
        print("Lexical or Syntax error occured with char: '%s'" % e.err)
    else:
        for o in p.output:
            print(o)
def compile_code(code, output_type="exe", compiler_opts=None):
    """Compile `code` to the requested output type and echo the result.

    supported_output_types = [
        "exe",
        "ll",
        "wasm",
        "ast",
        "sema",
        "lowered_ast",
        "tokens",
    ]

    Fixes: the old default `compiler_opts=CompilerOptions()` was a single
    shared instance, and the "ll"/"wasm" branches mutate it
    (`compiler_opts.target_code = ...`), so one call's target leaked into
    later calls. A fresh instance is now created per call.
    """
    if compiler_opts is None:
        compiler_opts = CompilerOptions()

    if output_type == "tokens":
        tokens = Lexer(code, compiler_opts).lex()
        result = json_dumps(tokens)
    elif output_type == "ast":
        ast = Parser.from_code(code, compiler_opts).parse()
        result = json_dumps(ast)
    elif output_type == "sema":
        tokens = Lexer(code, compiler_opts).lex()
        ast = Parser(tokens, compiler_opts).parse()
        semantic_info = SemanticAnalyzer(ast, tokens, compiler_opts).analyze()
        result = json_dumps(semantic_info)
    elif output_type == "ll":
        compiler_opts.target_code = "llvm"
        tokens = Lexer(code, compiler_opts).lex()
        ast = Parser(tokens, compiler_opts).parse()
        semantic_info = SemanticAnalyzer(ast, tokens, compiler_opts).analyze()
        llvm = LLVMCodegen(ast, semantic_info).generate()
        result = llvm.dumps()
    elif output_type == "wasm":
        compiler_opts.target_code = "wasm"
        tokens = Lexer(code, compiler_opts).lex()
        ast = Parser(tokens, compiler_opts).parse()
        semantic_info = SemanticAnalyzer(ast, tokens, compiler_opts).analyze()
        # NOTE(review): this branch emits sema JSON, not wasm — confirm intent.
        result = json_dumps(semantic_info)
    else:
        click.echo("Unimplemented Output Type!")
        return

    click.echo(result)
def test_power_expr_function_parses_power_expression_successfully():
    """power_expr: `^`, superscript squares, bare atoms, and `√` roots."""
    # TODO: More complex examples

    def parse(code):
        return Parser.from_code(code).power_expr()

    assert parse("5^6") == BinaryExpr(Integer(0), Operator(1), Integer(2))
    assert parse("5²") == UnaryExpr(Integer(0), Operator(1))
    assert parse("5") == Integer(0)
    # the root operator wraps the whole squared/power operand
    assert parse("√5²") == UnaryExpr(UnaryExpr(Integer(1), Operator(2)), Operator(0))
    assert parse("√5^5") == UnaryExpr(
        BinaryExpr(Integer(1), Operator(2), Integer(3)), Operator(0)
    )
def test_system_goal(self):
    """system_goal on a tiny program should emit HALT to the output file."""
    with open(self.input_file, 'w') as f:
        f.write('BEGIN A := BB + 314 + A; END$')
    with open(self.input_file, 'r') as f:
        Parser(f).system_goal()
    with open(self.output_file, 'r') as f:
        compiled = f.read()
    os.remove(self.output_file)
    os.remove(self.input_file)
    self.assertEqual(compiled, "HALT")
def main():
    """Compile example.gg to LLVM IR, save it, and run the result."""
    with open('example.gg', 'r') as source:
        text_input = source.read()
    tokens = Lexer().get_lexer().lex(text_input)
    cg = CodeGen()
    pg = Parser(cg)
    pg.parse()
    pg.get_parser().parse(tokens, state=ParserState()).generate()
    cg.create_ir()
    cg.save_ir('output/output.ll')
    print(cg.run(False))
def parse_while_stat(lexer):
    """`while exp do block end` -> WhileStat."""
    lexer.get_next_token_of_kind(TokenKind.KW_WHILE)
    condition = ExpParser.parse_exp(lexer)
    lexer.get_next_token_of_kind(TokenKind.KW_DO)
    body = Parser.parse_block(lexer)
    lexer.get_next_token_of_kind(TokenKind.KW_END)
    return lua_stat.WhileStat(condition, body)
def test_parser_backtracks_on_fail_successfully():
    """A failed parse must return None and reset the cursor."""
    # TODO: Try more complex parser functions
    failed_parser = Parser.from_code("1hello")
    assert failed_parser.identifier() is None
    assert failed_parser.cursor == -1
def finish_for_in_stat(lexer, name):
    """`for <namelist> in <explist> do block end` -> ForInStat.

    The first name has already been consumed and is passed in as *name*.
    """
    names = StatParser.finish_name_list(lexer, name)
    lexer.get_next_token_of_kind(TokenKind.KW_IN)
    exps = ExpParser.parse_exp_list(lexer)
    do_line, _ = lexer.get_next_token_of_kind(TokenKind.KW_DO)
    body = Parser.parse_block(lexer)
    lexer.get_next_token_of_kind(TokenKind.KW_END)
    return lua_stat.ForInStat(do_line, names, exps, body)
def compile_il():
    """Compile test.il to bytecode and write it to test.ix.

    Fixes: files are now opened via `with`, so they close even if a compile
    stage raises (the old open/close pairs leaked on error), and the local
    `bytes` was renamed — it shadowed the builtin.
    """
    with open("test.il", "r") as source:
        file_contents = source.read()
    tokens = lexer.lex(file_contents)
    parser = Parser(tokens)
    ast = parser.parse()
    funcs = parser.get_func_list()
    #print_ast(ast)
    bytecode = CodeGen(ast, funcs).generate()
    with open("test.ix", "bw") as output:
        output.write(bytecode)
def parse_func_def_exp(lexer):
    """Function body `( parlist ) block end` -> FuncDefExp."""
    from compiler.parser import Parser
    first_line = lexer.get_line()
    lexer.get_next_token_of_kind(TokenKind.SEP_LPAREN)
    params, is_var_arg = ExpParser.parse_par_list(lexer)
    lexer.get_next_token_of_kind(TokenKind.SEP_RPAREN)
    body = Parser.parse_block(lexer)
    last_line, _ = lexer.get_next_token_of_kind(TokenKind.KW_END)
    return lua_exp.FuncDefExp(first_line, last_line, params, is_var_arg, body)
def test_compile(in_file, out_file):
    """Compile *in_file*, writing the captured compiler output to *out_file*.

    Fixes: on the exception path the old code printed the traceback while
    sys.stdout was still redirected into the WriteObj, so the error text
    vanished into the capture buffer — stdout is now restored first. Print
    statements converted to function form for consistency with the Python 3
    code elsewhere in this file.
    """
    from compiler.parser import Parser
    p = Parser(in_file)
    try:
        output = WriteObj()
        sys.stdout = output  # capture everything the compiler prints
        p.system_goal()
    except Exception:
        sys.stdout = sys.__stdout__
        print(traceback.format_exc())
    else:
        sys.stdout = sys.__stdout__
        out_file.write(output.content)
        print("Compiled to location: %s" % out_file.name)
        if p.errors:
            print("The following errors where detected and recovery was attempted")
            for err in p.errors:
                print(err)
def _test_parse_impl(self, buf, expected_exprs):
    """Lex *buf*, parse every top-level expression, and compare with *expected_exprs*."""
    lexer = Lexer(buf)
    tokens = []
    # Collect tokens up to and including EOF.
    while True:
        token = lexer.lex_token()
        tokens.append(token)
        if token.type == TokenType.EOF:
            break
    self.parser = Parser(tokens)
    # Parse until the parser signals exhaustion with None.
    while True:
        parsed = self.parser.parse_top_level_expr()
        if parsed is None:
            break
        self.exprs.append(parsed)
    for parsed in self.exprs:
        print(parsed)
    self.assertEqual(len(self.exprs), len(expected_exprs))
    for parsed, expected in zip(self.exprs, expected_exprs):
        self.assertEqual(parsed, expected)
def test_unary_expr_function_parses_root_expression_successfully():
    """unary_expr: negation, stacked prefixes, and `√`/`²` combinations."""
    # TODO: More complex examples

    def parse(code):
        return Parser.from_code(code).unary_expr()

    assert parse("-6") == UnaryExpr(Integer(1), Operator(0))
    assert parse("-5²") == UnaryExpr(UnaryExpr(Integer(1), Operator(2)), Operator(0))
    assert parse("~√5²") == UnaryExpr(
        UnaryExpr(UnaryExpr(Integer(2), Operator(3)), Operator(1)), Operator(0)
    )
    assert parse("-√5^5") == UnaryExpr(
        UnaryExpr(BinaryExpr(Integer(2), Operator(3), Integer(4)), Operator(1)),
        Operator(0),
    )
    # stacked prefix operators nest outermost-first
    assert parse("-~+5_00") == UnaryExpr(
        UnaryExpr(UnaryExpr(Integer(3), Operator(2)), Operator(1)), Operator(0)
    )
def run_typecheck_test(test, compiler: Compiler) -> bool:
    """Type-check *test* and compare its typed AST with the golden .py.ast.typed file."""
    astparser = Parser()
    ast = compiler.parse(test, astparser)
    if astparser.errors:
        return False
    compiler.visit(ast, TypeChecker())
    ast_json = ast.toJSON()
    with test.with_suffix(".py.ast.typed").open("r") as golden:
        expected_json = json.load(golden)
    return ast_equals(ast_json, expected_json)
def parse_if_stat(lexer):
    """`if exp then block {elseif exp then block} [else block] end` -> IfStat."""
    conditions = []
    blocks = []

    def parse_arm():
        # One `<exp> then <block>` arm, shared by `if` and `elseif`.
        conditions.append(ExpParser.parse_exp(lexer))
        lexer.get_next_token_of_kind(TokenKind.KW_THEN)
        blocks.append(Parser.parse_block(lexer))

    lexer.get_next_token_of_kind(TokenKind.KW_IF)
    parse_arm()
    while lexer.look_ahead() == TokenKind.KW_ELSEIF:
        lexer.get_next_token()
        parse_arm()
    if lexer.look_ahead() == TokenKind.KW_ELSE:
        lexer.get_next_token()
        # `else` is modelled as `elseif true`.
        conditions.append(lua_exp.TrueExp(lexer.get_line()))
        blocks.append(Parser.parse_block(lexer))
    lexer.get_next_token_of_kind(TokenKind.KW_END)
    return lua_stat.IfStat(conditions, blocks)
def test_mul_expr_function_parses_multiply_expression_successfully():
    """mul_expr: `*`, `//`, `%`, `/`, `@` with unary and power operands."""
    # TODO: More complex examples

    def parse(code):
        return Parser.from_code(code).mul_expr()

    assert parse("-5") == UnaryExpr(Integer(1), Operator(0))
    # chained multiplicative operators group left-to-right
    assert parse("0xff*3//4") == BinaryExpr(
        BinaryExpr(Integer(0), Operator(1), Integer(2)), Operator(3), Integer(4)
    )
    # the power on the right stays nested under the `%`
    assert parse("5%5^3") == BinaryExpr(
        Integer(0), Operator(1), BinaryExpr(Integer(2), Operator(3), Integer(4))
    )
    assert parse("5²/-4@+3") == BinaryExpr(
        BinaryExpr(
            UnaryExpr(Integer(0), Operator(1)),
            Operator(2),
            UnaryExpr(Integer(4), Operator(3)),
        ),
        Operator(5),
        UnaryExpr(Integer(7), Operator(6)),
    )
def compile(self, source):
    """Compile *source* text to linear code.

    Fixes: removed the dead, commented-out ScopeAnalyzer pass and the unused
    `program_copy = copy.deepcopy(program)` that existed only to feed it.
    """
    parser = Parser(Scanner(source))
    program = parser.parse()

    # Debug dump of the parse tree as JSON.
    obj = program.accept(TreeToJson())
    print(obj)

    logger.ACTIVE = True
    logger.DEBUG = False

    linear_generator = LinearGenerator()
    code = program.accept(linear_generator)
    return code
def compile(self, data):
    """Run the full middle-end pipeline over *data* and render machine output.

    Fixes: removed the unused local `cfg = f.cfg` inside the register
    allocation loop.
    """
    machine = self.find_machine(self.options)
    ast = Parser(data, errors=self.errors).run()
    VarCheck(ast, machine.builtins, errors=self.errors).run()
    Flatten(ast, errors=self.errors).run()
    Reduce(ast, errors=self.errors).run()
    TailRecursion(ast, errors=self.errors).run()
    Inline(ast, errors=self.errors).run()
    # Allocate registers per function CFG.
    for f in ast.symbol_table.symbols.values():
        RegisterAllocation(f.cfg, errors=self.errors).run()
    lines = Linearise(ast, errors=self.errors).run()
    output = Render(lines, machine, errors=self.errors).run()
    return output
def api_parser():
    """Analyze the POSTed program; return captured stderr if non-empty, else stdout."""
    out_buf = StringIO()
    err_buf = StringIO()
    with redirect_stdout(out_buf), redirect_stderr(err_buf):
        parser = Parser()
        parser.load_program(request.form['code'].strip())
        parser.analyze()
    errors = err_buf.getvalue()
    return errors if errors != '' else out_buf.getvalue()
def test_parser_driver(program, grammar, start_sym):
    """Exercise the table-driven LL parser on *program*."""
    from compiler.parser import Parser
    Parser(program, grammar, start_sym).ll_driver()
# NOTE(review): top-level script fragment — `f` and `code` are defined
# earlier in the file, outside this excerpt.
f.close()

# Tokenize the source that was read above.
tokenizer = Tokenizer()
tokens = tokenizer.tokenize(code)

# Disabled debug dump of the token stream, kept as an inert string literal
# (it contains Python 2 `print` statements and would not run under Python 3).
"""
for line in tokens:
    for token in line:
        sys.stdout.write(TOKENS[token[0]])
        if len(token) == 2:
            sys.stdout.write('(%s)' % token[1])
        sys.stdout.write(' ')
    print
"""

# Parse the token stream into an AST.
parser = Parser()
ast = parser.parse(tokens)

# Disabled debug dump of the AST (Python 2 `print`), kept as a string literal.
"""
for a in ast:
    print a
"""

# Disabled interpreter invocation, kept as a string literal.
"""
interpreter = Interpreter()
interpreter.interpret(ast)
"""

# Run the optimizer over the AST in place.
optimizer = Optimizer()
optimizer.optimize(ast)
# NOTE(review): Python 2 script tail. The leading prints continue a
# usage/help message whose beginning (and enclosing condition, if any) is
# above this excerpt — indentation reconstructed, confirm against original.
print "compile options:"
print "  no_reuse : prevent register resuse"
print "  no_concurrent : prevent concurrency"
print
print "tool options:"
print "  iverilog : compiles using the icarus verilog compiler"
print "  run : runs compiled code, used with ghdl or modelsimoptions"
sys.exit(-1)

# parse command line
input_file = sys.argv[-1]
reuse = "no_reuse" not in sys.argv

try:
    # compile into CHIP
    parser = Parser(input_file, reuse)
    process = parser.parse_process()
    name = process.main.name
    instructions = process.generate()
    # With no_concurrent each instruction gets its own frame; otherwise
    # independent instructions are packed into parallel frames.
    if "no_concurrent" in sys.argv:
        frames = [[i] for i in instructions]
    else:
        frames = parallelise(instructions)
    output_file = name + ".v"
    output_file = open(output_file, "w")
    generate_CHIP(input_file, name, frames, output_file, parser.allocator.all_registers, parser.allocator.memory_size)
    output_file.close()
except C2CHIPError as err:
    print "Error in file:", err.filename, "at line:", err.lineno
    print err.message