def ddump_optimizer(source):
    """Compile ``source`` and print a before/after diff of the LLVM optimizer.

    Parses, typechecks and lowers the program to SSA form, emits LLVM IR
    for every function, then prints the diff produced by the optimizer.
    """
    import errors  # fix: errors.listen() was used without importing errors
    import parser
    import cfg
    import typecheck
    import codegen

    with errors.listen():
        parse = parser.make_parser()
        ast = parse(source)
        typecheck.typecheck(ast)
        functions = cfg.ssa_pass(ast)

        cgen = codegen.LLVMEmitter()
        blockgen = codegen.BlockEmitter(cgen)
        for name, retty, argtys, start_block in functions:
            function = blockgen.generate_function(
                name, retty, argtys, start_block
            )
            optimizer = codegen.LLVMOptimizer(cgen.module)
            # Parenthesized single-argument print is valid in Py2 and Py3.
            print('Optimizer Diff'.center(80, '='))
            optimizer.diff(function, cgen.module)
def test_dataclass():
    """typecheck() validates dataclass field values against annotations."""
    @dataclasses.dataclass
    class T:
        i: int
        s: str

    well_typed = T(i=42, s='lol')
    assert typecheck(well_typed)
    assert not typecheck(T(i='lol', s='lol'))  # i holds a str, not an int
    assert not typecheck(T(i=42, s=42))        # s holds an int, not a str
def ifelse(self, node):
    'ifelse = "if" _ "(" expr ")" "{" expr* "}" _ "else" _ "{" expr* "}" '
    # Unpack the parse node: only `cond`, `cons` and `alt` carry meaning;
    # the underscores are literal tokens and whitespace.
    # NOTE(review): `cond` is bound but never evaluated or typechecked here
    # -- confirm whether the condition is handled elsewhere.
    _, _, _, cond, _, _, cons, _, _, _, _, _, alt, _ = node
    consExpr = self.eval(cons)
    altExpr = self.eval(alt)
    # Typecheck each branch against an isolated copy of the environment so
    # bindings made while checking one branch cannot affect the other.
    ccheck = typecheck.typecheck(consExpr, self.env.copy())
    acheck = typecheck.typecheck(altExpr, self.env.copy())
    # Both branches of a conditional must produce the same type.
    if type(ccheck) != type(acheck):
        print("cons: ", ccheck, "alt: ", acheck)
        raise Exception("Consequent and alternative types don't match")
    return [consExpr, altExpr]
def main(): lexer = adalex.make_lexer() tokens = adalex.tokens parser = adaparse.make_parser() fpath = sys.argv[1] #input file path global filepath filepath = fpath program = parser.parse(open(fpath).read()) cwd = os.getcwd() #gettign the current working directory slash, dot = fpath.rfind('/'), fpath.rfind('.') gfilename = fpath[slash+1:dot] # getting the input canonical file name stripping of the rest # Check the program typechecker = typecheck.typecheck() initialize_types(typechecker) env = typechecker.check_goal_symbol(program) if typechecker.get_error_count() > 0: print "Fix the type errors and compile again" sys.exit(0) # If no errors occurred, generate code code = generate_code(program) gen_file = cwd + "/assembly/" + gfilename + ".asm" #forming the output file name try: fd = open(gen_file, "w") fd.write(code) fd.flush() fd.close() except IOError: print "folder cannot be created" print "Done" # Emit the code sequence JumpGenerator().visit(code)
def test_list():
    """Lists typecheck only against matching list/Iterable annotations."""
    ints = [1, 2, 3]
    for accepted in (typing.List[int], typing.List, list[int], list,
                     typing.Iterable[int], typing.Iterable):
        assert typecheck(ints, accepted)

    strs = ["a", "b", "c"]
    for rejected in (typing.List[int], list[int], typing.Iterable[int]):
        assert not typecheck(strs, rejected)

    # A tuple is not a list, even with matching element types.
    assert not typecheck((1, 2, 3), typing.List[int])
def main():
    """Drive the compiler pipeline: stdin source through to emitted C."""
    source = sys.stdin.read()
    tokens = lex(source)                   # source -> tokens
    tree = parse(tokens)                   # tokens -> AST
    symbols = build_symtab(tree)           # AST -> symbol table
    checked = typecheck(tree, symbols)     # AST * symbol table -> Typed AST
    codegen(checked, symbols)              # Typed AST * symbol table -> C code
def test_tuple():
    """Tuples typecheck against fixed-arity and variadic annotations."""
    mixed = (1, "a", True)
    assert typecheck(mixed, tuple[int, str, bool])
    assert typecheck(mixed, typing.Tuple[int, str, bool])
    assert typecheck(mixed, tuple)
    assert typecheck(mixed, typing.Tuple)
    # Heterogeneous contents do not match a homogeneous variadic tuple.
    assert not typecheck(mixed, tuple[int, ...])

    uniform = (1, 2, 3)
    assert typecheck(uniform, tuple[int, ...])
    assert typecheck(uniform, typing.Tuple[int, ...])
    assert not typecheck(uniform, tuple[str, ...])
    assert not typecheck(uniform, typing.Tuple[str, ...])

    # A list never matches a tuple annotation.
    assert not typecheck([1, 2, 3], tuple[int, ...])
def test_bad_examples(self):
    """Every file in BadSamples must fail compilation with the error type
    annotated in the sample itself on a line marked ``HERE <ErrorType>``."""
    mjl = MiniJavaLexer()
    mjl.build()
    mpj = MiniJavaParser()
    mpj.build()
    exmpls_path = "../tests/BadSamples"

    def find_error_type_in_file(code, lineno):
        # Extract the expected error-type name from the sample source.
        # A lineno of None means the error is expected at end of file.
        lines = code.split('\n')
        if lineno is None:
            return 'endOfFile'
        if lineno > len(lines):
            print("Wrong lineno : got ", lineno, end =" ")
            return None
        # NOTE(review): indexes lines[lineno] directly -- assumes the
        # reported lineno is 0-based (or the marker sits on the next
        # line); confirm against CompilationError.lineno semantics.
        line = lines[lineno]
        if "HERE" not in line:
            print("!!! No error specifyed !!!", end=" ")
            return None
        # Everything after "HERE " names the expected ErrorType member.
        return line[line.index("HERE") + 5:].strip()

    good_files = sorted(os.listdir(exmpls_path))
    for idx, file in enumerate(good_files):
        print("{} out of {} Parsing {} ...".format(idx + 1,len(good_files), file), end=" ")
        file_path = os.path.join(exmpls_path, file)
        code = utils.read_file(file_path)
        try:
            prog_ast = mpj.get_AST(code, lexer=mjl.lexer, debug=False)
            symbol_table = build_symbol_table(prog_ast)
            typecheck(prog_ast, symbol_table)
            # Reaching this point means the bad sample compiled cleanly.
            print("!!! No error, but should be !!!", end=" ")
        except CompilationError as e:
            error_type = find_error_type_in_file(code, e.lineno)
            # An empty annotation string counts as "no error expected".
            if error_type is not None and (len(error_type) == 0):
                print("!!! No error, but should be !!!", end=" ")
            if error_type:
                self.assertEqual(getattr(ErrorType, error_type), e.error_type)
        print("[done]")
    print("Success.")
def test_set():
    """Sets typecheck against set and Iterable annotations.

    Fix: the two Iterable assertions were duplicated verbatim at the end
    of the original test; the redundant pair is removed.
    """
    s = {1, 2, 3}
    assert typecheck(s, typing.Set[int])
    assert typecheck(s, typing.Set)
    assert typecheck(s, set[int])
    assert typecheck(s, set)
    assert typecheck(s, typing.Iterable[int])
    assert typecheck(s, typing.Iterable)
def typecheckFile(filename):
    """Evaluate ``<filename>.lqimp`` and print the type of every top-level
    expression; returns the interpreter's global environment."""
    interpreter = QImp()
    with open(filename + ".lqimp", "r", encoding="utf8") as source:
        program = interpreter.eval(source.read())
    for expr in program:
        print(typecheck.typecheck(expr, interpreter.env))
    return interpreter.env
def typecheckFile(filename):
    """Type-check every top-level expression of ``<filename>.lqimp``.

    Prints each expression's inferred type and returns the global
    environment left behind by evaluation.
    """
    qimp = QImp()
    with open(filename + ".lqimp", "r", encoding="utf8") as fh:
        source_text = fh.read()
    for item in qimp.eval(source_text):
        print(typecheck.typecheck(item, qimp.env))
    return qimp.env
def main():
    """Compile ./demos/conditional.mini and print its TAC translation."""
    # src = sys.stdin.read()
    # Fix: `with` ensures the demo file is closed (was a bare open().read()).
    with open('./demos/conditional.mini', 'r') as demo:
        src = demo.read()
    toks = lex(src)                      # source -> tokens
    ast = parse(toks)                    # tokens -> AST
    symtab = build_symtab(ast)           # AST -> symbol table
    typed_ast = typecheck(ast, symtab)   # AST * symbol table -> Typed AST
    print("\nTAC Equivalent\n==================================")
    tac_gen(typed_ast, symtab)           # Typed AST * symbol table -> TAC Code
def assignment(self, node, children):
    'assignment = "let" _ lvalue "=" expr'
    # Only the bound name and the right-hand expression matter; the
    # remaining children are literal tokens and whitespace.
    _, _, lvalue, _, expr = children
    # Rebinding an existing name is an error in this language.
    if lvalue in self.env:
        raise Exception("Duplicate definitions for" + ": " + lvalue)
    # Record the inferred type; typecheck receives a copy so it cannot
    # mutate the real environment.
    self.env[lvalue] = typecheck.typecheck(expr, self.env.copy())
    return "IGNORE"
def assignment(self, node, children):
    'assignment = "let" _ lvalue "=" expr'
    # Pick out the name (index 2) and expression (index 4) directly.
    lvalue, expr = children[2], children[4]
    if lvalue in self.env:
        # A let-binding may not redefine an existing name.
        raise Exception("Duplicate definitions for" + ": " + lvalue)
    self.env[lvalue] = typecheck.typecheck(expr, self.env.copy())
    return "IGNORE"
def assignment(self, node, children):
    'assignment = "let" _ lvalue "=" expr'
    _, _, lvalue, _, expr = children
    if lvalue in self.env:
        raise Exception("Duplicate definitions for" + ": " + lvalue)
    # Lollipop (linear function) values are stored verbatim; everything
    # else is typechecked against a deep copy of the environment first.
    if isinstance(expr, typecheck.Lollipop):
        self.env[lvalue] = expr
    else:
        self.env[lvalue] = typecheck.typecheck(expr, copy.deepcopy(self.env))
    return "IGNORE"
def ddump_blocks(source):
    """Debug dump: print the SSA basic blocks of every function in ``source``.

    Raises AssertionError if parsing or typechecking reported errors.
    """
    import errors
    import lexer
    import parser
    import typecheck

    parse = parser.make_parser()
    with errors.listen():
        program = parse(source)
        typecheck.typecheck(program)

    if not errors.reported():
        functions = ssa_pass(program)
        for funcname, retty, argtys, start_block in functions:
            fname = (" %s %s %s " % (funcname, retty, argtys))
            # Fix: consistent print-function form (the block already used
            # print("")); single-arg parenthesized print is Py2/Py3 safe.
            print(fname.center(80, '-'))
            print_block(start_block)
            print("")
    else:
        raise AssertionError
def test_mapping():
    """Dicts typecheck against dict/Dict/Mapping, bare or parameterized."""
    mapping = {'k': 42}
    for annotation in (dict[str, int], typing.Dict[str, int],
                       typing.Mapping[str, int], dict, typing.Dict,
                       typing.Mapping):
        assert typecheck(mapping, annotation)
def test_good_examples(self):
    """Every sample under codeExamples must parse and typecheck cleanly."""
    java_lexer = MiniJavaLexer()
    java_lexer.build()
    java_parser = MiniJavaParser()
    java_parser.build()
    samples_dir = "../tests/codeExamples"
    samples = os.listdir(samples_dir)
    for idx, file in enumerate(samples):
        print("{} out of {} Parsing {} ...".format(idx + 1,len(samples), file), end=" ")
        code = utils.read_file(os.path.join(samples_dir, file))
        ast = java_parser.get_AST(code, lexer=java_lexer.lexer, debug=False)
        table = build_symbol_table(ast)
        typecheck(ast, table)
        print("[done]")
    print("Success.")
def ddump_optimizer(source):
    """Compile ``source`` to LLVM IR and print the optimizer's diff for
    every generated function."""
    import errors  # fix: errors.listen() was used without importing errors
    import parser
    import cfg
    import typecheck
    import codegen

    with errors.listen():
        parse = parser.make_parser()
        ast = parse(source)
        typecheck.typecheck(ast)
        functions = cfg.ssa_pass(ast)
        cgen = codegen.LLVMEmitter()
        blockgen = codegen.BlockEmitter(cgen)
        for name, retty, argtys, start_block in functions:
            function = blockgen.generate_function(name, retty, argtys, start_block)
            optimizer = codegen.LLVMOptimizer(cgen.module)
            # Single-argument parenthesized print works in Py2 and Py3.
            print('Optimizer Diff'.center(80, '='))
            optimizer.diff(function, cgen.module)
def _analyze_schema(self, schema):
    """Refine a column-type schema against every body row.

    ``schema`` is a list of type-name strings, one per column.  Each field
    of each row is typechecked, and a column's entry is replaced whenever
    the observed type ranks lower in ``order`` than the current entry.
    Columns left as 'none' fall back to 'string' in the returned list.
    """
    # Ranking used to decide when to replace a column's current type;
    # lower rank wins (string = 0 replaces everything above it).
    order = {
        'string': 0,
        'boolean': 1,
        'float': 2,
        'integer': 3,
        'none': 4
    }
    for row in self.body:
        fields = self._split(row)
        for index, field in enumerate(fields):
            # NOTE(review): assumes typecheck() returns one of the keys
            # of `order` -- anything else would raise KeyError below.
            t = typecheck(field.strip())
            if order[schema[index]] > order[t]:
                schema[index] = t
    # 'none' (column never held a typed value) defaults to 'string'.
    return [t if t != 'none' else 'string' for t in schema]
def test_nested_obj_bad_inner():
    """A type error in a nested attribute fails the outer object's check."""
    class Inner:
        i: int

        def __init__(self, i):
            self.i = i

    class Outer:
        inner: Inner

        def __init__(self, inner):
            self.inner = inner

    # Inner.i is annotated int but holds a str, so Outer must fail too.
    badly_nested = Outer(Inner('lol'))
    assert not typecheck(badly_nested)
def test_nested_obj():
    """Nested annotated objects typecheck when every level is well-typed."""
    class Inner:
        i: int

        def __init__(self, i):
            self.i = i

    class Outer:
        inner: Inner

        def __init__(self, inner):
            self.inner = inner

    well_nested = Outer(Inner(42))
    assert typecheck(well_nested)
def test_simple():
    """Well-typed Mine instances pass, whichever type the last arg takes."""
    assert typecheck(Mine(42, 'lol', {'k': 42}, ['lol'], (1, 'l', 1), 42))
    assert typecheck(Mine(42, 'lol', {'k': 42}, ['lol'], (1, 'l', 1), 'lol'))
def test_bad_type():
    """A wrongly-typed field makes typecheck reject the instance."""
    wrong_first_field = Mine('', 'lol', {'k': 42}, ['lol'], (1, 'l', 1))
    assert not typecheck(wrong_first_field)
# Built-in function bindings exposed to interpreted programs.
env["len"] = lambda x: len(x)
env["null?"] = lambda x: len(x) == 0
env["sqrt"] = lambda x: np.sqrt(x)
env["reverse"] = lambda x: list(reversed(x))
env["fold"] = lambda x, y: functools.reduce(x, y)
env["pi"] = math.pi
env["exp"] = np.exp #should add cleanExp like quantum parethesis
env["oracle"] = lambda fun: oracleLib.generateOracle(fun)
# Matrix exponential / logarithm via scipy, returned as plain lists.
env["expm"] = lambda matrix: list(scipyAlg.expm(matrix))
env["logm"] = lambda matrix: list(scipyAlg.logm(matrix))
env["logTwo"] = lambda x: int(math.log(x, 2))
env["length"] = lambda x: len(x)
# Conjugate transpose, converted back to a plain Python list.
env["transpose"] = lambda x: (quantumLib.ctransp(x)).tolist()

def repl():
    # Interactive read-eval-print loop over standard input.
    qImpInstance = QImp()
    while True:
        print(qImpInstance.eval(input(">>>")))

# Script entry: evaluate the test program and print each expression's type.
with open("typecheckTest.qimp", "r", encoding="utf8") as myfile:
    a = QImp()
    progri = a.eval(myfile.read())
    for item in progri:
        print(typecheck.typecheck(item, a.env))
    #print("Global env:",a.env)
def typecheck_pass(ast, env):
    """Compiler pass: typecheck the AST and stash its symbol table in env."""
    env['symtab'] = typecheck.typecheck(ast)
    return ast, env
def test_double_annotations_wrapping():
    """Re-wrapping an already-decorated function in tc.typecheck is a no-op."""
    @tc.typecheck
    def foo(x: int):
        return x

    rewrapped = tc.typecheck(foo)
    assert foo(1) == rewrapped(1) == 1
def typecheck_pass(ast, env):
    """Run the typechecker over ``ast`` and record its symbol table.

    Returns the (unchanged) AST together with the updated environment.
    """
    symbol_table = typecheck.typecheck(ast)
    env["symtab"] = symbol_table
    return ast, env
def bar() -> type(None):
    # The return-type check must run one frame up, inside the decorator's
    # invocation proxy.
    assert extract_stack()[-2][2] == "typecheck_invocation_proxy"

bar()
print("ok")

############################################################################

print("double annotations wrapping: ", end="")

@typecheck
def foo(x: int):
    return x

# Decorating an already-decorated function must not change behaviour.
assert foo(1) == typecheck(foo)(1) == 1

print("ok")

############################################################################

print("empty strings in incompatible values: ", end="")

# Predicate annotation: any value except the empty string is accepted;
# the default is None (which passes the predicate).
@typecheck
def foo(s: lambda s: s != "" = None):
    return s

assert foo() is None
assert foo(None) is None
assert foo(0) == 0
def func(self, node):
    'func = "lambda" "(" typeDecl ((sep typeDecl)*)? ")" "{" expr* "}" ( "(" expr* ((sep expr)*)? ")" )?'
    # Unpack: first parameter declaration, remaining declarations, body
    # expressions, and an optional immediate application "(args)".
    _, _, param1, params, _, _, expr, _, app = node
    param1, type1 = self.eval(param1)
    paramRest = []
    typeRest = []
    declRest = list(map(self.eval, params))
    # process extra arguments
    if declRest != [[]]:
        for item in declRest[0]:
            paramRest.append(item[1][0])
            typeRest.append(item[1][1])
    # Collect all parameter names and their declared types, first
    # declaration followed by the rest, kept in parallel lists.
    listOfParams = []
    listOfTypes = []
    listOfParams.append(param1)
    listOfTypes.append(type1)
    for item in paramRest:
        listOfParams.append(item)
    for item in typeRest:
        listOfTypes.append(item)
    # Immediate application: evaluate the argument list and call right away.
    # NOTE(review): this calls the bare name `func` -- presumably a
    # module-level callable, not this method; confirm.
    if app.text:
        arg = self.eval(app)
        arguments = [arg[0][1][0]]
        for item in arg[0][2][0]:
            arguments.append(item[1])
        return func(*arguments)
    # Otherwise build a curried chain of single-parameter Lam nodes,
    # collecting each parameter's parsed type as a constraint.
    constrs = []
    firstType = parseType(listOfTypes[0])
    constrs.append(firstType)
    topLam = typecheck.Lam(typecheck.Identifier(listOfParams[0]), firstType, [])
    latestLam = topLam
    # Deep copy: parameter bindings must not leak into the outer env.
    localContext = copy.deepcopy(self.env)
    localContext[listOfParams[0]] = firstType
    for item, typeString in list(zip(listOfParams, listOfTypes))[1:]:
        typos = parseType(typeString)
        constrs.append(typos)
        localContext[item] = typos
        currentLam = typecheck.Lam(typecheck.Identifier(item), typos, [])
        latestLam.body = currentLam
        latestLam = currentLam
    # Evaluate the body with the parameters in scope, dropping "IGNORE"
    # placeholders produced by statement-like productions.
    bodyExprs = []
    body = []
    evltr = QImp(localContext)
    for item in evltr.eval(expr):
        if item != "IGNORE":
            bodyExprs.append(item)
    body = bodyExprs
    latestLam.body = body
    topLam.setConstr(constrs)
    # Type-check the completed lambda chain in its local context.
    return typecheck.typecheck(topLam, localContext)
def test_bad_args():
    """A list containing a mistyped element fails the instance check."""
    mixed_list_arg = Mine('', 'lol', {'k': 42}, ['lol', 1], (1, 'l', 1))
    assert not typecheck(mixed_list_arg)
def func(self, node):
    'func = "lambda" "(" typeDecl ((sep typeDecl)*)? ")" "{" expr* "}" ( "(" expr* ((sep expr)*)? ")" )?'
    # Unpack: first parameter declaration, remaining declarations, the
    # body expressions, and an optional immediate application "(args)".
    _, _, param1, params, _, _, expr, _, app = node
    param1, type1 = self.eval(param1)
    paramRest = []
    typeRest = []
    declRest = list(map(self.eval, params))
    #process extra arguments
    if declRest != [[]]:
        for item in declRest[0]:
            paramRest.append(item[1][0])
            typeRest.append(item[1][1])
    # Parallel lists of all parameter names and their declared types.
    listOfParams = []
    listOfTypes = []
    listOfParams.append(param1)
    listOfTypes.append(type1)
    for item in paramRest:
        listOfParams.append(item)
    for item in typeRest:
        listOfTypes.append(item)
    # Immediate application: evaluate the arguments and call straight away.
    # NOTE(review): calls the bare name `func` -- presumably module-level,
    # not this method; confirm.
    if (app.text):
        arg = self.eval(app)
        arguments = [arg[0][1][0]]
        for item in arg[0][2][0]:
            arguments.append(item[1])
        return (func(*arguments))
    # Otherwise build a curried chain of single-parameter Lam nodes,
    # collecting each parameter's parsed type as a constraint.
    constrs = []
    firstType = parseType(listOfTypes[0])
    constrs.append(firstType)
    topLam = typecheck.Lam(typecheck.Identifier(listOfParams[0]), firstType, [])
    latestLam = topLam
    # Deep copy: parameter bindings must not leak into the outer env.
    localContext = copy.deepcopy(self.env)
    localContext[listOfParams[0]] = firstType
    for item, typeString in list(zip(listOfParams, listOfTypes))[1:]:
        typos = parseType(typeString)
        constrs.append(typos)
        localContext[item] = typos
        currentLam = typecheck.Lam(typecheck.Identifier(item), typos, [])
        latestLam.body = currentLam
        latestLam = currentLam
    # Evaluate the body with the parameters in scope, dropping "IGNORE"
    # placeholders produced by statement-like productions.
    bodyExprs = []
    body = []
    evltr = QImp(localContext)
    for item in evltr.eval(expr):
        if item != "IGNORE":
            bodyExprs.append(item)
    body = bodyExprs
    latestLam.body = body
    topLam.setConstr(constrs)
    # Type-check the completed lambda chain in its local context.
    return typecheck.typecheck(topLam, localContext)
# Arithmetic and utility builtins exposed to interpreted programs.
env["/"] = lambda x,y: np.divide(x,y)
env["="] = lambda x,y: x == y
env["len"] = lambda x: len(x)
env["null?"] = lambda x: len(x) == 0
env["sqrt"] = lambda x: np.sqrt(x)
env["reverse"] = lambda x : list(reversed(x))
env["fold"] = lambda x,y : functools.reduce(x,y)
env["pi"] = math.pi
env["exp"] = np.exp #should add cleanExp like quantum parethesis
env["oracle"] = lambda fun: oracleLib.generateOracle(fun)
# Matrix exponential / logarithm via scipy, returned as plain lists.
env["expm"] = lambda matrix: list(scipyAlg.expm(matrix))
env["logm"] = lambda matrix: list(scipyAlg.logm(matrix))
env["logTwo"] = lambda x: int(math.log(x,2))
env["length"] = lambda x: len(x)
# Conjugate transpose, converted back to a plain Python list.
env["transpose"] = lambda x: (quantumLib.ctransp(x)).tolist();

def repl():
    # Interactive read-eval-print loop over standard input.
    qImpInstance = QImp()
    while True:
        print(qImpInstance.eval(input(">>>")))

# Script entry: evaluate the test program and print each expression's type.
with open ("typecheckTest.qimp", "r",encoding="utf8") as myfile:
    a = QImp()
    progri = a.eval(myfile.read())
    for item in progri:
        print(typecheck.typecheck(item,a.env))
    #print("Global env:",a.env)
} return c ; } } """ # tests/codeExamples/BinarySearch.java try: prog_ast = mpj.get_AST(code, lexer=mjl.lexer, debug=False) tree_to_svg(prog_ast, "test_prog") symbol_table = build_symbol_table(prog_ast) print('\n\nTYPECHECKING...') typecheck(prog_ast, symbol_table) print('TYPES ARE OK') print('\n\nBUILDING IR...') ir = build_ir(prog_ast, symbol_table) print('IR:\n') for method in ir: print('METHOD', method) print(ir[method].to_printable()) print() print("\n\n\nGENERATE ASM CODE\n\n") asm = X86Assembler(symbol_table) # asm_code = asm.ir_to_asm(ir)
def test_union():
    """Union matches any member type and rejects everything else."""
    str_or_int = typing.Union[str, int]
    assert typecheck(42, str_or_int)
    assert typecheck('lol', str_or_int)
    # bytes is neither str nor int.
    assert not typecheck(b'lol', str_or_int)
def test_optional():
    """Optional[int] accepts ints and None, rejects other types."""
    maybe_int = typing.Optional[int]
    assert typecheck(42, maybe_int)
    assert typecheck(None, maybe_int)
    assert not typecheck('lol', maybe_int)
def __call__(self, x):
    """Invoke the wrapped function through a fresh typecheck proxy."""
    return tc.typecheck(self.function)(x)
def joosc(targets, options):
    """Compile the given Java targets through the full joosc pipeline.

    Stages: scan -> parse -> weed -> AST -> type resolution -> typecheck ->
    reachability -> codegen.  ``options.stage`` stops the pipeline early
    after the named stage (the process exits via sys.exit(0)).
    """
    # SETUP
    ########

    global stdlib_asts

    # Build a list of targets to compile.
    target_files = []
    for target in targets:
        if os.path.isfile(target) and target.endswith('.java'):
            target_files.append(target)
        elif os.path.isdir(target) and options.directory_crawl == True:
            target_files.extend(opts.directory_crawl(target))
        else:
            logging.error("Invalid target %s, exiting..." % target)

    # Pull in the stdlib sources unless their ASTs were already cached
    # by a previous invocation (see the "stdlib optimization" below).
    if options.include_stdlib == True and stdlib_asts == None:
        target_files.extend(opts.stdlib_files)

    # BUILD AST
    ############

    # Build token list for each file.
    token_lists = []
    for target_file in target_files:
        token_lists.append(get_tokens(target_file, options))
    if options.stage == 'scanner':
        sys.exit(0)

    # Build parse trees for each file.
    parse_trees = []
    for i, tokens in enumerate(token_lists):
        parse_trees.append(get_parse_tree(tokens, target_files[i], options))
    if options.stage == 'parser':
        sys.exit(0)

    # Weed each parse tree.
    for i, parse_tree in enumerate(parse_trees):
        weed_parse_tree(parse_tree, target_files[i], options)
    if options.stage == 'weeder':
        sys.exit(0)

    ast_list = []
    from utils.node import find_nodes, Node
    for i, parse_tree in enumerate(parse_trees):
        # Record each member's declaration order before AST construction.
        for o, n in enumerate(find_nodes(parse_tree, [Node('FieldDeclaration'), Node('ConstructorDeclaration'), Node('MethodDeclaration')])):
            n.decl_order = o
        ast_list.append(get_ast(parse_tree, target_files[i], options))
    if options.stage == 'ast':
        sys.exit(0)

    # stdlib optimization
    if options.include_stdlib == True:
        if stdlib_asts != None:
            # Reuse the cached stdlib ASTs from an earlier run.
            ast_list.extend(stdlib_asts)
        else:
            # First run: remember which ASTs came from the stdlib files.
            stdlib_asts = []
            for i, ast in enumerate(ast_list):
                if target_files[i] in opts.stdlib_files:
                    stdlib_asts.append(ast)

    # TYPE RESOLUTION
    ##################

    pkg_index = build_environments(ast_list)
    type_index = typelink.typelink(ast_list, pkg_index)
    class_index = class_hierarchy.class_hierarchy(ast_list, pkg_index, type_index)
    if options.stage == 'hierarchy':
        sys.exit(0)

    name_resolve.name_link(pkg_index, type_index, class_index)
    if options.stage == 'name':
        # Dump ASTs, skipping stdlib files unless explicitly requested.
        for i, _ in enumerate(ast_list):
            if options.include_stdlib == False or target_files[i] not in opts.stdlib_files or \
                    options.print_stdlib == True:
                ast_list[i].pprint()
        sys.exit(0)

    typecheck.typecheck(type_index, class_index)
    if options.stage == 'typecheck':
        for i, _ in enumerate(ast_list):
            if options.include_stdlib == False or target_files[i] not in opts.stdlib_files or \
                    options.print_stdlib == True:
                ast_list[i].pprint()
        sys.exit(0)

    name_resolve.check_method_forwardreference(pkg_index, type_index, class_index)

    for i in ast_list:
        reachability.reachability(i)
    if options.stage == 'reachability':
        for i, _ in enumerate(ast_list):
            if options.include_stdlib == False or target_files[i] not in opts.stdlib_files or \
                    options.print_stdlib == True:
                ast_list[i].pprint()
        sys.exit(0)

    codegen.gen(options, ast_list, class_index, type_index)