def __init__(self, sourceCode, testFile, mockTarget, mockMethodInfo):
    """Prepare an instrumented copy of a test file with the mock target patched in.

    Args:
        sourceCode: path to the source file under test (e.g. cat_owner.py).
        testFile: path to the test file to instrument (e.g. test_cat_owner.py).
        mockTarget: dotted patch target, e.g. "cat_database.CatDatabase".
        mockMethodInfo: per-method mock data, normalized via
            transferListMockMethodInfo below.
    """
    # Path of the source file under test (e.g. cat_owner.py).
    self.sourceCode = sourceCode
    # Path of the test file to instrument (e.g. test_cat_owner.py).
    self.testFile = testFile
    # Dotted patch target, e.g. "cat_database.CatDatabase".
    self.mockTarget = mockTarget
    # Class name portion of the target (e.g. "CatDatabase").
    # NOTE(review): assumes exactly one dot in mockTarget — a deeper dotted
    # path would pick the wrong segment; confirm against callers.
    self.mockClassName = mockTarget.split(".")[1]
    # Pristine AST of the test file; self.root is the working copy we mutate.
    self.clean_root = astor.parse_file(self.testFile)
    self.root = copy.deepcopy(self.clean_root)
    # Deep-copied target test, decorated with @patch("<mockTarget>") so the
    # mock is injected when the instrumented test runs.
    self.clean_test = copy.deepcopy(self.getTargetTest())
    self.clean_test.decorator_list = [
        ast.Call(func=ast.Name(id='patch'), args=[ast.Str(s=self.mockTarget)], keywords=[])
    ]
    # Counter for generated test suites.
    self.test_suite_count = 0
    self.mockMethodInfo = self.transferListMockMethodInfo(mockMethodInfo)
    # Mutates self.root, then writes the instrumented file to disk.
    self.injectMock()
    self.instrumentedTestFileName = self.writeToFile()
    self.mockedTestSuite = []
def extract_seed_ints(self):
    """Collect candidate seed integers from the target file.

    Every integer literal found in the parsed source contributes both its
    value and its negation; the special values -1, 0 and 1 are always
    included. Returns the seeds as a list of unique ints.
    """
    tree = astor.parse_file(self.filename)
    # Start from the always-useful boundary values.
    seeds = {-1, 0, 1}
    for node in ast.walk(tree):
        if isinstance(node, ast.Num) and isinstance(node.n, int):
            seeds.add(node.n)
            seeds.add(-node.n)
    return list(seeds)
def collect_functiondef(fp: str):
    """Collect all top-level functions and first-level class methods of *fp*.

    Appends a sentinel '__EOF__' entry whose start line is one past the end
    of the file, then returns the entries sorted.
    """
    module = astor.parse_file(fp)
    with open(fp, mode='r', encoding='utf-8') as handle:
        total_lines = len(handle.readlines())

    def make_entry(name, lineno):
        # Build one FunctionDef record for this file.
        record = FunctionDef()
        record.file = fp
        record.name = name
        record.start_lineno = lineno
        return record

    entries = []
    for top in module.body:
        if isinstance(top, ast.FunctionDef):
            entries.append(make_entry(top.name, top.lineno))
        elif isinstance(top, ast.ClassDef):
            entries.extend(
                make_entry(member.name, member.lineno)
                for member in top.body
                if isinstance(member, ast.FunctionDef)
            )
    # Sentinel marking the end of the file.
    entries.append(make_entry('__EOF__', total_lines + 1))
    return sorted(entries)
def pre_instrument(self):
    """Normalize the target function so every For/If/While has an orelse.

    Reads the target function name from a mandatory first-line marker
    '# target_function = <name>', pads every empty orelse with a Pass so
    later instrumentation can attach to the else branch, and writes the
    result to 'pre_<filename>'.

    Raises:
        SystemExit: via exit() when the marker or the function is missing.
    """
    root = astor.parse_file(self.filename)
    # Idiom fix: context manager instead of manual open/close.
    with open(self.filename) as temp_file:
        first_line = temp_file.readline()
    if first_line[:20] == '# target_function = ' and len(first_line.split()) == 4:
        self.funcname = first_line.split()[3]
    else:
        # Fix: typo "lien" -> "line" in the user-facing error message.
        print('ERROR : specify target function name in first line of file')
        exit()
    f = None
    for stmt in root.body:
        if isinstance(stmt, ast.FunctionDef) and stmt.name == self.funcname:
            f = stmt
            break
    else:
        # for/else: runs only when no matching function was found.
        print('ERROR : function <' + self.funcname + '> does not exist')
        exit()

    def dfs_pre_instrument(node):
        # Walk body/orelse statement lists; give every branching statement a
        # non-empty orelse, then recurse into it.
        for info in astor.iter_node(node):
            if info[1] == 'body' or info[1] == 'orelse':
                for stmt in info[0]:
                    # Idiom fix: isinstance with a tuple instead of an or-chain.
                    if isinstance(stmt, (ast.For, ast.If, ast.While)):
                        if len(stmt.orelse) == 0:
                            stmt.orelse.append(ast.Pass())
                        dfs_pre_instrument(stmt)

    dfs_pre_instrument(f)
    # Fix: the original leaked this handle; close deterministically.
    with open('pre_' + self.filename, 'w') as instrumented_file:
        instrumented_file.write(astor.to_source(root))
def examineCodeFile(code_file):
    """Check whether *code_file* parses as valid Python.

    On success, print the round-tripped source and return True; on a
    SyntaxError, print the error and return False.
    """
    try:
        tree = astor.parse_file(code_file)
        print(astor.to_source(tree))
        return True
    except SyntaxError as err:
        print(err)
        return False
def parse_codebase(self, codebase, *args, **kwargs):
    """Parse *codebase* into an AST and tag every node with a custom_id.

    ast.walk() yields the root module first, so the root itself is tagged
    with id(root) just like every other node.
    """
    tree = astor.parse_file(codebase)
    for node in ast.walk(tree):
        node.custom_id = id(node)
    return tree
def __init__(self, le_fichier: pathlib.Path):
    """Build the test-generation context for one Python source file.

    Args:
        le_fichier: path to the source file; a plain str is coerced to
            pathlib.Path.
    """
    # Accept a plain string path as a convenience.
    if isinstance(le_fichier, str):
        le_fichier = pathlib.Path(le_fichier)
    self.nom_fichier = self.get_nom_fichier(le_fichier)
    #self.nom_module = self.get_nom_module(le_fichier)
    # Module name is the filename without its suffix.
    self.nom_module = le_fichier.stem
    # Parsed AST of the file; the collectors below read from it.
    self.mon_ast = astor.parse_file(self.nom_fichier)
    self.mes_fonctions = self.getFonctions()
    self.mes_classes = self.getClasses()
    self.produire_test = self.produireTest()
def __init__(self, file_name, class_name=''):
    """Parse *file_name*, add parent back-links to every AST node, and visit it.

    Args:
        file_name: path of the Python file to analyze (made absolute).
        class_name: optional class to focus on; empty means whole module.
    """
    self._file_name: str = abspath(file_name)
    self._class_name: str = class_name
    # Collected function records, filled in by the visitor.
    self._func_list: List[FuncInfo] = []
    self._constructor = None
    tree = astor.parse_file(self._file_name)
    # Give every node a .parent attribute so visitors can walk upward;
    # the module root's parent is None.
    tree.parent = None
    for node in ast.walk(tree):
        for child in ast.iter_child_nodes(node):
            child.parent = node
    self.visit(tree)
def parse(filename, include_path, extra_args):
    """Parse *filename* as Python and run it through the Converter.

    Args:
        filename: path of the source file to convert.
        include_path: passed through by callers; not used here — TODO confirm.
        extra_args: passed through by callers; not used here — TODO confirm.
    """
    try:
        # Fix: the original bound the parsed tree to the name "ast",
        # shadowing the stdlib ast module within this function.
        tree = astor.parse_file(filename)
    except SyntaxError as e:
        # Catch and convert syntax error ASAP to avoid hiding real syntax errors
        parse_requires(False, e, loc=Location(e.filename, e.lineno, None, None))
    # print(astor.to_source(tree))
    # print(astor.dump(tree))
    return Converter().convert(tree, filename)
def getMethods(self):
    """Map each method of the mocked class to whether it is annotated '-> int'.

    Parses '<module>.py' derived from self.mockTarget ("<module>.<Class>").

    Returns:
        dict[str, bool]: method name -> True when the return annotation is
        literally the name "int".
    """
    method_dict = {}
    mock_ast = astor.parse_file(self.mockTarget.split(".")[0] + '.py')
    # assumes the class definition is the first top-level statement — TODO confirm
    # Fix: direct attribute access instead of __getattribute__ calls.
    classdef_ast = mock_ast.body[0]
    for node in ast.walk(classdef_ast):
        if isinstance(node, ast.FunctionDef):
            # Fix: plain comparison instead of operator.eq on the annotation id.
            return_int = isinstance(node.returns, ast.Name) and node.returns.id == "int"
            method_dict[node.name] = return_int
    return method_dict
def emit(infile, query, input_s, merge, outfile): keys = query.split(".") tree = astor.parse_file(infile) # parse_file(infile=infile, keys=keys) # print(astor.dump_tree(tree)) if merge == MergeStrategy.upsert: # TODO: Traverse and remove existent node(s) that match tree.body.append(ast.parse(input_s, filename="input_s")) else: raise NotImplementedError(merge) stream = stdout if outfile is None else open(outfile, "wt") stream.write(astor.to_source(tree))
def test_generate(schema_filename):
    """Generated module AST must match the checked-in fixture for this schema."""
    fixture_filename = FIXTURES_DIR / (schema_filename.name.split(".")[0] + ".py")
    schema = load_schema(schema_filename.read_text())
    expected_tree = astor.parse_file(fixture_filename)
    generated = Python3Generator(schema).generate().as_ast()
    # Print the rendered source so a mismatch is easy to diagnose.
    print(astor.to_source(generated))
    assert astor.dump_tree(generated) == astor.dump_tree(expected_tree)
def main(args):
    """Build CFGs for the functions of the input program and analyze them.

    Args:
        args: parsed CLI namespace providing input, output, func, constant,
            depth, iter and tries.
    """
    # ============================ read input program as code_string ============================
    input_program = args.input
    output_path = args.output
    # When set, only this function is analyzed; otherwise all functions are.
    selected_function_name = args.func
    check_constant = args.constant
    max_depth = args.depth
    max_iter = args.iter
    max_tries = args.tries
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    # ============================ Initialization ============================
    function_names = []
    function_CFGs = {}
    py_cfg = PyCFG()
    # create AST from source file; get string
    astree = astor.parse_file(input_program)
    code_string = astor.to_source(astree)
    results = []
    # ============================ CFG generation ============================
    # get CFG of each defined fn; index tracks the position of each function
    # so the selected one can be referenced by index later.
    index = 0
    for node in ast.walk(astree):
        if isinstance(node, ast.FunctionDef):
            function_names.append(node.name)
            if selected_function_name == node.name:
                selected_index = index
            function_CFGs[node.name] = py_cfg.gen_cfg(astor.to_source(node))
            index += 1
    # ============================ Analysis ============================
    # only check selected function
    # NOTE(review): if selected_function_name names a function that does not
    # exist in the file, selected_index is never bound and the call below
    # raises NameError — confirm callers validate the name first.
    if selected_function_name:
        print_func(selected_function_name)
        results += analyze_program(code_string, function_names, selected_index, py_cfg, max_depth, max_tries, max_iter, check_constant)
    # analyze all functions from input program
    else:
        for i in range(len(function_names)):
            print_func(function_names[i])
            results += analyze_program(code_string, function_names, i, py_cfg, max_depth, max_tries, max_iter, check_constant)
    generate_report(results, output_path, input_program)
def test_generate(schema_filename):
    """Marshmallow generator output must match its fixture; skip with a warning
    when the fixture file has not been written yet."""
    fixture_filename = FIXTURES_DIR / (schema_filename.name.split(".")[0] + ".py")
    schema = load_schema(schema_filename.read_text())
    try:
        fixture = astor.parse_file(fixture_filename)
    except FileNotFoundError:
        warnings.warn(f"Fixture not implemented yet: {os.path.basename(fixture_filename)}")
        return
    generated = Python3MarshmallowGenerator(schema).generate().as_ast()
    # Print the rendered source so a mismatch is easy to diagnose.
    print(astor.to_source(generated))
    assert astor.dump_tree(generated) == astor.dump_tree(fixture)
def transpile_file(filename_src: Path, out_dir: Path):
    """Transpile one source file into *out_dir*, keeping its base name.

    Parses the file, applies every registered transformation in order, and
    writes the regenerated source to the destination.

    Raises:
        TranspileException: wrapping any parse failure.
    """
    filename_dest = out_dir / filename_src.name
    logger.info(f"Transpiling file {filename_src} into {filename_dest}")

    try:
        tree = astor.parse_file(filename_src)
    except Exception as err:
        raise TranspileException(err)

    for transformation in get_transformations():
        transformation(tree)

    # Render before opening the destination so a rendering failure cannot
    # leave behind an empty output file.
    new_source = astor.to_source(tree)
    with open(filename_dest, "w") as out_handle:
        out_handle.write(new_source)
def run(graphene_schema_path: str, graphql_schema_path: str, strategy: str):
    """Run the search over the graphene schema for every query in the graphql schema.

    Parses the graphene schema file once, then for each query derives its
    search targets from a deep copy of the tree and executes the search on a
    second deep copy (so each search starts from a pristine tree).

    Args:
        graphene_schema_path: path to the graphene schema file.
        graphql_schema_path: path to the graphql schema file.
        strategy: search strategy to use.
    """
    schema_tree = astor.parse_file(graphene_schema_path)
    query_dict = get_query_dict(graphql_schema_path)
    for func_name, spec in query_dict.items():
        targets = get_targets(copy.deepcopy(schema_tree), func_name)
        do_search(
            copy.deepcopy(schema_tree),
            targets,
            strategy,
            func_name,
            spec["query"],
            spec["field_args_dict"],
        )
def gen_python_ward_enums(provinces: Sequence[Province]) -> str:
    """Render the ward enum module from its template.

    Loads '_enum_ward_template.py', strips the example members of WardEnum
    and WardDEnum (keeping only their docstrings), appends one member per
    ward for each class, and returns the regenerated module source.
    """
    template_file = Path(__file__).parent / '_enum_ward_template.py'
    module = astor.parse_file(template_file)
    class_defs = tuple(n for n in module.body if isinstance(n, ast.ClassDef))

    def strip_examples(enum_def):
        # Drop the template's example members; the docstring Expr survives.
        enum_def.body = deque(m for m in enum_def.body if isinstance(m, ast.Expr))

    ward_enum_def = next(n for n in class_defs if n.name == 'WardEnum')
    ward_desc_enum_def = next(n for n in class_defs if n.name == 'WardDEnum')
    strip_examples(ward_enum_def)
    strip_examples(ward_desc_enum_def)

    for province in provinces:
        for district in province.indexed_districts.values():
            for ward in district.indexed_wards.values():
                ward_enum_def.body.append(ward_enum_member(ward, district, province))
                ward_desc_enum_def.body.append(
                    ward_descriptive_enum_member(ward, district, province))
    return astor.to_source(module)
def parse(cls, granularity_level, path, target_files):
    """
    :param granularity_level: The granularity level of a program
    :type granularity_level: :py:class:`.program.GranularityLevel`
    :param str path: The project root path
    :param target_files: The paths to target files from the project root
    :type target_files: list(str)
    :return: The contents of the files, see `Hint`
    :rtype: dict(str, list(str))

    .. hint::
        - key: the file name
        - value: the contents of the file
    """
    assert isinstance(granularity_level, GranularityLevel)
    if granularity_level == GranularityLevel.LINE:
        # Line granularity: each file maps to its lines, right-stripped.
        contents = {}
        for target in target_files:
            with open(os.path.join(path, target), 'r') as target_file:
                contents[target] = [line.rstrip() for line in target_file.readlines()]
        return contents
    if granularity_level == GranularityLevel.AST:
        # AST granularity: each (Python) file maps to its parsed module.
        import ast
        import astor
        contents = {}
        for target in target_files:
            if not cls.is_python_code(target):
                raise Exception(
                    'Program',
                    '{} file is not supported'.format(
                        cls.get_file_extension(target)))
            contents[target] = astor.parse_file(os.path.join(path, target))
        return contents
    return None
def gen_python_district_enums(provinces: Sequence[Province]) -> str:
    """Render the province/district enum module from its template.

    Loads '_enum_district_template.py', strips the example members of the
    four enum classes (keeping only their docstrings), appends one member
    per province and per district, and returns the regenerated source.
    """
    template_file = Path(__file__).parent / '_enum_district_template.py'
    module = astor.parse_file(template_file)
    class_defs = tuple(n for n in module.body if isinstance(n, ast.ClassDef))

    def strip_examples(enum_def):
        # Drop the template's example members; the docstring Expr survives.
        enum_def.body = deque(m for m in enum_def.body if isinstance(m, ast.Expr))

    province_enum_def = next(n for n in class_defs if n.name == 'ProvinceEnum')
    province_enum_desc_def = next(n for n in class_defs if n.name == 'ProvinceDEnum')
    district_enum_def = next(n for n in class_defs if n.name == 'DistrictEnum')
    district_enum_desc_def = next(n for n in class_defs if n.name == 'DistrictDEnum')
    for enum_def in (province_enum_def, province_enum_desc_def,
                     district_enum_def, district_enum_desc_def):
        strip_examples(enum_def)

    for province in provinces:
        province_enum_def.body.append(province_enum_member(province))
        province_enum_desc_def.body.append(province_descriptive_enum_member(province))
        for district in province.indexed_districts.values():
            district_enum_def.body.append(district_enum_member(district, province))
            district_enum_desc_def.body.append(
                district_descriptive_enum_member(district, province))
    return astor.to_source(module)
def get_fns_from_file(filename, rootdir):
    """Extract docstring data for every function definition in *filename*.

    Returns a list of records, each prefixed with the filename; returns []
    when the file cannot be parsed or the visit recurses too deeply.
    """
    try:
        tree = astor.parse_file(filename)
    except (SyntaxError, UnicodeDecodeError, FileNotFoundError, MemoryError, ValueError):
        return []
    # Path of the file relative to the project root.
    # NOTE(review): computed but never used below — confirm it is not needed.
    rel_path = os.path.relpath(filename, start=rootdir)
    collected = []

    class _Collector(ast.NodeVisitor):
        def visit_FunctionDef(self, node):
            # Keep only functions that actually carry docstring data.
            data = extract_docstring(node)
            if data is not None:
                collected.append(data)

    try:
        _Collector().visit(tree)
    except RecursionError:
        return []
    return [[filename] + entry for entry in collected]
def getCorrectSourceCodeAST(self):
    """Parse the source-code file at ``self.sourceCode`` into a fresh AST."""
    return astor.parse_file(self.sourceCode)
cond_true.append(stmt.test) cond_false.append(neg(stmt.test)) cond_tree[branch_lineno.index(stmt.lineno)] = { 'T': cond_true, 'F': cond_false } extract_condition(stmt.body, cond_true) extract_condition(stmt.orelse, cond_false) elif type(stmt) == ast.Pass: pass else: raise Exception(ast.dump(stmt)) if __name__ == '__main__': tree = astor.parse_file('target.py') target_fn = None for node in tree.body: if type(node) == ast.Import: exec(astor.to_source(node)) elif type(node) == ast.FunctionDef: exec(astor.to_source(node)) target_fn = node c_branch(target_fn.body) branch_lineno = sorted(branch_lineno) global minlen minlen = minLength.minLength(target_fn)
def convert_str(before, after):
    """Round-trip *before* through astor and write the result to *after*.

    Args:
        before: path of the Python file to parse.
        after: destination path for the regenerated source.
    """
    root = astor.parse_file(before)
    # Fix: the original never closed the output handle; use a context
    # manager so the write is flushed and the file is closed deterministically.
    with open(after, "w") as after_file:
        after_file.write(astor.to_source(root))
def debug_py(infile):
    """Parse *infile* and walk the resulting tree with the DebugVisitor."""
    tree = astor.parse_file(infile)
    DebugVisitor().visit(tree)
def print_transformed(filename):
    """Parse *filename*, apply the loader's transform, and print the result."""
    from .loader import transform
    # Fix: the original bound the parsed tree to the name "ast", shadowing
    # the stdlib ast module within this function.
    tree = astor.parse_file(filename)
    print(astor.to_source(transform(tree)))
def load_fixture(name):
    """Parse and return the fixture '<name>.ast.json' from FIXTURES_DIR.

    NOTE(review): the file carries a '.ast.json' suffix but is parsed with
    astor.parse_file, i.e. as Python source — confirm the fixtures really
    contain Python code rather than JSON.
    """
    filename = FIXTURES_DIR / (name + ".ast.json")
    return astor.parse_file(filename)
def getCorrectTestFileAST(self):
    """Parse the test file at ``self.testFile`` into a fresh AST."""
    return astor.parse_file(self.testFile)
if __name__ == "__main__": # if len(sys.argv) < 2: # print("Please provide the name of test directory, e.g., '1-demo`") # sys.exit() app = "2-geek" # app = sys.argv[1] test_folder = join("finalized-tests", app) tests = [ f for f in listdir(test_folder) if isfile(join(test_folder, f)) and f.startswith("Test") ] for t in tests: if app == "groupon" and "TestSignUp" in t: continue logger.debug(t) tree = astor.parse_file(join(test_folder, t)) transformer = TestTransformer() transformer.transform_imports(tree) transformer.visit(tree) ast.fix_missing_locations(tree) with open(join(test_folder, AUG_PREFIX + t), "w", encoding="utf-8") as f: f.write(astor.to_source(tree)) make_config() with open(join(CONFIG_FOLDER, app, "config.json"), "r+", encoding="utf-8") as f: config = json.load(f) config["transfer_setting"][AUG_PREFIX + t.split(".")[0]] = { "web_test_path": join(test_folder, AUG_PREFIX + t).replace("\\", "/"),
# Fix: imports split one-per-line and grouped stdlib / third-party.
import argparse
import ast
import sys

import astor

if __name__ == '__main__':
    # CLI: rewrite the target program in place, forwarding any remaining
    # arguments to sys.argv for the rewritten program.
    parser = argparse.ArgumentParser(description='Rewrites programs.')
    parser.add_argument('-t', '--target', required=True)
    parser.add_argument("remaining", nargs="*")
    args = parser.parse_args()
    target = args.target
    sys.argv[1:] = args.remaining

    root = astor.parse_file(target)
    # implement rewriting routines here
    # make modifications to the AST
    modified = astor.to_source(root)
    # Fix: the original called f.close() inside the "with" block — redundant,
    # since the context manager already closes the file.
    with open(target, "w") as f:
        f.write(modified)
check_funcT = gen_check_func(modT) check_funcF = gen_check_func(modF) Tpoint = find_mini(check_funcT, argc) Fpoint = find_mini(check_funcF, argc) if Tpoint: print '| %dT ' % i + str(Tpoint)[1:-1] else: print '| %dT -' % i if Fpoint: print '| %dF ' % i + str(Fpoint)[1:-1] else: print '| %dF -' % i if __name__ == '__main__': if len(sys.argv) < 2: print 'usage: python covgen.py target.py' exit() root = astor.parse_file(sys.argv[1]) K = 1 base_width = 10 imports = filter(lambda x: isinstance(x,Import),root.body) for func in root.body: if not isinstance(func,FunctionDef): continue counter = 0 print 'function : ' + func.name func_test_gen(func) print