def test_get_ast():
    def f(): pass
    f_str = 'def f(): pass'
    ast_str_0 = astor.dump_tree(get_ast(f))
    ast_str_1 = astor.dump_tree(ast.parse(f_str).body[0])
    assert ast_str_0 == ast_str_1
def update_file(source, destination, filen, check_mode, update_args):
    """ update a python file

    Args:
        source (str): The source directory
        destination (str): The destination directory
        filen (str): The name of the file to be updated
        check_mode (bool): Perform or simply log
        update_args: Passed through to handle_function_def
    """
    source_tree = astor.code_to_ast.parse_file("%s/%s" % (source, filen))
    destination_tree = astor.code_to_ast.parse_file("%s/%s" % (destination, filen))
    for src_entry in source_tree.body:
        match = False
        for dst_entry in destination_tree.body:
            if astor.dump_tree(src_entry) == astor.dump_tree(dst_entry):
                match = True
                break
        if not match:
            if isinstance(src_entry, ast.Import):
                destination_tree = handle_import(
                    destination_tree=destination_tree,
                    src_entry=src_entry)
            elif isinstance(src_entry, ast.ImportFrom):
                destination_tree = handle_import_from(
                    destination_tree=destination_tree,
                    src_entry=src_entry)
            elif isinstance(src_entry, ast.FunctionDef):
                destination_tree = handle_function_def(
                    filen=filen,
                    destination_tree=destination_tree,
                    src_entry=src_entry,
                    update_args=update_args)
            else:
                PrintInColor.message(color='RED', action="unhandled", string=filen)
                print("-> %s" % astor.to_source(src_entry))
    destination_tree = sort_defs(destination_tree)
    destination_contents = astor.to_source(destination_tree)
    with open("%s/%s" % (destination, filen)) as fileh:
        source_contents = fileh.read()
    if source_contents != destination_contents:
        PrintInColor.message(color='YELLOW', action="updated",
                             string="%s/%s" % (destination, filen))
        PrintInColor.diff(left=source_contents, right=destination_contents)
        if not check_mode:
            with open("%s/%s" % (destination, filen), 'w') as fileh:
                fileh.write(destination_contents)
    else:
        PrintInColor.message(color='GREEN', action="unmodified",
                             string="%s/%s" % (destination, filen))
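A hedged invocation sketch for update_file(); the directory names, file name, and update_args value below are invented for illustration and are not taken from the code above:

# Hypothetical call: compare templates/tasks.py with project/tasks.py and only
# report the differences (check_mode=True skips writing the result back).
update_file(source="templates", destination="project", filen="tasks.py",
            check_mode=True, update_args=None)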
def test_remove_annotations(source, expected):
    source_ast = ast.parse(source)
    print(astor.dump_tree(source_ast))
    print(astor.dump_tree(ast.parse(expected)))
    RemoveAnnotationsTransformer(source_ast)
    result = astor.to_source(source_ast)
    expected_gen = astor.to_source(ast.parse(expected))
    assert result == expected_gen
def assertAstEqual(self, srctxt):
    """This asserts that the reconstituted source code can be compiled
    into the exact same AST as the original source code.
    """
    srctxt = canonical(srctxt)
    srcast = ast.parse(srctxt)
    dsttxt = astor.to_source(srcast)
    dstast = ast.parse(dsttxt)
    srcdmp = astor.dump_tree(srcast)
    dstdmp = astor.dump_tree(dstast)
    self.assertEqual(dstdmp, srcdmp)
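A minimal usage sketch, assuming the helper above lives on a unittest.TestCase subclass and that canonical() plus the ast/astor imports are available in the same module; the test class name and source text are illustrative only:

import unittest

class RoundTripTestCase(unittest.TestCase):
    # assertAstEqual is assumed to be defined on this class as shown above.
    def test_round_trip_assignment(self):
        self.assertAstEqual("x = (1 + 2) * 3\n")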
def test_fstring_to_format(source, expected):
    source_ast = ast.parse(source)
    print(astor.dump_tree(source_ast))
    print(astor.dump_tree(ast.parse(expected)))
    FStringToFormatTransformer(source_ast)
    print(astor.dump_tree(source_ast))
    result = astor.to_source(source_ast)
    expected_gen = astor.to_source(ast.parse(expected))
    assert result == expected_gen
def test_underscore_numeric_literals(source, expected):
    source_ast = ast.parse(source)
    print(astor.dump_tree(source_ast))
    print(astor.dump_tree(ast.parse(expected)))
    UnderscoreNumericLiteralsTransformer(source_ast)
    print(astor.dump_tree(source_ast))
    result = astor.to_source(source_ast)
    expected_gen = astor.to_source(ast.parse(expected))
    assert result == expected_gen
def visit_ImportFrom(self, node):
    self.generic_visit(node)
    print('NameFrom--->:', type(node).__name__)
    print(astor.dump_tree(node.names))
    print(astor.dump_tree(node))
    module_name = node.module
    print("module_name--->", module_name)
    for func_name in node.names:
        print(func_name.name)
        full_name = module_name + '.' + func_name.name
        self.from_import_dict.update({func_name.name: full_name})
def test_generate(schema_filename):
    fixture_filename = FIXTURES_DIR / (schema_filename.name.split(".")[0] + ".py")
    schema = load_schema(schema_filename.read_text())
    fixture = astor.parse_file(fixture_filename)
    generator = Python3Generator(schema)
    result = generator.generate().as_ast()
    result_ast = astor.dump_tree(result)
    expected = astor.dump_tree(fixture)
    print(astor.to_source(result))
    assert result_ast == expected
def save_or_return_source(f_file, f_mod, glbls, return_source, save_source):
    if return_source or save_source:
        try:
            source = astor.to_source(f_mod)
        except Exception as ex:  # pragma: nocover
            raise RuntimeError(astor.dump_tree(f_mod)) from ex
    else:
        source = None

    if return_source:
        return source

    f_mod = ast.fix_missing_locations(f_mod)
    if save_source:
        temp = tempfile.NamedTemporaryFile('w', delete=False)
        f_file = temp.name

    exec(compile(f_mod, f_file, 'exec'), glbls)
    func = glbls[f_mod.body[0].name]

    if save_source:
        func.__tempfile__ = temp
        # When there are other decorators, the co_firstlineno of *some* python distributions gets confused
        # and thinks they will be there even when they are not written to the file, causing readline overflow
        # So we put some empty lines to make them align
        temp.write(source)
        temp.write('\n' * func.__code__.co_firstlineno)
        temp.flush()
        temp.close()

    return func
def process_stmt(self, sentence, node, var, expression):
    if len(sentence) > 0:
        last_stmt = sentence[-1]
        last_stmt_target = last_stmt.targets[0]
        last_stmt_val = last_stmt.value
        if last_stmt in self.one_predecessor_stmts and stmt_contains_var(
                expression, last_stmt_target.id):
            expression = SubstituteVarWithExpression(
                last_stmt_target.id, last_stmt_val).visit(expression)
            sentence.remove(last_stmt)
    target = ast.Name(id=var, ctx=ast.Store())
    assignement = ast.Assign(targets=[target], value=expression)
    print(astor.dump_tree(assignement))
    if not node.id in self.cycle_nodes and len(node.predecessors) <= 1:
        self.rhs_assignement_expressions[node.id] = expression
    elif len(node.predecessors) <= 1:
        self.one_predecessor_stmts.append(assignement)
        sentence.append(assignement)
    else:
        sentence.append(assignement)
def florit(line, cell):
    """
    Cell magic
    :param line:
    :param cell:
    :return:
    """
    # TODO: Test on IPython (Tested on Jupyter)
    tree = ast.parse(cell)
    logger.debug(astor.dump_tree(tree))
    for idx, each in enumerate(tree.body):
        visitor = Visitor(False, os.path.abspath(util.get_notebook_name()))
        visitor.visit(each)
        visitor.consolidate_structs()
        transformer = Transformer(visitor.__structs__, [])
        transformer.visit(each)
        tree.body[idx] = each
    injected.log_record_buffer = []
    injected.log_record_flag = True
    shell = get_ipython().get_ipython()
    shell.run_cell(astunparse.unparse(tree))
    injected.file.close()
    injected.file = open(global_state.log_name, 'a')
    df = Tree(injected.log_record_buffer).get_df()
    injected.log_record_buffer = []
    injected.log_record_flag = False
    return df
def test_dump(ast_person):
    expected = "Module(\n" \
               "    body=[\n" \
               "        ClassDef(name='Person',\n" \
               "            bases=[],\n"
    ans = astor.dump_tree(ast_person)
    assert ans[:len(expected)] == expected
def visit(self, node):
    orig_node_code = astor.to_source(node).strip()
    log.debug("Starting to visit >> {} << ({})".format(
        orig_node_code, type(node)))
    new_node = super().visit(node)
    try:
        if new_node is None:
            log.debug("Deleted >>> {} <<<".format(orig_node_code))
        elif isinstance(new_node, ast.AST):
            log.debug("Converted >>> {} <<< to >>> {} <<<".format(
                orig_node_code, astor.to_source(new_node).strip()))
        elif isinstance(new_node, list):
            log.debug("Converted >>> {} <<< to [[[ {} ]]]".format(
                orig_node_code,
                ", ".join(astor.to_source(n).strip() for n in new_node)))
    except Exception as ex:
        log.error("Failed on {} >>> {}".format(orig_node_code,
                                               astor.dump_tree(new_node)),
                  exc_info=ex)
        raise ex
    return new_node
def print_file_ast():
    with open(__file__) as fp:
        this_source = fp.read()
    this_ast = ast.parse(this_source)
    print("AST:")
    print("astor.dump_tree(this_ast)")
    print(astor.dump_tree(this_ast))
def lpcompile(function):
    function_ast = ast.parse(inspect.getsource(function)).body[0]
    logging.debug("Python AST:\n{}\n".format(astor.dump(function_ast)))
    parser = frontend.LambdaPackParse()
    type_checker = frontend.LambdaPackTypeCheck()
    lp_ast = parser.visit(function_ast)
    logging.debug("IR AST:\n{}\n".format(astor.dump_tree(lp_ast)))
    lp_ast_type_checked = type_checker.visit(lp_ast)
    logging.debug("typed IR AST:\n{}\n".format(
        astor.dump_tree(lp_ast_type_checked)))

    def f(*args, **kwargs):
        backend_generator = frontend.BackendGenerate(*args, **kwargs)
        backend_generator.visit(lp_ast_type_checked)
        return backend_generator.remote_calls

    return f
def __call__(self, f):
    f_mod, f_body, f_file = function_ast(f)

    # Grab function closure variables
    free_vars = self._get_free_vars(f)

    if self.imports:
        f_body, free_vars = self._insert_imports(f, f_body, free_vars)

    func_def = f_mod.body[0]
    new_kws = [
        ast.arg(arg=k, annotation=self._annotate(k, v))
        for k, v in free_vars
    ]
    new_kw_defaults = [self._get_default(k, v) for k, v in free_vars]

    # python 3.8 introduced a new signature for ast.arguments.__init__, so use whatever they use
    ast_arguments_dict = func_def.args.__dict__
    ast_arguments_dict['kwonlyargs'] += new_kws
    ast_arguments_dict['kw_defaults'] += new_kw_defaults

    new_func_def = ast.FunctionDef(
        name=func_def.name,
        body=f_body,
        decorator_list=[],  # func_def.decorator_list,
        returns=func_def.returns,
        args=ast.arguments(**ast_arguments_dict))
    f_mod.body[0] = new_func_def

    if self.return_source or self.save_source:
        try:
            source = astor.to_source(f_mod)
        except ImportError:  # pragma: nocover
            raise ImportError(
                "miniutils.pragma.{name} requires 'astor' to be installed to obtain source code"
                .format(name=lift.__name__))
        except Exception as ex:  # pragma: nocover
            raise RuntimeError(astor.dump_tree(f_mod)) from ex
    else:
        source = None

    if self.return_source:
        return source
    else:
        f_mod = ast.fix_missing_locations(f_mod)
        if self.save_source:
            temp = tempfile.NamedTemporaryFile('w', delete=True)
            f_file = temp.name

        no_globals = {}
        exec(compile(f_mod, f_file, 'exec'), no_globals)
        func = no_globals[f_mod.body[0].name]

        if self.save_source:
            func.__tempfile__ = temp
            temp.write(source)
            temp.flush()

        return func
def test_parse_ast():
    input_vector = """
def step(self, **kwargs):
    def deco(fn: typing.Callable[[Client, dict], dict]):
        return fn
    return deco
"""
    node = ast.parse(input_vector)
    print(astor.dump_tree(node))
def inner(f):
    f_mod, f_body, f_file = function_ast(f)

    if not explicit_only:
        # Grab function globals
        glbls = f.__globals__.copy()

        # Grab function closure variables
        if isinstance(f.__closure__, tuple):
            glbls.update({
                k: v.cell_contents
                for k, v in zip(f.__code__.co_freevars, f.__closure__)
            })
    else:
        # Initialize empty context
        if function_globals is None and len(kwargs) == 0:
            log.warning(
                "No global context nor function context. No collapse will occur"
            )
        glbls = dict()

    # Apply manual globals override
    if function_globals is not None:
        glbls.update(function_globals)
    # print({k: v for k, v in glbls.items() if k not in globals()})

    trans = transformer_type(DictStack(glbls, kwargs), **transformer_kwargs)
    trans.collapse_iterables = collapse_iterables
    trans.unroll_targets = unroll_targets
    trans.unroll_in_tiers = unroll_in_tiers
    f_mod.body[0].decorator_list = []
    f_mod = trans.visit(f_mod)
    # print(astor.dump_tree(f_mod))

    if return_source or save_source:
        try:
            source = astor.to_source(f_mod)
        except Exception as ex:  # pragma: nocover
            raise RuntimeError(astor.dump_tree(f_mod)) from ex
    else:
        source = None

    if return_source:
        return source
    else:
        f_mod = ast.fix_missing_locations(f_mod)
        if save_source:
            temp = tempfile.NamedTemporaryFile('w', delete=False)
            f_file = temp.name

        exec(compile(f_mod, f_file, 'exec'), glbls)
        func = glbls[f_mod.body[0].name]

        if save_source:
            func.__tempfile__ = temp
            # When there are other decorators, the co_firstlineno of *some* python distributions gets confused
            # and thinks they will be there even when they are not written to the file, causing readline overflow
            # So we put some empty lines to make them align
            temp.write(source)
            temp.write('\n' * func.__code__.co_firstlineno)
            temp.flush()
            temp.close()

        return func
def transform(path, transformer):
    with open(path) as f:
        source = f.read()
    tree = ast.parse(source)
    old_tree_dump = astor.dump_tree(tree)
    transformer.visit(tree)
    transformer.visit(tree)
    transformer.visit(tree)
    if old_tree_dump == astor.dump_tree(tree):
        return
    source = astor.to_source(tree)
    with open(path, 'w') as f:
        f.write(source)
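A usage sketch for transform(); the transformer class below is made up for illustration (it deletes assert statements) and is not part of the code above:

import ast

class _DropAsserts(ast.NodeTransformer):
    """Illustrative transformer: remove every assert statement."""
    def visit_Assert(self, node):
        return None  # returning None from a NodeTransformer drops the node

# Rewrites example.py in place, but only if the tree actually changed.
transform('example.py', _DropAsserts())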
def rewrite(
        self,
        tree: ast.AST,
        env: SymbolTable,
        metadata: tp.MutableMapping,
) -> PASS_ARGS_T:
    def _do_dumps(dumps, dump_writer):
        for dump in dumps:
            dump_writer(f'BEGIN {dump[0]}\n')
            dump_writer(dump[1].strip())
            dump_writer(f'\nEND {dump[0]}\n\n')

    dumps = []
    if self.dump_ast:
        dumps.append(('AST', astor.dump_tree(tree)))
    if self.dump_src:
        dumps.append(('SRC', astor.to_source(tree)))
    if self.dump_env:
        dumps.append(('ENV', repr(env)))
    if self.dump_source_filename:
        if "source_filename" not in metadata:
            raise Exception("Cannot dump source filename without "
                            "@begin_rewrite(debug=True)")
        dumps.append(('SOURCE_FILENAME', metadata["source_filename"]))
    if self.dump_source_lines:
        if "source_lines" not in metadata:
            raise Exception("Cannot dump source lines without "
                            "@begin_rewrite(debug=True)")
        lines, start_line_number = metadata["source_lines"]
        dump_str = "".join(f"{start_line_number + i}:{line}"
                           for i, line in enumerate(lines))
        dumps.append(('SOURCE_LINES', dump_str))

    if self.file is not None:
        if self.append:
            mode = 'a'
        else:
            mode = 'w'
        with open(self.dump_file, mode) as fp:
            _do_dumps(dumps, fp.write)
    else:
        def _print(*args, **kwargs):
            print(*args, end='', **kwargs)
        _do_dumps(dumps, _print)

    if self.interactive:
        # Launch a repl loop
        code.interact(
            banner=('Warning: modifications to tree, env, and metadata '
                    'will have side effects'),
            local=dict(tree=tree, env=env, metadata=metadata),
        )

    return tree, env, metadata
def test_attribute_decorator(self):
    # pylint: disable=protected-access
    node = ast.parse("import contextlib\n"
                     "@contextlib.contextmanager\n"
                     "def tag(name):\n"
                     "    pass\n")
    import astor
    print(astor.dump_tree(node.body[1]))
    deco = PUML_Generator._deco_marker(node.body[1].decorator_list[0])
    assert deco == '@contextlib.contextmanager'
def test_generate(schema_filename):
    fixture_filename = FIXTURES_DIR / (schema_filename.name.split(".")[0] + ".py")
    schema = load_schema(schema_filename.read_text())
    try:
        fixture = astor.parse_file(fixture_filename)
    except FileNotFoundError:
        warnings.warn(f"Fixture not implemented yet: {os.path.basename(fixture_filename)}")
        return
    generator = Python3MarshmallowGenerator(schema)
    result = generator.generate().as_ast()
    result_ast = astor.dump_tree(result)
    expected = astor.dump_tree(fixture)
    print(astor.to_source(result))
    assert result_ast == expected
def calc_dist(self, node):
    if isinstance(node, If) or isinstance(node, While):
        if isinstance(node.test, Compare):
            distance = None
            for i in range(0, len(node.test.ops)):
                # sum up distances of all comparisons
                operation = astor.dump_tree(node.test.ops[0])
                if i == 0:
                    distance = self.dist_functs(operation)(
                        node.test.left, node.test.comparators[0])
                else:
                    distance = BinOp(
                        left=distance,
                        op=Add(),
                        right=self.dist_functs(operation)(
                            node.test.comparators[i-1],
                            node.test.comparators[i])
                    )
            return distance
        else:
            if isinstance(node.test, Name):
                # distance = abs(test_var)
                return Call(
                    func=Name(id='abs', ctx=Load()),
                    args=[Name(id=node.test, ctx=Load())],
                    keywords=[],
                    starargs=None,
                    kwargs=None)
            # no known test; return 1 for now
            return Num(n=1)
    else:
        if isinstance(node, For):
            if isinstance(node.iter, Call):
                if (node.iter.func.id == 'range' or
                        node.iter.func.id == 'xrange'):
                    # distance = lower_limit - upper_limit
                    if len(node.iter.args) == 1:
                        return UnaryOp(
                            op=USub(),
                            operand=node.iter.args[0])
                    if len(node.iter.args) >= 2:
                        return BinOp(
                            left=node.iter.args[0],
                            op=Sub(),
                            right=node.iter.args[1])
            # the for-loop does not iterate over a range but a list,
            # tuple, ... -> therefore there is only one element missing
            # for the loop to execute if it doesn't execute
            return Num(n=1)
def test_getast_of_function(self):
    def func(self, dt):
        y = 1
        # zzz = "this should not be visible"
        x = 22
        if x < 30:
            y = 50
        else:
            y = 100.5
        y += 3
        return y * 4

    tree = SH.getast(func)
    print(astor.dump_tree(tree))
def pythonToAst(filename):
    pythonToGenericAst = {'print': 'logStatement'}
    srcAst = astor.code_to_ast.parse_file(filename)
    body = srcAst.body
    print(astor.dump_tree(srcAst))
    convertedAst = {'type': 'root', 'body': []}
    for statement in body:
        if type(statement) == ast.Expr:
            convertedAst['body'].append(
                handleExpression(statement, pythonToGenericAst))
        if type(statement) == ast.Assign:
            convertedAst['body'].append(handleAssign(statement))
    return convertedAst
def gen_code(setlx_file, py_file):
    try:
        gen_tree = transpile(setlx_file, False)
    except Exception as e:
        TestCase.fail(f"cannot transpile {setlx_file}:{str(e)}")
    with open(py_file) as f:
        py_code = f.read()
    py_tree = ast.parse(py_code)
    gen_ast = astor.dump_tree(gen_tree)
    py_ast = astor.dump_tree(py_tree)
    if gen_ast != py_ast:
        warnings.warn(Warning(
            f"generated ast from {os.path.basename(setlx_file)} does not match py file ({os.path.basename(py_file)})"))
    code = None
    try:
        code = astor.to_source(gen_tree)
    except Exception as e:
        TestCase.fail(
            f"ERROR: invalid generated ast for {setlx_file}:{str(e)}")
    return (code, gen_ast, py_ast)
def op_count(py_path):
    py_file = open(py_path, 'r')
    op_ast = ast.parse(py_file.read())

    # Add parent attribute to each node
    for node in ast.walk(op_ast):
        for child in ast.iter_child_nodes(node):
            child.parent = node

    print(astor.dump_tree(op_ast))

    # insert global _count list
    op_ast.body.insert(0, ast.Assign(
        targets=[ast.Name(id='_count', ctx=ast.Store())],
        value=ast.List(elts=[ast.Num(n=0, ctx=ast.Store()),
                             ast.Num(n=0, ctx=ast.Store()),
                             ast.Num(n=0, ctx=ast.Store()),
                             ast.Num(n=0, ctx=ast.Store())],
                       ctx=ast.Load())))

    # insert output function
    op_ast.body.append(ast.Expr(value=ast.Call(
        func=ast.Name(id='print', ctx=ast.Load()),
        args=[ast.Name(id='_count', ctx=ast.Load())],
        keywords=[])))

    AddCounts().visit(op_ast)
    DeclGlobalCountForEachFunc().visit(op_ast)
    ast.fix_missing_locations(op_ast)
    print(astor.dump_tree(op_ast))
    exec(compile(op_ast, filename="<ast>", mode="exec"))
def test_simple_inheritance():
    # cdef = astor.code_to_ast("class MyClass (MyParent): pass").body[0]
    cdef = ast.parse("class MyClass (MyParent): pass").body[0]
    # ast and astor give different dump results:
    assert ast.dump(
        cdef
    ) == "ClassDef(name='MyClass', bases=[Name(id='MyParent', ctx=Load())], keywords=[], body=[Pass()], decorator_list=[])"
    assert astor.dump_tree(
        cdef
    ) == "ClassDef(name='MyClass', bases=[Name(id='MyParent')], keywords=[], body=[Pass], decorator_list=[])"
    # list bases:
    exprs = [astor.to_source(base).rstrip() for base in cdef.bases]
    assert exprs == ['MyParent']
def inject_code(self, class_visitor, node):
    location_to_be_inserted = []
    for idx, obj in enumerate(node.body):
        visitor = class_visitor()
        visitor.visit(obj)
        if visitor.found:
            if isinstance(obj, (ast.Expr, ast.Call)):
                location_to_be_inserted.append(idx)
            elif isinstance(obj, (ast.If, ast.For)):
                self.inject_code(class_visitor, obj)
            else:
                # should not print
                print(astor.dump_tree(obj))
    if len(location_to_be_inserted):
        for idx in location_to_be_inserted[::-1]:
            node.body.insert(idx + 1, self.inject_code_ast_tree)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("file", help="Source to process")
    parser.add_argument("-o", "--output", help="Where to write patched code")
    args = parser.parse_args()
    path = args.file
    tree = ast.parse(open(path, "r").read())
    new_tree = NodeTransformer().visit(tree)
    source = astor.to_source(new_tree)
    print(astor.dump_tree(tree))
    print(source)
    if args.output:
        open(args.output, "w").write(source)
def inner(f):
    f_mod, f_body, f_file = _function_ast(f)

    # Grab function globals
    glbls = f.__globals__

    # Grab function closure variables
    if isinstance(f.__closure__, tuple):
        glbls.update({k: v.cell_contents
                      for k, v in zip(f.__code__.co_freevars, f.__closure__)})

    # Apply manual globals override
    if function_globals is not None:
        glbls.update(function_globals)
    # print({k: v for k, v in glbls.items() if k not in globals()})

    trans = transformer_type(DictStack(glbls, kwargs), **transformer_kwargs)
    f_mod.body[0].decorator_list = []
    f_mod = trans.visit(f_mod)
    # print(astor.dump_tree(f_mod))

    if return_source or save_source:
        try:
            source = astor.to_source(f_mod)
        except ImportError:  # pragma: nocover
            raise ImportError("miniutils.pragma.{name} requires 'astor' to be installed to obtain source code"
                              .format(name=name))
        except Exception as ex:  # pragma: nocover
            raise RuntimeError(astor.dump_tree(f_mod)) from ex
    else:
        source = None

    if return_source:
        return source
    else:
        # func_source = astor.to_source(f_mod)
        f_mod = ast.fix_missing_locations(f_mod)
        if save_source:
            temp = tempfile.NamedTemporaryFile('w', delete=True)
            f_file = temp.name

        exec(compile(f_mod, f_file, 'exec'), glbls)
        func = glbls[f_mod.body[0].name]

        if save_source:
            func.__tempfile__ = temp
            temp.write(source)
            temp.flush()

        return func
def check(a, b):
    ast_a = ast.parse(a)
    ast_b = ast.parse(b)
    dump_a = astor.dump_tree(ast_a)
    dump_b = astor.dump_tree(ast_b)
    self.assertEqual(dump_a == dump_b, fast_compare(ast_a, ast_b))
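Hypothetical calls, assuming check() is nested inside a test method with fast_compare in scope; the source strings are illustrative:

check("x = 1", "x = 1")      # equal trees: the dump comparison and fast_compare should agree
check("x = 1", "x = 1 + 1")  # different trees: both comparisons should again agree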
generate_steps('-(2 + 4)')
print ''

tree = ast.parse('5 * 5 / 2 + 10 - 1 % 6')
#print astor.dump_tree(tree)
generate_steps('5 * 5 / 2 + 10 - 1 % 6')
print ''

tree = ast.parse('((5 * 5) / 2) + ((10 - 1) % 6)')
#print astor.dump_tree(tree)
generate_steps('((5 * 5) / 2) + ((10 - 1) % 6)')
print ''

tree = ast.parse('x = ((9 - 2) + 4) + 2')
#print astor.dump_tree(tree)
generate_steps('x = ((9 - 2) + 4) + 2')
print ''

tree = ast.parse('answer = 115 + 5')
print astor.dump_tree(tree)
generate_steps('answer = 115 + 5')
print ''

tree = ast.parse('print 4')
print astor.dump_tree(tree)
generate_steps('print 4')
print ''

tree = ast.parse('(True and False) or False')
print astor.dump_tree(tree)
generate_steps('True and False')
print ''

tree = ast.parse('not False')
print astor.dump_tree(tree)
generate_steps('not False')
print ''

tree = ast.parse('\"justinefrancis\"[1:12]')
def assertAstEqual(self, ast1, ast2):
    dmp1 = astor.dump_tree(ast1)
    dmp2 = astor.dump_tree(ast2)
    self.assertEqual(dmp1, dmp2)
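A small usage sketch, assuming the method above sits on a unittest.TestCase subclass; it relies on astor.dump_tree producing identical dumps for sources that differ only in formatting:

self.assertAstEqual(ast.parse("x=1;y=2"), ast.parse("x = 1\ny = 2\n"))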