def __init__(self):
    # Expose every callable from the helper `function` module so generated
    # code can call it, and record a matching ImportFrom node for each one.
    bindings = {}
    imports = []
    for name in dir(function):
        candidate = getattr(function, name)
        if not callable(candidate):
            continue
        bindings[name] = candidate
        imports.append(ast.ImportFrom(
            'vectordatasource.meta.function',
            [ast.alias(name, None)],
            0,
        ))
    # The util module is exposed whole: "from vectordatasource import util".
    bindings['util'] = util
    imports.append(ast.ImportFrom(
        'vectordatasource',
        [ast.alias('util', None)],
        0,
    ))
    self.import_asts = imports
    self.scope = bindings
def visit_Module(self, node):
    """Inject the hesitate.driver import just after the module's docstring
    and __future__ imports, then visit every remaining statement."""
    # Statements that must stay at the very top of the module.
    header = list(itertools.takewhile(
        lambda stmt: (self._is_docstring(stmt) or
                      self._is_future_import(stmt)),
        node.body))
    transformed = [self.visit(stmt) for stmt in node.body[len(header):]]
    injected = ast.ImportFrom(
        module='hesitate.driver',
        names=[
            ast.alias(name='should_assert',
                      asname=self.ASSERTION_TEST_IMPORTED_NAME),
            ast.alias(name='timed',
                      asname=self.ASSERTION_TIMER_IMPORTED_NAME),
        ],
        lineno=1,
        col_offset=0,
        level=0)
    if header:
        # Reuse the location of the first header statement for the import.
        injected = ast.copy_location(injected, header[0])
    return ast.Module(
        body=header + [injected] + transformed,
        lineno=1,
        col_offset=0)
def test_simple_statements(self):
    # Simple statements can be put on a single line as long as the scope
    # has not changed.
    # Table of (statement node, expected one-line rendering); each is
    # wrapped in an `if 42:` so the printer must keep it on one line.
    for body, expect in [(ast.Expr(ast.Num(42)), '42'),
                         (ast.Import([ast.alias('a', None)]), 'import a'),
                         (ast.ImportFrom('b', [ast.alias('a', None)], 1),
                          'from .b import a'),
                         (ast.Break(), 'break'),
                         (ast.Continue(), 'continue'),
                         (ast.Pass(), 'pass'),
                         (ast.Assign([ast.Name('X', ast.Store())], ast.Num(42)),
                          'X=42'),
                         (ast.Delete([ast.Name('X', ast.Del())]), 'del X'),
                         (ast.Raise(None, None), 'raise'),
                         (ast.Return(None), 'return'),
                         (ast.AugAssign(ast.Name('X', ast.Store()), ast.Add(),
                                        ast.Num(42)), 'X+=42'),
                         (ast.Assert(ast.Num(42), None), 'assert 42'),
                         (ast.Global(['x']), 'global x'),
                         (ast.Nonlocal(['x']), 'nonlocal x'),
                         ]:
        if_simple = ast.If(ast.Num(42), [body], None)
        self.verify(if_simple, 'if 42:{}'.format(expect))
    # Several simple statements in one suite join with ';'.
    if_multiple_simples = ast.If(ast.Num(42), [ast.Pass(), ast.Pass()], None)
    self.verify(if_multiple_simples, 'if 42:pass;pass')
    # A nested compound statement forces the suite onto multiple lines,
    # but runs of simple statements around it still join with ';'.
    inner_if = ast.If(ast.Num(6), [ast.Pass()], None)
    funky_if = ast.If(ast.Num(42), [ast.Break(), ast.Continue(), inner_if,
                                    ast.Break(), ast.Continue()], None)
    self.verify(funky_if,
                'if 42:\n break;continue\n if 6:pass\n break;continue')
def test_import_from():
    # Builder DSL should produce: from bar import foo, foo as baz
    expected = ast.ImportFrom(
        module="bar",
        names=[ast.alias(name="foo", asname=None),
               ast.alias(name="foo", asname="baz")],
        level=0,
    )
    built = import_from.bar[alias.foo, alias["foo", "baz"]]
    assert eq(built, expected)
def test_Import(self):
    # Single module, no alias.
    self.verify(ast.Import([ast.alias('spam', None)]), 'import spam')
    # Single module with an alias.
    self.verify(ast.Import([ast.alias('spam', 'bacon')]),
                'import spam as bacon')
    # A mix of plain and aliased names in one statement.
    names = [ast.alias('spam', None),
             ast.alias('bacon', 'bacn'),
             ast.alias('eggs', None)]
    self.verify(ast.Import(names), 'import spam,bacon as bacn,eggs')
def _process_instr_import_name(instr, queue, stack, body, context):
    """
    Process an IMPORT_NAME instruction.

    Side Effects
    ------------
    Pops two instructions from `stack`.
    Consumes instructions from `queue` to the end of the import statement.
    Appends an ast.Import or ast.ImportFrom node to `body`.
    """
    # If this is "import module", fromlist is None.
    # If this is "from module import a, b", fromlist will be ('a', 'b').
    fromlist = stack.pop().arg
    # level argument to __import__.  Should be 0, 1, or 2.
    level = stack.pop().arg
    module = instr.arg
    if fromlist is None:  # Regular import.
        attr_loads = _pop_import_LOAD_ATTRs(module, queue)
        store = queue.popleft()
        # There are two cases where we should emit an alias:
        #   import a as <anything but a>
        #   import a.b.c as <anything (including a)>
        if attr_loads or module.split('.')[0] != store.arg:
            asname = store.arg
        else:
            asname = None
        # BUG FIX: ast.Import has no `level` field; the original passed
        # `level=level` here, which only set a spurious attribute on the
        # node (and is rejected by newer CPython AST constructors).
        body.append(
            ast.Import(
                names=[
                    ast.alias(
                        name=module,
                        asname=asname,
                    ),
                ],
            ),
        )
        return
    elif fromlist == ('*',):  # From module import *.
        expect(queue.popleft(), instrs.IMPORT_STAR, "after IMPORT_NAME")
        body.append(
            ast.ImportFrom(
                module=module,
                names=[ast.alias(name='*', asname=None)],
                level=level,
            ),
        )
        return
    # Consume a pair of IMPORT_FROM, STORE_NAME instructions for each entry
    # in fromlist.
    names = list(map(make_importfrom_alias(queue, body, context), fromlist))
    body.append(ast.ImportFrom(module=module, names=names, level=level))
    # Remove the final POP_TOP of the imported module.
    expect(queue.popleft(), instrs.POP_TOP, "after 'from import'")
def p_module_encoding(p):  # noqa
    """module_encoding : ENCODING module"""
    global debug_on
    # Prelude injected in front of every parsed module.
    header = [
        ast.ImportFrom('concat.libconcat', [ast.alias('*', None)], 0),
        ast.Import([ast.alias('concat.stdlib.builtins', None)]),
    ]
    if debug_on:
        header.extend(ast.parse('stack.debug = True').body)
    p[0] = ast.Module(header + p[2].body)
    _set_line_info(p)
def compile_import_expression(self, expr):
    # Compile a Hy (import ...) form into ast.Import / ast.ImportFrom nodes.
    def _compile_import(expr, module, names=None, importer=ast.Import):
        # NOTE(review): `module=` is not a field of ast.Import (only of
        # ImportFrom); for plain imports it just sets an extra attribute.
        return [
            importer(
                lineno=expr.start_line,
                col_offset=expr.start_column,
                module=ast_str(module),
                names=names or [ast.alias(name=ast_str(module), asname=None)],
                level=0,
            )
        ]

    expr.pop(0)  # index
    rimports = []
    while len(expr) > 0:
        iexpr = expr.pop(0)
        # Bare symbol: (import foo)
        if isinstance(iexpr, HySymbol):
            rimports += _compile_import(expr, iexpr)
            continue
        # One-element list: (import [foo])
        if isinstance(iexpr, HyList) and len(iexpr) == 1:
            rimports += _compile_import(expr, iexpr.pop(0))
            continue
        if isinstance(iexpr, HyList) and iexpr:
            module = iexpr.pop(0)
            entry = iexpr[0]
            # (import [foo :as bar])
            if isinstance(entry, HyKeyword) and entry == HyKeyword(":as"):
                assert len(iexpr) == 2, "garbage after aliased import"
                iexpr.pop(0)  # :as
                alias = iexpr.pop(0)
                rimports += _compile_import(
                    expr, ast_str(module),
                    [ast.alias(name=ast_str(module), asname=ast_str(alias))]
                )
                continue
            # (import [foo [bar baz :as quux]])
            if isinstance(entry, HyList):
                names = []
                while entry:
                    sym = entry.pop(0)
                    # A keyword after a name introduces its alias.
                    if entry and isinstance(entry[0], HyKeyword):
                        entry.pop(0)
                        alias = ast_str(entry.pop(0))
                    else:
                        alias = None
                    names += [ast.alias(name=ast_str(sym), asname=alias)]
                rimports += _compile_import(expr, module, names,
                                            ast.ImportFrom)
                continue
            raise TypeError("Unknown entry (`%s`) in the HyList" % (entry))
    # A single import is returned bare; several come back as a list.
    if len(rimports) == 1:
        return rimports[0]
    else:
        return rimports
def run(self, mod):
    """Find all assert statements in *mod* and rewrite them."""
    if not mod.body:
        # Nothing to do.
        return
    # Insert some special imports at the top of the module but after any
    # docstrings and __future__ imports.
    aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
               ast.alias("_pytest.assertion.rewrite", "@pytest_ar")]
    doc = getattr(mod, "docstring", None)
    expect_docstring = doc is None
    if doc is not None and self.is_rewrite_disabled(doc):
        # Module docstring opts out of rewriting entirely.
        return
    pos = 0
    lineno = 1
    for item in mod.body:
        if (expect_docstring and isinstance(item, ast.Expr) and
                isinstance(item.value, ast.Str)):
            doc = item.value.s
            if self.is_rewrite_disabled(doc):
                return
            expect_docstring = False
        elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
              item.module != "__future__"):
            # First statement that is not a docstring / __future__ import:
            # the injected imports go right before it.
            lineno = item.lineno
            break
        pos += 1
    else:
        # Module consists solely of docstring/__future__ imports.
        lineno = item.lineno
    imports = [ast.Import([alias], lineno=lineno, col_offset=0)
               for alias in aliases]
    mod.body[pos:pos] = imports
    # Collect asserts.  Iterative DFS over every statement list.
    nodes = [mod]
    while nodes:
        node = nodes.pop()
        for name, field in ast.iter_fields(node):
            if isinstance(field, list):
                new = []
                for i, child in enumerate(field):
                    if isinstance(child, ast.Assert):
                        # Transform assert.
                        new.extend(self.visit(child))
                    else:
                        new.append(child)
                        if isinstance(child, ast.AST):
                            nodes.append(child)
                setattr(node, name, new)
            elif (isinstance(field, ast.AST) and
                  # Don't recurse into expressions as they can't contain
                  # asserts.
                  not isinstance(field, ast.expr)):
                nodes.append(field)
def test_combining_ImportFrom(self):
    # Combine ImportFrom when the 'from' clause matches.
    module = ast.Module([
        ast.ImportFrom('X', [ast.alias('Y', None)], 1),
        ast.ImportFrom('X', [ast.alias('Z', None)], 1),
    ])
    new_ast = self.transform.visit(module)
    self.assertEqual(len(module.body), 1)
    merged = new_ast.body[0]
    self.assertEqual(len(merged.names), 2)
    expected = (('Y', None), ('Z', None))
    for alias, (name, asname) in zip(merged.names, expected):
        self.assertEqual(alias.name, name)
        self.assertEqual(alias.asname, asname)
def test_ImportFrom(self):
    # (node, expected source) pairs covering plain, relative, and
    # multi-name from-imports.
    cases = [
        # from X import Y
        (ast.ImportFrom('X', [ast.alias('Y', None)], 0),
         'from X import Y'),
        # from . import Y
        (ast.ImportFrom(None, [ast.alias('Y', None)], 1),
         'from . import Y'),
        # from .X import Y
        (ast.ImportFrom('X', [ast.alias('Y', None)], 1),
         'from .X import Y'),
        # from X import Y, Z
        (ast.ImportFrom('X', [ast.alias('Y', None), ast.alias('Z', None)], 0),
         'from X import Y,Z'),
    ]
    for node, expected in cases:
        self.verify(node, expected)
def run(self, mod):
    """Find all assert statements in *mod* and rewrite them."""
    if not mod.body:
        # Nothing to do.
        return
    # Insert some special imports at the top of the module but after any
    # docstrings and __future__ imports.
    aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
               ast.alias("_pytest.assertion.rewrite", "@pytest_ar")]
    expect_docstring = True
    pos = 0
    lineno = 0
    for item in mod.body:
        if (expect_docstring and isinstance(item, ast.Expr) and
                isinstance(item.value, ast.Str)):
            doc = item.value.s
            if "PYTEST_DONT_REWRITE" in doc:
                # The module has disabled assertion rewriting.
                return
            # NOTE(review): this counts characters, not lines, in the
            # docstring — looks suspicious but preserved as-is.
            lineno += len(doc) - 1
            expect_docstring = False
        # BUG FIX: the condition previously read
        # `item.level > 0 and item.identifier != "__future__"`, which
        # (a) accessed a non-existent `identifier` attribute on
        # ast.ImportFrom (the field is `module`), and (b) because of the
        # `and`, treated every absolute `from x import y` as if it were a
        # __future__ import.  Only `from __future__ import ...` at level 0
        # may precede the injected imports.
        elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
              item.module != "__future__"):
            lineno = item.lineno
            break
        pos += 1
    imports = [ast.Import([alias], lineno=lineno, col_offset=0)
               for alias in aliases]
    mod.body[pos:pos] = imports
    # Collect asserts.  BFS over every statement list in the module.
    nodes = collections.deque([mod])
    while nodes:
        node = nodes.popleft()
        for name, field in ast.iter_fields(node):
            if isinstance(field, list):
                new = []
                for i, child in enumerate(field):
                    if isinstance(child, ast.Assert):
                        # Transform assert.
                        new.extend(self.visit(child))
                    else:
                        new.append(child)
                        if isinstance(child, ast.AST):
                            nodes.append(child)
                setattr(node, name, new)
            elif (isinstance(field, ast.AST) and
                  # Don't recurse into expressions as they can't contain
                  # asserts.
                  not isinstance(field, ast.expr)):
                nodes.append(field)
def generate(self, element:Element, GC:GenerationContext):
    # Compile a lisp-style `import` special form into ast.Import /
    # ast.ImportFrom statements.
    acode = element.code
    imports_list = []        # aliases destined for one plain ast.Import
    import_statements = []   # completed statements to return
    with GC.let(domain=ExDom):
        for import_element in acode[1:]:
            # `a.b.c` — plain dotted module import.
            if _is_module_path(import_element):
                to_import_name = _get_module_path(import_element)
                imports_list.append(ast.alias(name=to_import_name, asname=None))
            # `(as x.y.z name)` — aliased module import.
            elif is_form(import_element.code, "as"):
                to_import_name = _get_module_path(import_element.code[1])
                to_import_asname = _get_name(import_element.code[2])
                imports_list.append(ast.alias(name=to_import_name,
                                              asname=to_import_asname))
            # `(u.v.w var1 (as var2 v))` — from-import of selected names.
            elif is_form(import_element.code) or is_seq(import_element.code):
                to_import_module_name = _get_module_path(import_element.code[0])
                imported_names_from_module = []
                for to_import_item_element in import_element.code[1:]:
                    if is_identifier(to_import_item_element):
                        to_import_name = _get_name(to_import_item_element)
                        imported_names_from_module.append(
                            ast.alias(name=to_import_name, asname=None))
                    elif is_form(to_import_item_element.code, "as"):
                        to_import_name = _get_name(to_import_item_element.code[1])
                        to_import_asname = _get_name(to_import_item_element.code[2])
                        imported_names_from_module.append(
                            ast.alias(name=to_import_name,
                                      asname=to_import_asname))
                import_statements.append(
                    ast.ImportFrom(to_import_module_name,
                                   imported_names_from_module, 0))
            else:
                raise CodeGenerationError(import_element.range,
                    "Special form `import` expected an import specifier but found `%s`."
                    "For example:"
                    "```"
                    "import"
                    " a.b.c"
                    " x.y.z as name"
                    " u.v.w( var1, var2 as v )"
                    "```" % succinct_lisp_printer(import_element))
    # All plain/aliased module imports are merged into one ast.Import.
    if len(imports_list) > 0:
        import_statements.append(ast.Import(imports_list))
    return import_statements
def visit_Module(self, node):
    """ Add the imports needed to run symbolically """
    node = self.generic_visit(node)
    if self.se_dict:
        # Built but currently unused — see the commented-out branch below.
        import_se_dict = ast.ImportFrom(module="se_dict", names=[ast.alias(name="SeDict", asname=None)], level=0)
    import_instrumentation = ast.ImportFrom(module="symbolic.instrumentation", names=[ast.alias(name="whichBranch", asname=None)], level=0)
    import_extract = ast.ImportFrom(module="symbolic.symbolic_types", names=[ast.alias(name="getConcrete", asname=None)], level=0)
    # ord_str is a source snippet defined elsewhere in this module;
    # its parsed statements are spliced in ahead of the user code.
    ord_function = ast.parse(ord_str).body
    #if self.se_dict:
    #    node.body = [import_se_dict,import_instrumentation,import_extract] + ord_function + node.body
    #else:
    node.body = [import_instrumentation,import_extract] + ord_function + node.body
    return node
def test_interleaved_ImportFrom(self):
    # Test prevention of statement merging: an interposed Import, a level
    # mismatch, or a different 'from' module must each block combining.
    statements = [
        ast.ImportFrom('X', [ast.alias('Y', None)], 1),
        ast.Import([ast.alias('X', None)]),               # separated by Import
        ast.ImportFrom('X', [ast.alias('Z', None)], 1),
        ast.ImportFrom('X', [ast.alias('W', None)], 2),   # different level
        ast.ImportFrom('Z', [ast.alias('Y', None)], 2),   # different 'from' clause
    ]
    module = ast.Module(statements)
    self.transform.visit(module)
    self.assertEqual(len(module.body), 5)
def test_interleaved_statements(self):
    # Do not combine if something sits between the Import statements.
    module = ast.Module([
        ast.Import([ast.alias('X', None)]),
        ast.ImportFrom('Z', [ast.alias('W', None)], 0),
        ast.Import([ast.alias('Y', None)]),
    ])
    new_ast = self.transform.visit(module)
    self.assertEqual(len(new_ast.body), 3)
    expected_types = (ast.Import, ast.ImportFrom, ast.Import)
    for given, expect in zip(new_ast.body, expected_types):
        self.assertIsInstance(given, expect)
    last = new_ast.body[2]
    self.assertEqual(len(last.names), 1)
    self.assertEqual(last.names[0].name, 'Y')
def new_nodes(self): nodes = [] # first turn all the from imports back into proper nodes for (level, module), names in self.from_imports.iteritems(): for nm, asnm in sorted(names): node = ast.ImportFrom(module=module, names=[ast.alias(name=nm, asname=asnm)], level=level) nodes.append((self._node_sort_key(node), node)) # then build the normal imports again for nm, asnm in self.imports: node = ast.Import(names=[ast.alias(name=nm, asname=asnm)]) nodes.append((self._node_sort_key(node), node)) return nodes
def visit_Module(self, node): super(TemplateCodeGenerator, self).visit_Module(node) # Make sure we terminate the line printer self.visit_Pass(None) # Clear lines array for import visits body = self.lines self.lines = [] while self.defines: name, node = self.defines.popitem() assignment = ast.Assign(targets=[store(name)], value=node) self.visit(assignment) # Make sure we terminate the line printer self.visit_Pass(None) # Clear lines array for import visits defines = self.lines self.lines = [] while self.imports: value, node = self.imports.popitem() if isinstance(value, types.ModuleType): stmt = ast.Import( names=[ast.alias(name=value.__name__, asname=node.id)]) elif hasattr(value, '__name__'): path = reverse_builtin_map.get(value) if path is None: path = value.__module__ name = value.__name__ stmt = ast.ImportFrom( module=path, names=[ast.alias(name=name, asname=node.id)], level=0, ) else: raise TypeError(value) self.visit(stmt) # Clear last import self.visit_Pass(None) # Stich together lines self.lines += defines + body
def dedup_imports(self, list_of_imports):
    """Collapse duplicate imported module names.

    Later occurrences win for the asname.  Returns one single-name
    ast.Import per unique module, in first-seen order.
    """
    seen = {}
    for stmt in list_of_imports:
        for entry in stmt.names:
            seen[entry.name] = entry.asname
    return [
        ast.Import(names=[ast.alias(name=mod, asname=asname)])
        for mod, asname in seen.items()
    ]
def _to_initializer_body(
        signature_data: SignatureData,
        field_name_factory: Operator[str],
        instance_object_name: str = SELF_PARAMETER_NAME) -> List[ast.stmt]:
    # Build the statements of a synthesized __init__: one attribute
    # assignment per parameter, plus `import types` when any parameter is
    # callable (it gets bound as a method via types.MethodType).
    if signature_data:
        def to_right_hand_value(parameter_name: str,
                                is_callable: bool) -> ast.expr:
            # Callable parameters become
            #   types.MethodType(lambda self: <param>, self)
            # so they behave like bound methods; plain parameters are
            # assigned directly.
            return (ast.Call(
                ast.Attribute(
                    _to_loaded_name(TYPES_MODULE_ALIAS),
                    'MethodType',
                    ast.Load()),
                [
                    ast.Lambda(_to_unit_signature(instance_object_name),
                               _to_loaded_name(parameter_name)),
                    _to_loaded_name(instance_object_name)
                ], [])
                    if is_callable
                    else _to_loaded_name(parameter_name))

        return ([
            ast.Import([ast.alias(TYPES_MODULE_NAME, TYPES_MODULE_ALIAS)])
        ] + [
            ast.Assign([
                ast.Attribute(ast.Name(instance_object_name, ast.Load()),
                              field_name_factory(parameter_name),
                              ast.Store())
            ], to_right_hand_value(parameter_name, is_callable))
            for parameter_name, _, is_callable in flatten(signature_data.values())
        ])
    else:
        # No parameters: the body must still be non-empty.
        return [ast.Pass()]
def add_import_statement(self, source, module_name, *subpackages):
    """Queue an import node for `source`, skipping exact duplicates.

    With `subpackages` given, builds `from module_name import p1, p2, ...`;
    otherwise a plain `import module_name`.
    """
    key = (module_name, subpackages)
    if key in self._import_set[source]:
        return
    if subpackages:
        node = ast.ImportFrom(module=module_name, names=[
            ast.alias(name=pkg, asname=None) for pkg in subpackages
        ], level=0)
    else:
        # BUG FIX: ast.Import has no `level` field; the original passed
        # `level=0`, which only set a spurious attribute on the node (and
        # is rejected by newer CPython AST constructors).
        node = ast.Import(names=[ast.alias(name=module_name, asname=None)])
    self._import_set[source].add(key)
    self._import_nodes[source].append(node)
def visit_Module(self, node):
    ''' Inject tracing logic on top module '''
    self.generic_visit(node)
    # list of statements/expr to be prepended to body
    prebody = []
    # "import ftracer"
    line = ast.Import([ast.alias('ftracer', None)])
    prebody.append(line)
    # ftrace.set_trace
    attr = ast.Attribute(ast.Name('ftracer'), 'set_trace', ast.Load())
    # ftrace.set_trace(<target>,<run>)
    # NOTE(review): the arguments are ast.Name nodes whose ids are the
    # quoted module paths — presumably `quoted` wraps them so they render
    # as string literals when the tree is unparsed; verify against caller.
    call = ast.Call(func=attr, args=[
        ast.Name(quoted(self.target_mpath)),
        ast.Name(quoted(self.run_mpath))
    ], keywords=[])
    # ftrace.set_trace(...)
    line = ast.Expr(call)
    prebody.append(line)
    node.body = prebody + node.body
    ast.fix_missing_locations(node)
    return node
def build_fn(self):
    # Assemble a function from source fragments (name/args/body/init/
    # imports held on self), compile it, and return the function object.
    def_mod_ast = ast.parse("def %s(%s): pass" % (self.fn_name, self.args))
    check.is_instance(def_mod_ast, ast.Module)
    def_ast = def_mod_ast.body[0]
    check.is_instance(def_ast, ast.FunctionDef)
    # Replace the placeholder `pass` body with the real source.
    body_ast = ast.parse(self.src)
    check.is_instance(body_ast, ast.Module)
    def_ast.body = body_ast.body
    if self.init:
        # Module-level initialization statements go before the def.
        init_mod_ast = ast.parse(self.init)
        check.is_instance(init_mod_ast, ast.Module)
        def_mod_ast.body[:0] = init_mod_ast.body
    if self.imports:
        # Imports may be plain names or (name, asname) pairs; each is
        # inserted at the front, so they end up in reverse order.
        for i, _import in enumerate(self.imports):
            if isinstance(_import, tuple):
                name, asname = _import
            elif isinstance(_import, str):
                name, asname = _import, None
            else:
                raise TypeError(_import)
            import_ast = ast.Import(names=[ast.alias(name=name, asname=asname)], lineno=i, col_offset=0)
            def_mod_ast.body.insert(0, import_ast)
    code = compile(def_mod_ast, "<pure>", "exec")
    code_namespace = self.ns if self.ns else {}
    six.exec_(code, code_namespace)
    fn = code_namespace[self.fn_name]
    if self.name:
        fn.__name__ = str(self.name)
    return fn
def parse(code):
    """Annotate user code.

    Return annotated code (str) if annotation detected; return None if not.
    code: original user code (str)
    """
    try:
        ast_tree = ast.parse(code)
    except Exception:
        raise RuntimeError('Bad Python code')
    transformer = Transformer()
    try:
        transformer.visit(ast_tree)
    except AssertionError as exc:
        # NOTE(review): `last_line` is not a standard ast.Module attribute;
        # presumably set by Transformer while visiting — verify.
        raise RuntimeError('%d: %s' % (ast_tree.last_line, exc.args[0]))
    if not transformer.annotated:
        return None
    # Insert `import nni` after the last `from __future__ import ...`
    # (or at the very top if there is none).
    last_future_import = -1
    import_nni = ast.Import(names=[ast.alias(name='nni', asname=None)])
    nodes = ast_tree.body
    for i, _ in enumerate(nodes):
        if type(nodes[i]) is ast.ImportFrom and nodes[i].module == '__future__':
            last_future_import = i
    nodes.insert(last_future_import + 1, import_nni)
    return astor.to_source(ast_tree)
def make_tree(nodes):
    """
    Build an AST from a map from names to nodes.

    Plain imports are kept as-is, from-imports are regrouped per module
    (names sorted), and all other definitions are appended afterwards.

    :param nodes: A map from names to nodes.
    """
    tree = ast.parse('')
    grouped_from = collections.defaultdict(list)
    body_defs = []
    for label, stmt in nodes.items():
        if type(stmt) == ast.Import:
            tree.body.append(stmt)
        elif type(stmt) == ast.ImportFrom:
            grouped_from[stmt.module].append(label)
        else:
            body_defs.append(stmt)
    tree.body.extend(
        ast.ImportFrom(mod,
                       [ast.alias(n, None) for n in sorted(labels)],
                       0)
        for mod, labels in grouped_from.items()
    )
    tree.body.extend(body_defs)
    return tree
def _gen_import_from(module_name, func_name_list): return ast.ImportFrom(module=module_name, names=[ ast.alias(name=func_name, asname=None) for func_name in func_name_list ], level=0)
def _build_st_import_statement(): """Build AST node for `import streamlit as __streamlit__`.""" return ast.Import( names=[ast.alias( name='streamlit', asname='__streamlit__', )], )
def add_required_imports(module_name, module_ast, module_context):
    # Prepend the from-imports needed by this module, resolving each
    # required class name through ImportHandler's registry.
    imports = module_context.get_imports()
    if has_type_var(module_ast):
        # TypeVar usage requires importing the name itself.
        imports.add('TypeVar')
    module_to_names = {}
    for imp in imports:
        if imp not in ImportHandler.class_to_module:
            continue
        # NOTE(review): `mod` is later unpacked as (mod, level), so
        # class_to_module values are presumably (module, level) tuples —
        # verify against ImportHandler.
        mod = ImportHandler.class_to_module[imp]
        if mod in module_to_names:
            module_to_names[mod].append(imp)
        else:
            module_to_names[mod] = [imp]
    for (mod, level), names in module_to_names.items():
        if mod == module_name:
            # Never import a module from itself.
            continue
        aliases = [ast.alias(name=name, asname=None) for name in names]
        module_ast.body.insert(0, ast.ImportFrom(
            module=mod,
            names=aliases,
            level=level
        ))
def find_dialectimport_ast(self, tree):
    """Find the first dialect-import statement by scanning the AST `tree`.

    Transform the dialect-import into `import ...`, where `...` is the absolute
    module name the dialects are being imported from. As a side effect, import
    the dialect definition module.

    Primarily meant to be called with `tree` the AST of a module that
    uses dialects, but works with any `tree` that has a `body` attribute,
    where that `body` is a `list` of statement AST nodes.

    A dialect-import is a statement of the form::

        from ... import dialects, ...

    Return value is a dict `{dialectname: class, ...}` with all collected
    bindings from that one dialect-import. Each binding is a dialect, so usually
    there is just one.
    """
    for index, statement in enumerate(tree.body):
        if ismacroimport(statement, magicname="dialects"):
            break
    else:
        # No dialect-import found anywhere in the body.
        return "", {}
    module_absname, bindings = get_macros(statement, filename=self.filename, reload=False, allow_asname=False)
    # Remove all names to prevent dialects being used as regular run-time objects.
    # Always use an absolute import, for the unhygienic expose API guarantee.
    tree.body[index] = ast.copy_location(
        ast.Import(names=[ast.alias(name=module_absname, asname=None)]),
        statement)
    return module_absname, bindings
def _insert_imports(self, f, f_body, free_vars):
    # Turn module-valued globals and free variables of `f` into explicit
    # `import` statements at the top of the rewritten body.  Returns the
    # new body and the remaining (non-module) free variables.
    add_imports = []
    for k, v in f.__globals__.items():
        if isinstance(v, ModuleType):
            add_imports.append((k, v))
    # Split free vars: modules become imports, the rest stay free.
    old_free_vars = free_vars
    free_vars = []
    for k, v in old_free_vars:
        if isinstance(v, ModuleType):
            add_imports.append((k, v))
        else:
            free_vars.append((k, v))
    # Keep a leading docstring (if any) ahead of the inserted imports.
    if isinstance(f_body[0], ast.Expr) and isinstance(
            f_body[0].value, _ast_str_types):
        f_docstring = f_body[:1]
        f_body = f_body[1:]
    else:
        f_docstring = []
    # `self.imports` is either a bool (import everything) or a whitelist
    # of names; `_exclude` is always skipped.
    f_body = f_docstring + [
        ast.Import(names=[
            ast.alias(name=v.__name__,
                      asname=k if k != v.__name__ else None)
        ])
        for k, v in add_imports
        if (isinstance(self.imports, bool) or k in self.imports)
        and k not in _exclude
    ] + f_body
    return f_body, free_vars
def visit_Module(self, node):
    """Prepend the regex/automata prelude (`import re`,
    `from lib.automata import *`, `groups = []`) to the module body."""
    prelude = [
        ast.Import(names=[ast.alias(name='re', asname=None)]),
        ast.ImportFrom(
            module='lib.automata',
            names=[ast.alias(name='*', asname=None)],
            level=0,
        ),
        ast.Assign(
            targets=[ast.Name(id='groups', ctx=ast.Store())],
            value=ast.List(elts=[], ctx=ast.Load()),
        ),
    ]
    rebuilt = ast.Module(body=prelude + node.body)
    return ast.copy_location(rebuilt, node)
def test_refactor(
    self,
    skip_import,
    _expand_import_star,
    _get_used_names,
    _transform,
    skip_import_return,
    _expand_import_star_return,
    _get_used_names_return,
    _transform_return,
    expand_stars,
    mode,
    original_lines,
    expec_fixed_lines,
):
    # Parametrized test: the first four arguments are patched mocks, the
    # next four their stubbed return values, then config knobs and the
    # input/expected source lines.
    skip_import.return_value = skip_import_return
    _expand_import_star.return_value = _expand_import_star_return
    _get_used_names.return_value = _get_used_names_return
    _transform.return_value = _transform_return
    setattr(self.configs, "expand_stars", expand_stars)
    setattr(self.configs, mode, True)
    # Seed the session with a single `import x` statement.
    node = Import(NodeLocation((1, 0), 1), [ast.alias(name="x", asname=None)])
    self.session_maker._import_stats = ImportStats({node}, set())
    # Silence stdout/stderr chatter produced by the refactor pass.
    with sysu.std_redirect(sysu.STD.OUT):
        with sysu.std_redirect(sysu.STD.ERR):
            fixed_code = self.session_maker._refactor(original_lines)
    assert fixed_code == "".join(expec_fixed_lines)
def _update_Import(self, node, stmt_list, idx): if not any(x for x in node.names if x.name == self._from_mod): return new_names = [] for i, alias in enumerate(node.names[:]): if alias.name == self._from_mod: new_names.append(alias) del node.names[i] if not node.names: del stmt_list[idx] if self._to_mod and self._to_id: for alias in new_names: new_node = ast.ImportFrom(module=self._to_mod, level=0, names=[ alias, ]) stmt_list.insert(idx, ast.copy_location(new_node, node)) elif self._to_mod: for alias in new_names: new_node = ast.Import(names=[ ast.alias(self._to_mod, alias.asname), ]) stmt_list.insert(idx, ast.copy_location(new_node, node))
def get_typing_imports(self, source):
    """Return a single `if typing.TYPE_CHECKING:` block holding the
    relative from-imports needed for forward references, or [] if every
    required name is already imported."""
    already_imported = defaultdict(list)
    for key, value in self._import_set[source]:
        already_imported[key].extend(list(value))

    import_nodes = []
    for module, objects in self._typing_imports[source].items():
        known = already_imported[f"commercetools.types.{module}"]
        wanted = [
            ast.alias(name=obj, asname=None)
            for obj in sorted(objects)
            if obj not in known
        ]
        if wanted:
            import_nodes.append(
                ast.ImportFrom(module=module, names=wanted, level=1))

    if not import_nodes:
        return []
    import_nodes.sort(key=operator.attrgetter("module"))
    guard = ast.If(
        test=ast.Attribute(value=ast.Name(id="typing"), attr="TYPE_CHECKING"),
        body=import_nodes,
        orelse=[],
    )
    return [guard]
def __init__ (self, script=None, file=None, tree=None, globals=None, locals=None, **kwargs):
    # Compile an ayrton script from a source string, a file path, or an
    # already-parsed tree, injecting the CommandWrapper import first.
    if script is None and file is not None:
        # it's a pity that compile() does not accept a file as input
        # so we could avoid reading the whole file
        script= open (file).read ()
    else:
        # Placeholder filename for the code object when no file was given.
        file= 'arg_to_main'
    self.environ= Environment (globals, locals, **kwargs)
    if tree is None:
        tree= ast.parse (script)
    # ImportFrom(module='bar', names=[alias(name='baz', asname=None)], level=0)
    node= ImportFrom (module='ayrton', names=[alias (name='CommandWrapper', asname=None)], level=0)
    node.lineno= 0
    node.col_offset= 0
    ast.fix_missing_locations (node)
    tree.body.insert (0, node)
    tree= CrazyASTTransformer(self.environ).visit (tree)
    self.options= {}
    self.source= compile (tree, file, 'exec')
def visit_Module(self, node):
    """Visit all children; if any of them flagged `need_import`, prepend
    `import numpy` to the module body."""
    self.need_import = False
    self.generic_visit(node)
    if self.need_import:
        numpy_import = ast.Import(
            names=[ast.alias(name='numpy', asname=None)])
        node.body.insert(0, numpy_import)
    return node
def visit_ImportFrom(self, node):
    """Expand `from mod import *` into the explicit name list registered
    for `mod` in MODULES; other from-imports pass through untouched.

    BUG FIX: the original mutated `node.names` while iterating it —
    `node.names.pop()` removes the *last* alias, which is the `*` entry
    only by luck — and, worse, fell off the end without returning `node`
    for imports with no `*`, which makes an ast.NodeTransformer silently
    delete those statements.
    """
    expanded = []
    for alias in node.names:
        if alias.name == '*':
            expanded.extend(
                ast.alias(fname, None) for fname in MODULES[node.module])
        else:
            expanded.append(alias)
    node.names = expanded
    return node
def find_macros(tree, *, filename, reload=False):
    '''Establish macro bindings from `tree`. Top-level entrypoint.

    Collect bindings from each macro-import statement (`from ... import macros,
    ...`) at the top level of `tree.body`. Transform each macro-import into
    `import ...`, where `...` is the absolute module name the macros are being
    imported from. As a side effect, import the macro definition modules.

    Primarily meant to be called with `tree` the AST of a module that
    uses macros, but works with any `tree` that has a `body` attribute.

    `filename`: str, full path to the `.py` being macroexpanded, for resolving
                relative macro-imports and for error reporting. In interactive
                use, can be an arbitrary label.

    `reload`: enable only if implementing a REPL. Will refresh modules, causing
              different uses of the same macros to point to different function objects.

    Return value is a dict `{macroname: function, ...}` with all collected bindings.
    '''
    bindings = {}
    for index, statement in enumerate(tree.body):
        if ismacroimport(statement):
            # Later macro-imports override earlier bindings of the same name.
            module_absname, more_bindings = get_macros(statement, filename=filename, reload=reload)
            bindings.update(more_bindings)
            # Remove all names to prevent macros being used as regular run-time objects.
            # Always use an absolute import, for the unhygienic expose API guarantee.
            tree.body[index] = copy_location(
                Import(names=[alias(name=module_absname, asname=None)]),
                statement)
    return bindings
def enum_to_class(enum: Enum) -> ResolvedClassResult:
    """Convert Enum into AST class definition.

    Produces `@enum.unique class <name>(enum.Enum): ...` with one
    string-valued member per symbol, sorted by value, plus the
    `import enum` statement the class needs.
    """
    enum_import = ast.Import(names=[ast.alias(name='enum', asname=None)])
    class_body = []
    if enum.doc:
        class_body.append(docstring_declaration(enum.doc))
    # Each symbol becomes `<rendered name> = "<symbol>"`.
    members = [
        ast.Expr(value=ast.Assign(targets=[render_enum_name(symbol)],
                                  value=ast.Str(s=symbol)))
        for symbol in enum.symbols
    ]
    # Sort members by their string value for stable output.
    class_body.extend(sorted(members, key=lambda e: e.value.value.s))
    enum_class = ast.ClassDef(
        name=enum.name,
        bases=[ast.Attribute(value=ast.Name(id='enum'), attr='Enum')],
        keywords=[],
        body=class_body,
        decorator_list=[
            ast.Attribute(value=ast.Name(id='enum'), attr='unique')
        ]  # just for signalling purposes
    )
    return ResolvedClassResult(
        resolved_class=enum_class,
        imports=[enum_import],
        new_frontier=[],
    )
def init_globals(opts, input_file):
    # Build the global namespace for the user's pipeline expressions:
    # `pp` is the stdin stream, `files`/`ff` the extra input files, and
    # any modules named with --import are imported into it.
    def make_stream(f):
        # Wrap a file object in a lazy Stream of newline-stripped Lines.
        return Stream(imap(lambda x: Line(x.rstrip('\n\r')), iter(f)))
    pp = make_stream(input_file)
    globs = builtins.copy()
    globs['pp'] = pp
    # Extra search paths take priority over the existing sys.path.
    for path in opts.import_paths:
        path = os.path.abspath(path)
        if path not in sys.path:
            sys.path.insert(0, path)
    # Import requested modules by compiling a synthetic `import m` node
    # and evaluating it inside the user namespace.
    for import_mod in opts.imports:
        import_node = ast.Import(
            names=[ast.alias(name=import_mod, asname=None)])
        code = compile(
            ast.fix_missing_locations(ast.Module(body=[import_node])),
            'import %s' % (import_mod, ), 'exec')
        eval(code, globs)
    for eval_str in opts.evals:
        try:
            _exec(eval_str, globs)
        except SyntaxError as e:
            raise Exit("got error: %s\nwhile evaluating: %s" % (e, eval_str))
    files = [make_stream(open(f)) for f in opts.files]
    globs['files'] = files
    if len(files) > 0:
        # convenience for single file operation
        globs['ff'] = files[0]
    return globs
def test_fqdn_importfrom(self):
    def check(local_name, module, name, asname, expected):
        entry = ast.alias(name=name, asname=asname)
        stmt = ast.ImportFrom(module=module, names=[entry])
        assert pi.ImportedName(local_name, stmt, entry).canonical_name == expected

    # 'from os import path'
    check("path", "os", "path", None, "os.path")
    # deep dotted module path
    check("four", "one.two.three", "four", None, "one.two.three.four")
    # alias: canonical name uses the real name, not the alias
    check("four", "one.two.three", "fourth_module", "four",
          "one.two.three.fourth_module")
def visit_Import(self, node):
    """Record alias -> module mappings, then replace the statement with
    one that imports each module under its rename()d alias."""
    for entry in node.names:
        if entry.asname:
            aliases[entry.asname] = entry.name
    renamed = [
        ast.alias(name=entry.name, asname=rename(entry.name))
        for entry in node.names
    ]
    return ast.Import(names=renamed)
def insert_npu_import(r_node):
    """Add NPU import modules"""
    npu_import = ast.ImportFrom(
        module='npu_bridge.npu_init',
        names=[ast.alias(name='*', asname=None)],
        level=0)
    # Only scan the first five statements for an existing import to
    # anchor the insertion to.
    limit = min(5, len(r_node.body))
    fallback_index = 0
    for i in range(limit):
        stmt = r_node.body[i]
        # Insert right before the first plain import or any
        # non-__future__ from-import.
        if isinstance(stmt, ast.Import) or (
                isinstance(stmt, ast.ImportFrom)
                and stmt.module != "__future__"):
            r_node.body.insert(i, npu_import)
            log_msg(i, "from npu_bridge.npu_init import *")
            return
        fallback_index = i + 1
    # No anchor found: insert after the scanned prefix.
    r_node.body.insert(fallback_index, npu_import)
    log_msg(fallback_index, "from npu_bridge.npu_init import *")
def visit_For(self, node):
    # Replace a tagged for-loop with statements that run the generated
    # parallel function over slices of its lists via multiprocessing.
    for loop in self._loops:
        print(loop.tagged_node)  # debug output
        if node is loop.tagged_node:
            # generate call to generated function
            self._functions.append(generate_parallel_function(loop))
            slices = generate_slices(loop, cpus)
            # Per-CPU argument lists: each list name sliced to its chunk.
            arr_args = []
            for slc in slices:
                arr_args.append([append_slice(name.id, slc) for name in loop.lists])
            stmts = []
            # `import multiprocessing` — currently not emitted (see below).
            parsed_import = ast.Import(names=[ast.alias(name='multiprocessing', asname=None)])
            # pool = multiprocessing.Pool(<cpus>)
            parsed_pool = ast.Assign(
                targets=[ast.Name(id='pool', ctx=ast.Store())],
                value=ast.Call(
                    func=ast.Attribute(
                        value=ast.Name(id='multiprocessing', ctx=ast.Load()),
                        attr='Pool', ctx=ast.Load()),
                    args=[ast.Num(n=cpus)], keywords=[],
                    starargs=None, kwargs=None))
            # stmts.append(parsed_import)
            stmts.append(parsed_pool)
            resnames = []
            for i in range(cpus):
                # generate call to apply_async
                resname = "%s%i" % (ForTransformer.RETURN_STUB, i)
                resnames.append(resname)
                fn_call = generate_function_call(id(loop), arr_args[i], i)
                stmts.append(ast.Assign(targets=[ast.Name(id=resname, ctx=ast.Store())], value=fn_call))
            # Merge the per-CPU results back into each original list.
            for i, arr in enumerate(loop.lists):
                print("lists %i" % i)  # debug output
                stmts.append(ast.parse(generate_template(resnames, i, arr)).body[0])
            return stmts
        else:
            # NOTE(review): leftover debug print; also, falling through
            # here returns None, which in a NodeTransformer removes the
            # untagged loop — verify this is intentional.
            print("HELLOOO!!!")
            self.generic_visit(node)
def augment_ast(root):
    """Instrument a user program's AST so it runs under Pygame Zero.

    Depending on the PGZERO_MODE environment variable ("True" or "auto"),
    prepend ``import pgzrun as __pgzrun`` and append ``__pgzrun.go()`` to
    the module ``root`` (mutated in place).  In "auto" mode, instrumentation
    is skipped when the program does not look like a Pygame Zero program.
    """
    mode = os.environ.get("PGZERO_MODE", "False")
    # Caller is expected to invoke this only when the mode is enabled.
    assert mode != "False"
    warning_prelude = "WARNING: Pygame Zero mode is turned on (Run → Pygame Zero mode)"
    try:
        import pgzero  # @UnusedImport
    except ImportError:
        if mode == "True":
            # Explicitly enabled but pgzero missing: warn and run unmodified.
            print(
                warning_prelude
                + ",\nbut pgzero module is not found. Running program in regular mode.\n",
                file=sys.stderr,
            )
        else:
            assert mode == "auto"
        return

    # Check if draw is defined
    for stmt in root.body:
        if isinstance(stmt, ast.FunctionDef) and stmt.name == "draw":
            break
    else:
        if mode == "auto":
            # No draw(): in auto mode silently treat as a regular program.
            return
        else:
            # Mode forced on: warn about the missing draw() but still
            # fall through and instrument the program anyway.
            print(
                warning_prelude
                + ",\nbut your program doesn't look like usual Pygame Zero program\n"
                + "(draw function is missing).\n",
                file=sys.stderr,
            )

    # need more checks in auto mode
    if mode == "auto":
        # check that draw method is not called in the code
        for node in ast.walk(root):
            if (isinstance(node, ast.Call)
                    and isinstance(node.func, ast.Name)
                    and node.func.id == "draw"):
                # User calls draw() manually — not a Pygame Zero program.
                return

    # prepend "import pgzrun as __pgzrun"
    imp = ast.Import([ast.alias("pgzrun", "__pgzrun")])
    imp.lineno = 0
    imp.col_offset = 0
    ast.fix_missing_locations(imp)
    # "ignore" tag marks the injected node so downstream tooling skips it.
    imp.tags = {"ignore"}
    root.body.insert(0, imp)

    # append "__pgzrun.go()"
    go = ast.Expr(
        ast.Call(
            ast.Attribute(ast.Name("__pgzrun", ast.Load()), "go", ast.Load()),
            [], []))
    # Huge lineno keeps the injected call sorted after all user code.
    go.lineno = 1000000
    go.col_offset = 0
    ast.fix_missing_locations(go)
    go.tags = {"ignore"}
    root.body.append(go)
def __to_module(self):
    """Wrap the generated interface class in a complete ast.Module.

    The module body consists of the two ChromeController imports the
    generated class depends on, followed by the class definition itself.
    """
    transport_import = ast.ImportFrom(
        module="ChromeController.transport",
        names=[ast.alias('ChromeExecutionManager', None)],
        level=0)
    base_import = ast.ImportFrom(
        module="ChromeController.manager_base",
        names=[ast.alias('ChromeInterface', None)],
        level=0)
    body = [transport_import, base_import, self.interface_class]

    # ast.Module grew a second positional field (type_ignores) in 3.8.
    if sys.version_info >= (3, 8):
        mod = ast.Module(body, [], lineno=self.__get_line(), col_offset=1)
    else:
        mod = ast.Module(body, lineno=self.__get_line(), col_offset=1)
    return ast.fix_missing_locations(mod)
def visit_Import(self, node):
    """Rewrite ``import PTO_ENV`` as ``from PTO import random_function``.

    Any other import statement is passed through unchanged.
    """
    first_alias = node.names[0]
    if first_alias.name != 'PTO_ENV':
        return node
    replacement = ast.ImportFrom(
        module='PTO',
        names=[ast.alias(name='random_function', asname=None)],
        level=0)
    return replacement
def visit_Lambda(self, node):
    """Lift a lambda into a module-level function definition.

    The lambda's free variables (captured names) become extra leading
    parameters of a forged FunctionDef; the lambda itself is replaced by
    either a bare reference to that function or a
    ``functools.partial(fn, <captured args>)`` call that binds them.
    """
    # Make sure functools is importable in the produced module.
    if modules['functools'] not in self.global_declarations.values():
        import_ = ast.Import([ast.alias('functools', None)])
        self.imports.append(import_)
        self.global_declarations['functools'] = modules['functools']
    self.generic_visit(node)
    # Unique name for the lifted function, namespaced by the current prefix.
    forged_name = "{0}_lambda{1}".format(self.prefix,
                                         len(self.lambda_functions))
    # Names used by the lambda but defined outside it.
    ii = self.passmanager.gather(ImportedIds, node, self.ctx)
    ii.difference_update(self.lambda_functions)  # remove current lambdas
    # Captured names are passed positionally, in sorted (deterministic) order.
    binded_args = [ast.Name(iin, ast.Load()) for iin in sorted(ii)]
    former_nbargs = len(node.args.args)  # NOTE(review): unused — verify
    # Prepend the captured names to the lambda's own parameter list.
    node.args.args = ([ast.Name(iin, ast.Param()) for iin in sorted(ii)]
                      + node.args.args)
    forged_fdef = ast.FunctionDef(forged_name,
                                  copy(node.args),
                                  [ast.Return(node.body)],
                                  [])
    self.lambda_functions.append(forged_fdef)
    proxy_call = ast.Name(forged_name, ast.Load())
    if binded_args:
        # functools.partial(forged_fn, <captured...>)
        return ast.Call(
            ast.Attribute(ast.Name('functools', ast.Load()),
                          "partial",
                          ast.Load()),
            [proxy_call] + binded_args, [], None, None)
    else:
        # Nothing captured: the bare function reference suffices.
        return proxy_call
def test_get_at_root(self) -> None:
    """Tests that `get_at_root` successfully gets the imports"""
    mock_file = path.join(
        path.dirname(__file__),
        "mocks",
        "eval{extsep}py".format(extsep=extsep),
    )
    with open(mock_file) as f:
        module = ast.parse(f.read())
    imports = get_at_root(module, (Import, ImportFrom))
    self.assertIsInstance(imports, list)
    self.assertEqual(len(imports), 1)
    expected = ast.Import(
        names=[
            ast.alias(
                asname=None,
                name="cdd.tests.mocks",
                identifier=None,
                identifier_name=None,
            )
        ],
        alias=None,
    )
    self.assertTrue(cmp_ast(imports[0], expected))
def importFn(this_name, node):
    u"""Rewrite a plain ``import`` statement for module consolidation.

    Modules known to the consolidated group are re-pointed at
    ``<group_name>.<name>``; when the original import had no ``as`` clause,
    an extra alias is prepended so the original top-level binding survives.

    :param this_name: str — dotted name of the module containing ``node``
    :param node: ast.Import, mutated in place
    """
    this_package = u".".join(this_name.split(u".")[:-1])
    for ID in range(len(node.names)):
        alias = node.names[ID]
        # Each slot temporarily holds a *list* of aliases; flattened at the end.
        alias_list = list()
        node.names[ID] = alias_list
        name = alias.name
        asname = alias.asname
        # The import target resolved relative to the current package.
        re_ed_name = u"%s.%s" % (this_package, name)
        if not (re_ed_name in module_dict or re_ed_name in package_dict):
            if name in module_dict or name in package_dict:
                # The module was moved into the consolidated group:
                # rewrite the import to point at the grouped copy.
                con_ed_name = u"%s.%s" % (group_name, name)
                alias.name = con_ed_name
                if asname is None:
                    # No explicit alias: also bind the original top-level
                    # name so existing references keep working.
                    head = ast.alias()
                    head.asname = name.split(u".")[0]
                    head.name = u"%s.%s" % (group_name, head.asname)
                    # NOTE(review): "_" deliberately discards the grouped
                    # binding for the rewritten alias — confirm intent.
                    alias.asname = u"_"
                    alias_list.append(head)
        # NOTE(review): indentation reconstructed — the original alias is
        # assumed to always be kept, matched or not.
        alias_list.append(alias)
    # Flatten the list-of-lists back into a flat alias list.
    node.names = [t for i in node.names for t in i]
def _compile_import(self, args):
    """Compile an import form into an ``ast.Import`` node.

    ``args[0]`` is the module name; an optional ``args[1]`` supplies the
    ``as`` alias.
    """
    module_name = args[0]
    alias_name = str(args[1]) if len(args) > 1 else None
    aliases = [ast.alias(str(module_name), alias_name)]
    return PilsResult() + ast.Import(aliases)
def visit_Module(self, node):
    """Visit every top-level statement, then prepend the gathered imports
    (each aliased into the pythonic namespace) to the module body."""
    visited = (self.visit(child) for child in node.body)
    node.body = [child for child in visited if child]
    prefix = [
        ast.Import([ast.alias(modname, namespace + "::" + modname)])
        for modname in self.imports
    ]
    node.body = prefix + node.body
    ast.fix_missing_locations(node)
    return node
def make_importfrom_alias(queue, body, context, name):
    """
    Make an ast.alias node for the names list of an ast.ImportFrom.

    Parameters
    ----------
    queue : deque
        Instruction Queue
    body : list
        Current body.
    context : DecompilationContext
    name : str
        Expected name of the IMPORT_FROM node to be popped.

    Returns
    -------
    alias : ast.alias

    Side Effects
    ------------
    Consumes IMPORT_FROM and STORE_NAME instructions from queue.
    """
    import_from = queue.popleft()
    store = queue.popleft()
    expect(import_from, instrs.IMPORT_FROM, "after IMPORT_NAME")

    if import_from.arg != name:
        raise DecompilationError(
            "IMPORT_FROM name mismatch. Expected %r, but got %s." % (
                name,
                import_from,
            )
        )
    # A store under the same name means no "as" clause was used.
    bound_name = store.arg
    return ast.alias(
        name=name,
        asname=None if bound_name == name else bound_name,
    )
def consolidate_imports(
        imports: List[Union[ast.Import, ast.ImportFrom]]) -> List[ast.Expr]:
    """Deduplicate and combine imports, leaving smallest possible set.

    Plain ``import`` statements are deduplicated as-is.  ``from`` imports
    sharing the same ``(module, level)`` are merged into a single statement
    whose name list is sorted and deduplicated, preserving ``as`` aliases
    (the original implementation silently dropped them).
    """
    # Deduplicate structurally identical nodes via their ast.dump signature.
    deduped_imports = [
        node for repr_, node in
        sorted({ast.dump(node): node for node in imports}.items())
    ]

    def group_key(node):
        module = getattr(node, 'module', None)
        level = getattr(node, 'level', None)
        return module, level

    def sort_key(node):
        # groupby only merges *adjacent* members; dump-order does not
        # guarantee adjacency when nodes differ in their name lists, so
        # sort by the grouping key (ast.Import nodes, module=None, first).
        module, level = group_key(node)
        return (module is not None, module or '',
                level if level is not None else -1)

    consolidated_imports = []
    for (module, level), group in itertools.groupby(
            sorted(deduped_imports, key=sort_key), key=group_key):
        if module is None:
            # ast.Import nodes: keep individually (already deduplicated).
            consolidated_imports.extend(group)
            continue
        # ast.ImportFrom: merge all (name, asname) pairs from the group.
        pairs = sorted(
            {(a.name, a.asname) for node in group for a in node.names},
            key=lambda p: (p[0], p[1] or ''))
        combined_import = ast.ImportFrom(
            module=module,
            names=[ast.alias(name=n, asname=a) for n, a in pairs],
            level=level if level is not None else 0)
        consolidated_imports.append(combined_import)
    return [
        ast.Expr(value=import_node) for import_node in consolidated_imports
    ]
def generate_init_module(self, modules):
    """Build an ``__init__`` module AST that star-imports each given module.

    :param modules: iterable of relative module names
    :return: ast.Module whose body is one ``from .<m> import *`` per module,
        in sorted order
    """
    nodes = [
        ast.ImportFrom(
            module=module,
            # HACK: the alias name smuggles a "# noqa" comment into the
            # unparsed "from .<m> import *" line to silence linters.
            names=[ast.alias(name="* # noqa", asname=None)],
            level=1)
        for module in sorted(modules)
    ]
    # ast.Module requires the type_ignores field on Python 3.8+; the
    # original omitted it, producing an incomplete node that fails to
    # compile or dump.
    return ast.Module(body=nodes, type_ignores=[])
def visit_ImportFrom(self, node):
    """Expand ``from m import *`` into explicit per-name aliases.

    Each ``*`` alias is replaced with one alias per name exported by
    ``modules[node.module]``; other aliases are kept unchanged.

    Fixes over the original: ``node.names.pop()`` removed the *last* alias
    (not necessarily the ``*`` entry), and ``node.names`` was extended while
    being iterated.  A fresh list is built instead.
    """
    expanded = []
    for alias in node.names:
        if alias.name == '*':
            expanded.extend(
                ast.alias(fname, None) for fname in modules[node.module])
        else:
            expanded.append(alias)
    node.names = expanded
    return node
def init_globals(opts, input_file):
    """Build the globals dict for evaluating user expressions (Python 2).

    Exposes the primary input as ``pp``, any extra files as ``files`` (and
    the first one as ``ff``), runs requested imports and eval-strings, and
    returns the populated namespace.

    :param opts: parsed options (import_paths, imports, evals, files)
    :param input_file: primary input file object
    :return: dict of globals
    """
    def make_stream(f):
        # Lazily wrap each line (trailing newline stripped) in a Line object.
        return Stream(imap(lambda x: Line(x.rstrip('\n\r')), iter(f)))

    pp = make_stream(input_file)
    globs = builtins.copy()
    globs['pp'] = pp
    # Prepend user-requested import paths (deduplicated) to sys.path.
    for path in opts.import_paths:
        path = os.path.abspath(path)
        if path not in sys.path:
            sys.path.insert(0, path)
    # Execute each requested import inside the user namespace by compiling
    # a synthetic one-statement module.
    for import_mod in opts.imports:
        import_node = ast.Import(names=[ast.alias(name=import_mod, asname=None)])
        code = compile(ast.fix_missing_locations(ast.Module(body=[import_node])),
                       'import %s' % (import_mod,), 'exec')
        eval(code, globs)
    # Run user-supplied eval strings; surface syntax errors as Exit.
    for eval_str in opts.evals:
        try:
            exec eval_str in globs
        except SyntaxError as e:
            raise Exit("got error: %s\nwhile evaluating: %s" % (e, eval_str))
    files = [make_stream(open(f)) for f in opts.files]
    globs['files'] = files
    if len(files) > 0:
        # convenience for single file operation
        globs['ff'] = files[0]
    return globs
def visit_Module(self, node):
    """
    Visit the whole module and add all import at the top level.

    >> import math

    Becomes

    >> import math as pythonic::math

    And

    >> import numpy.linalg

    Becomes

    >> import numpy as pythonic::numpy
    """
    kept = []
    for child in node.body:
        visited_child = self.visit(child)
        if visited_child:
            kept.append(visited_child)

    import_nodes = []
    for modname in self.imports:
        aliased = ast.alias(modname, namespace + "::" + modname)
        import_nodes.append(ast.Import([aliased]))

    node.body = import_nodes + kept
    ast.fix_missing_locations(node)
    return node
def test_add_existing_import(self):
    """Adding an import that already exists is a no-op returning None."""
    existing = ast.ImportFrom(
        level=0,
        module='a.b',
        names=[ast.alias(name='c', asname=None)],
    )
    tree = ast.Module(body=[existing])
    result = import_utils.add_import(tree, 'a.b.c')
    self.assertIsNone(result)
    self.assertEqual('from a.b import c\n', pasta.dump(tree))