def wrap_with_imports(self, program):
    """Prepend the imports needed for symbolic execution to *program*.

    Mutates ``program.body`` in place and returns the same module node.
    """
    def _import(module, asname=None):
        # Plain `import module [as asname]`.
        return ast.Import(names=[ast.alias(name=module, asname=asname)])

    def _from_import(module, *names):
        # `from module import name, ...` (absolute import).
        return ast.ImportFrom(
            level=0,
            module=module,
            names=[ast.alias(name=n, asname=None) for n in names])

    header = [
        _import('theano'),
        _import('theano.tensor', 'tt'),
        _import('numpy', 'np'),
        _from_import('functools', 'reduce'),
        _from_import('typing', 'List', 'Tuple'),
    ]
    program.body = header + program.body
    return program
def visit_Module(self, node):
    """Add the imports needed to run symbolically."""
    node = self.generic_visit(node)
    prefix = []
    # SeDict is only needed when symbolic dictionaries are enabled.
    if self.se_dict:
        prefix.append(ast.ImportFrom(
            module="sym_exec_lib.se_dict",
            names=[ast.alias(name="SeDict", asname=None)],
            level=0))
    prefix.append(ast.ImportFrom(
        module="symbolic.instrumentation",
        names=[ast.alias(name="whichBranch", asname=None)],
        level=0))
    prefix.append(ast.ImportFrom(
        module="symbolic.symbolic_types",
        names=[ast.alias(name="getConcrete", asname=None)],
        level=0))
    # ord_str is module-level source text for a replacement ord() helper;
    # parse it into statements and splice it in after the imports.
    ord_function = ast.parse(ord_str).body
    node.body = prefix + ord_function + node.body
    return node
def __init__(self):
    scope = {}
    import_asts = []
    # Expose every callable from vectordatasource.meta.function in the
    # evaluation scope, and record an import statement for each one.
    for func_name in dir(function):
        candidate = getattr(function, func_name)
        if not callable(candidate):
            continue
        scope[func_name] = candidate
        import_asts.append(ast.ImportFrom(
            'vectordatasource.meta.function',
            [ast.alias(func_name, None)],
            0,
        ))
    # The util module is made available wholesale.
    scope['util'] = util
    import_asts.append(ast.ImportFrom(
        'vectordatasource',
        [ast.alias('util', None)],
        0,
    ))
    self.import_asts = import_asts
    self.scope = scope
def _generate_trait_file(self):
    """Generate the traits.py file which contains marshmallow schema's for
    the various traits. For example ExpandableSchema, QuerySchema etc.

    Only used to serialize the query parameters for now. It uses the
    `self._trait_nodes` to write the body.

    Returns:
        ast.Module whose body is the fixed import header followed by
        ``self._trait_nodes``.
    """
    # NOTE: `level` is not a field of ast.Import (only ast.ImportFrom has
    # it); the previous `level=0` kwarg relied on AST constructors silently
    # accepting unknown keywords, which is deprecated as of Python 3.13.
    nodes = [
        ast.Import(names=[ast.alias(name="marshmallow", asname=None)]),
        ast.ImportFrom(
            module="marshmallow",
            names=[ast.alias(name="fields", asname=None)],
            level=0,
        ),
        ast.ImportFrom(
            module="commercetools.helpers",
            names=[ast.alias(name="RemoveEmptyValuesMixin", asname=None)],
            level=0,
        ),
        ast.ImportFrom(
            module="commercetools.helpers",
            names=[ast.alias(name="OptionalList", asname=None)],
            level=0,
        ),
    ]
    nodes.extend(self._trait_nodes)
    return ast.Module(body=nodes)
def visit_Module(self, node):
    """Inject the quixote template runtime imports at the top of the module,
    keeping any ``from __future__ import ...`` statements first."""
    template_imports = [
        ast.ImportFrom(
            module='builtins',
            names=[ast.alias(name='vars', asname='_q_vars')],
            level=0),
        ast.ImportFrom(
            module='quixote.html',
            names=[
                ast.alias(name='TemplateIO', asname='_q_TemplateIO'),
                ast.alias(name='htmltext', asname='_q_htmltext'),
                ast.alias(name='_q_join', asname='_q_join'),
                ast.alias(name='_q_format', asname='_q_format'),
            ],
            level=0),
    ]
    for imp in template_imports:
        ast.fix_missing_locations(imp)
    # __future__ imports must stay first: insert just after the last one.
    insert_at = 0
    for position, stmt in enumerate(node.body):
        if isinstance(stmt, ast.ImportFrom) and stmt.module == '__future__':
            insert_at = position + 1
    node.body[insert_at:insert_at] = template_imports
    return self.generic_visit(node)
def generate_code_file(mod_body, file, imports, external_functions_source=False, names="#"):
    """Render *mod_body* into Python source, prepending the given imports.

    Args:
        mod_body: list of statement nodes forming the module body; import
            nodes are inserted at the front (mutated in place).
        file: kept for interface compatibility; not referenced in this body.
        imports: object exposing ``as_imports`` as (module, asname) pairs
            and ``from_imports`` as (module, name) pairs.
        external_functions_source: optional module name; when truthy, a
            ``from <it> import *`` is placed first.
        names: prefix string (e.g. a comment header) for the output.

    Returns:
        The generated source code as a string.
    """
    for (module, name) in imports.as_imports:
        # NOTE: ast.Import has no `level` field; the previous `level=0`
        # kwarg relied on deprecated arbitrary-kwarg AST construction
        # (rejected from Python 3.13 onward).
        mod_body.insert(
            0, ast.Import(names=[ast.alias(name=module, asname=name)]))
    for (module, name) in imports.from_imports:
        mod_body.insert(
            0,
            ast.ImportFrom(module=module,
                           names=[ast.alias(name=name, asname=None)],
                           level=0))
    if external_functions_source:
        mod_body.insert(
            0,
            ast.ImportFrom(module=external_functions_source,
                           names=[ast.alias(name='*', asname=None)],
                           level=0))
    mod = wrap_module(mod_body)
    print('Generating Source')
    source = names + ast.unparse(mod)
    return source
def _combine_import_from(self, node_list):
    """Yield *node_list* with adjacent runs of compatible
    ``from X import ...`` statements merged into a single ImportFrom.

    Star imports and statements from a different module/level break a run.
    """
    pending = None    # last ImportFrom whose names are being accumulated
    collected = []    # alias nodes gathered from the current run

    def _mergeable(stmt):
        if not isinstance(stmt, ast.ImportFrom):
            return False
        # Never merge `from X import *`.
        if len(stmt.names) == 1 and stmt.names[0].name == '*':
            return False
        return (pending is None
                or (stmt.module == pending.module
                    and stmt.level == pending.level))

    for stmt in node_list:
        if _mergeable(stmt):
            pending = stmt
            collected += stmt.names
        else:
            if collected:
                yield ast.ImportFrom(module=pending.module,
                                     names=collected,
                                     level=pending.level)
                collected = []
            yield stmt
    if collected:
        yield ast.ImportFrom(module=pending.module,
                             names=collected,
                             level=pending.level)
def emit_module(st: ast.Module, imports=True, file=sys.stdout):
    """ Emits a regular Python AST to source text, while adding imports to
    ensure that it can execute standalone """
    # Any 'from __future__ import ...' command has to be the first
    # line(s) of any module (a docstring may precede it), so the runtime
    # import is inserted after that prelude.
    body = list(st.body)
    if imports:
        pos = 0
        while pos < len(st.body):
            stmt = st.body[pos]
            is_future = (isinstance(stmt, ast.ImportFrom)
                         and stmt.module == '__future__')
            is_doc = (isinstance(stmt, ast.Expr)
                      and isinstance(stmt.value, ast.Str))
            if not (is_future or is_doc):
                break
            pos += 1
        runtime_mod = ('retic.transient' if not flags.optimized()
                       else 'retic.opt_transient')
        body.insert(
            pos,
            ast.ImportFrom(level=0, module=runtime_mod,
                           names=[ast.alias(name='*', asname=None)]))
    print(codegen.to_source(ast.Module(body=body)), file=file)
def __to_module(self):
    """Wrap the generated interface class in a Module carrying the imports
    it needs, with source locations filled in."""
    socket_import = ast.ImportFrom(
        module="ChromeController.transport",
        names=[ast.alias('ChromeSocketManager', None)],
        level=0)
    base_import = ast.ImportFrom(
        module="ChromeController.manager_base",
        names=[ast.alias('ChromeInterface', None)],
        level=0)
    module = ast.Module(
        [socket_import, base_import, self.interface_class],
        lineno=self.__get_line(),
        col_offset=1)
    return ast.fix_missing_locations(module)
def test_combining_ImportFrom(self):
    # Combine ImportFrom when the 'from' clause matches.
    first = ast.ImportFrom('X', [ast.alias('Y', None)], 1)
    second = ast.ImportFrom('X', [ast.alias('Z', None)], 1)
    module = ast.Module([first, second])
    new_ast = self.transform.visit(module)
    self.assertEqual(len(module.body), 1)
    combined = new_ast.body[0]
    self.assertEqual(len(combined.names), 2)
    expected = (('Y', None), ('Z', None))
    for alias_node, (name, asname) in zip(combined.names, expected):
        self.assertEqual(alias_node.name, name)
        self.assertEqual(alias_node.asname, asname)
def test_interleaved_ImportFrom(self):
    # Test prevention of statement merging.
    first = ast.ImportFrom('X', [ast.alias('Y', None)], 1)
    # Separated by a plain Import
    plain_import = ast.Import([ast.alias('X', None)])
    second = ast.ImportFrom('X', [ast.alias('Z', None)], 1)
    # Different level
    third = ast.ImportFrom('X', [ast.alias('W', None)], 2)
    # Different 'from' clause
    fourth = ast.ImportFrom('Z', [ast.alias('Y', None)], 2)
    module = ast.Module([first, plain_import, second, third, fourth])
    new_ast = self.transform.visit(module)
    self.assertEqual(len(module.body), 5)
def test_ImportFrom(self):
    # (node, expected source) pairs covering absolute, bare-relative,
    # dotted-relative and multi-name imports.
    cases = [
        (ast.ImportFrom('X', [ast.alias('Y', None)], 0),
         'from X import Y'),
        (ast.ImportFrom(None, [ast.alias('Y', None)], 1),
         'from . import Y'),
        (ast.ImportFrom('X', [ast.alias('Y', None)], 1),
         'from .X import Y'),
        (ast.ImportFrom('X', [ast.alias('Y', None), ast.alias('Z', None)], 0),
         'from X import Y,Z'),
    ]
    for node, expected in cases:
        self.verify(node, expected)
def generate_imports(self, name, globl, call_obj, file_imports):
    """
    Generate an import statement for a (name, runtime object) pair.

    name: the identifier the object is bound to in the target file.
    globl: the runtime object itself.
    call_obj: object whose defining module is used as a fallback import
        source (may be None).
    file_imports: mapping of names to pre-existing import nodes; takes
        precedence over everything else.

    Returns an ast.Import / ast.ImportFrom / ast.Assign node, or None
    for builtins.  NOTE(review): falls through returning None when none
    of the branches match (globl not module/class/function and
    call_obj is None) — presumably intentional; confirm with callers.
    """
    # A pre-recorded import for this name always wins.
    if name in file_imports:
        return file_imports[name]
    # If we're importing a module, then add an import directly
    if inspect.ismodule(globl):
        # Add an alias if the imported name is different from the module name
        alias = name if globl.__name__ != name else None
        return ast.Import([ast.alias(name=globl.__name__, asname=alias)])
    else:
        # Get module where global is defined
        mod = inspect.getmodule(globl)
        # TODO: When is mod None?
        if mod is None or mod is typing:
            # Prefer inlining the value as a literal assignment; fall back
            # to importing the name from call_obj's module.
            try:
                mod_value = obj_to_ast(globl)
                return ast.Assign(targets=[make_name(name)], value=mod_value)
            except ObjConversionException:
                return ast.ImportFrom(
                    module=inspect.getmodule(call_obj).__name__,
                    names=[ast.alias(name=name, asname=None)],
                    level=0)
        # Can't import builtins
        # NOTE(review): __builtins__ is a module in __main__ but a dict in
        # other modules, so `mod == __builtins__` may never be true when
        # this code runs outside __main__ — confirm intended.
        elif mod == __builtins__:
            return None
        # If the value is a class or function, then import it from the defining
        # module
        elif inspect.isclass(globl) or inspect.isfunction(globl):
            return ast.ImportFrom(
                module=mod.__name__,
                names=[ast.alias(name=name, asname=None)],
                level=0)
        # Otherwise import it from the module using the global
        elif call_obj is not None:
            return ast.ImportFrom(
                module=inspect.getmodule(call_obj).__name__,
                names=[ast.alias(name=name, asname=None)],
                level=0)
def generate_import_from(max_depth=None):
    """Build a random ``from ... import ...`` node for fuzzing.

    max_depth is accepted for interface uniformity with the other
    generators but is not used here.
    """
    module = _generate_module_name()
    level = random.randrange(3)
    # Between one and three imported names.
    names = [_generate_alias() for _ in range(random.randrange(1, 4))]
    return ast.ImportFrom(module, names, level)
def get_typing_imports(self, source):
    """Build the ``if typing.TYPE_CHECKING:`` block of relative imports for
    *source*, skipping names that are already imported directly."""
    already_imported = defaultdict(list)
    for module_path, names in self._import_set[source]:
        already_imported[module_path].extend(list(names))

    import_nodes = []
    for module, objects in self._typing_imports[source].items():
        skip = already_imported[f"commercetools.types.{module}"]
        aliases = [
            ast.alias(name=obj, asname=None)
            for obj in sorted(objects)
            if obj not in skip
        ]
        if aliases:
            import_nodes.append(
                ast.ImportFrom(module=module, names=aliases, level=1))

    if not import_nodes:
        return []
    import_nodes.sort(key=operator.attrgetter("module"))
    return [
        ast.If(
            test=ast.Attribute(value=ast.Name(id="typing"),
                               attr="TYPE_CHECKING"),
            body=import_nodes,
            orelse=[],
        )
    ]
def _extract_defs_ast(tree: ast.Module) -> DefsType:
    """Map top-level names to the AST node that defines them.

    Covers plain imports, absolute ``from ... import ...`` statements
    (relative imports are skipped) and simple ``name = ...`` assignments.
    """
    result: DefsType = dict()
    for node in tree.body:
        if isinstance(node, ast.Import):
            for name_node in node.names:
                name = name_node.asname or name_node.name
                # NOTE: `ctx` is not a field of ast.Import; the previous
                # ctx=ast.Load() kwarg relied on AST constructors accepting
                # unknown keywords, deprecated as of Python 3.13.
                result[name] = ast.Import(
                    names=[name_node],
                    lineno=1,
                    col_offset=1,
                )
            continue
        if isinstance(node, ast.ImportFrom):
            # Skip relative imports and `from . import x` (no module).
            if not node.module or node.level:
                continue
            for name_node in node.names:
                name = name_node.asname or name_node.name
                result[name] = ast.ImportFrom(
                    module=node.module,
                    names=[name_node],
                    lineno=1,
                    col_offset=1,
                )
            continue
        if isinstance(node, ast.Assign):
            for target in node.targets:
                if not isinstance(target, ast.Name):
                    continue
                result[target.id] = node
    return result
def _extract_defs_astroid(tree: astroid.Module) -> DefsType:
    """Map top-level names of an astroid module to equivalent stdlib-ast
    definition nodes (imports) or parsed assignment statements."""
    result: DefsType = dict()
    for node in tree.body:
        if isinstance(node, astroid.Import):
            for name, alias in node.names:
                # NOTE: `ctx` is not a field of ast.Import; the previous
                # ctx=ast.Load() kwarg relied on AST constructors accepting
                # unknown keywords, deprecated as of Python 3.13.
                result[alias or name] = ast.Import(
                    names=[ast.alias(name=name, asname=alias)],
                    lineno=1,
                    col_offset=1,
                )
            continue
        if isinstance(node, astroid.ImportFrom):
            # Skip relative imports and `from . import x` (no module).
            if not node.modname or node.level:
                continue
            for name, alias in node.names:
                result[alias or name] = ast.ImportFrom(
                    module=node.modname,
                    names=[ast.alias(name=name, asname=alias)],
                    lineno=1,
                    col_offset=1,
                )
            continue
        if isinstance(node, astroid.Assign):
            # Round-trip through source to get a stdlib-ast statement.
            expr = ast.parse(node.as_string()).body[0]
            for target in node.targets:
                if not isinstance(target, astroid.AssignName):
                    continue
                result[target.name] = expr
    return result
def test_add_imported_object_from_module_asname(self):
    """An aliased `from foo import bar as baz` registers under 'baz'."""
    # ast.alias is the correct node type for import names; the previous
    # ast.Name(name=..., asname=...) relied on AST constructors silently
    # accepting unknown keyword arguments (deprecated as of Python 3.13)
    # and merely happened to carry the attributes the code under test reads.
    import_stm = ast.ImportFrom(module="foo",
                                names=[ast.alias(name="bar", asname="baz")])
    self.module.add_imported_symbol(import_stm)
    self.assertEqual(self.module.imported_symbols["baz"].module_path, "foo")
    self.assertEqual(self.module.imported_symbols["baz"].symbol, "bar")
def _update_Import(self, node, stmt_list, idx):
    """Rewrite ``import <self._from_mod>`` occurrences inside *node*.

    Removes the matching aliases from the Import statement (dropping the
    whole statement if it becomes empty) and inserts replacement
    ``from <to_mod> import ...`` or ``import <to_mod>`` statements at *idx*.
    """
    if not any(x for x in node.names if x.name == self._from_mod):
        return
    # Partition the aliases instead of deleting by index while iterating:
    # the old `del node.names[i]` used indices from a *copy* of the list,
    # which drift (or raise IndexError) once an earlier entry is removed.
    moved = [alias for alias in node.names if alias.name == self._from_mod]
    node.names[:] = [alias for alias in node.names
                     if alias.name != self._from_mod]
    if not node.names:
        del stmt_list[idx]
    if self._to_mod and self._to_id:
        for alias in moved:
            new_node = ast.ImportFrom(module=self._to_mod, level=0, names=[
                alias,
            ])
            stmt_list.insert(idx, ast.copy_location(new_node, node))
    elif self._to_mod:
        for alias in moved:
            new_node = ast.Import(names=[
                ast.alias(self._to_mod, alias.asname),
            ])
            stmt_list.insert(idx, ast.copy_location(new_node, node))
def visit_Import(self, node):
    """Replace `import PTO_ENV` with `from PTO import random_function`;
    leave every other import untouched."""
    if node.names[0].name != 'PTO_ENV':
        return node
    return ast.ImportFrom(
        module='PTO',
        names=[ast.alias(name='random_function', asname=None)],
        level=0)
def add_required_imports(module_name, module_ast, module_context):
    """Prepend import statements for every name the module context needs,
    grouped by the (module, level) that defines each name."""
    imports = module_context.get_imports()
    if has_type_var(module_ast):
        imports.add('TypeVar')
    # Group imported names by their defining module.
    module_to_names = {}
    for imp in imports:
        if imp not in ImportHandler.class_to_module:
            continue
        module_to_names.setdefault(
            ImportHandler.class_to_module[imp], []).append(imp)
    for (mod, level), names in module_to_names.items():
        # Never import a module from itself.
        if mod == module_name:
            continue
        module_ast.body.insert(0, ast.ImportFrom(
            module=mod,
            names=[ast.alias(name=name, asname=None) for name in names],
            level=level
        ))
def insert_npu_import(r_node):
    """Add NPU import modules"""
    npu_import = ast.ImportFrom(
        module='npu_bridge.npu_init',
        names=[ast.alias(name='*', asname=None)],
        level=0)
    # Only scan the first few statements for an existing import to anchor on.
    scan_limit = min(len(r_node.body), 5)
    fallback_index = 0
    for i in range(scan_limit):
        stmt = r_node.body[i]
        is_plain_import = isinstance(stmt, ast.Import)
        is_from_import = isinstance(stmt, ast.ImportFrom)
        if is_plain_import or (is_from_import and stmt.module != "__future__"):
            # Place the NPU import right before the first real import.
            r_node.body.insert(i, npu_import)
            log_msg(i, "from npu_bridge.npu_init import *")
            return
        if is_from_import:
            # A __future__ import: the NPU import must come after it.
            fallback_index = i + 1
    r_node.body.insert(fallback_index, npu_import)
    log_msg(fallback_index, "from npu_bridge.npu_init import *")
def import_(self, *aliases, **kwargs):
    """Build a `from <self.name> import ...` node.

    Positional arguments are converted via to_alias; keyword arguments map
    the desired asname to the target being imported.
    """
    names = [to_alias(a) for a in aliases]
    for asname, target in kwargs.items():
        names.append(Alias(name=to_alias(target).name, asname=asname))
    return ast.ImportFrom(module=self.name, names=names, level=0)
def new_importfrom(module, names, level=0, parent=None):
    """Create an ImportFrom node from (name, asname) pairs and run the
    project's node initialisation hooks."""
    alias_nodes = [ast.alias(name=n, asname=a) for n, a in names]
    node = ast.ImportFrom(module=module, names=alias_nodes, level=level)
    # Hooks provided by the project's patched ast node classes.
    node.__preinit__(parent)
    node.__postinit__()
    return node
def stubs_for_pydantic(models: Collection[Type[pd.BaseModel]], clsname: str = None) -> ast.Module:
    """ Generate stubs for Pydantic models

    Example:
        ast.unparse(stubs_for_models([db.User]))
    """
    infos = [ModelInfo.from_pydantic_model(model) for model in models]
    class_nodes = [info.to_ast() for info in infos]
    imports_node = merge_imports(infos).to_ast()
    # Optionally nest all generated classes inside one wrapper class.
    if clsname:
        class_nodes = [
            ast.ClassDef(clsname, bases=[], decorator_list=[], keywords=[],
                         body=class_nodes)
        ]
    return ast.Module([
        ast.ImportFrom('__future__', [ast.alias('annotations')], level=0),
        ast.Import([ast.alias('pydantic')]),
        imports_node,
        ast.parse('NoneType = type(None)'),
        *class_nodes,
    ], type_ignores=[])
def generate_init_module(self, modules):
    """Create an __init__ module that re-exports every submodule via a
    relative star import."""
    body = []
    for module in sorted(modules):
        body.append(ast.ImportFrom(
            module=module,
            # The "# noqa" piggybacks on the alias so the rendered line
            # becomes `from .x import * # noqa`.
            names=[ast.alias(name="* # noqa", asname=None)],
            level=1))
    return ast.Module(body=body)
def future_top(f_dict):
    """Move every `from __future__ import ...` in each matching file to the
    top of that file, merging them into a single import statement.

    Args:
        f_dict: mapping of path -> source code; updated in place for files
            whose path matches build_file_check and that contain __future__
            imports.
    Returns:
        The (mutated) f_dict.
    """
    for path, code in f_dict.items():
        # Guard clause (was `if not ... is None`, lint E714): skip files
        # that are not build files.
        if build_file_check.match(path) is None:
            continue
        futures = []
        tree = ast.parse(code)

        def filter_fn(node):
            """Drop __future__ imports from the tree, collecting their names.

            :type node: ast.AST
            """
            if isinstance(node, ast.ImportFrom) and node.module == "__future__":
                futures.extend(node.names)
                return False
            return True

        filter_node(tree, key=filter_fn)
        if futures:
            # Re-insert a single merged __future__ import at the very top.
            merged = ast.ImportFrom()
            merged.module = "__future__"
            merged.level = 0
            merged.names = futures
            tree.body.insert(0, merged)
            f_dict[path] = astunparse.unparse(tree)
    return f_dict
def visit_Module(self, node):
    """Rebuild the module with the hesitate.driver helpers imported right
    after any docstring / __future__ prelude."""
    # Collect the prelude (docstring and __future__ imports) that must
    # stay at the very top of the module.
    prelude = []
    for stmt in node.body:
        if self._is_docstring(stmt) or self._is_future_import(stmt):
            prelude.append(stmt)
        else:
            break
    transformed = [self.visit(child) for child in node.body[len(prelude):]]
    import_node = ast.ImportFrom(
        module='hesitate.driver',
        names=[
            ast.alias(
                name='should_assert',
                asname=self.ASSERTION_TEST_IMPORTED_NAME),
            ast.alias(
                name='timed',
                asname=self.ASSERTION_TIMER_IMPORTED_NAME),
        ],
        lineno=1,
        col_offset=0,
        level=0)
    if prelude:
        import_node = ast.copy_location(import_node, prelude[0])
    return ast.Module(
        body=prelude + [import_node] + transformed,
        lineno=1,
        col_offset=0)
def consolidate_imports(
        imports: List[Union[ast.Import, ast.ImportFrom]]) -> List[ast.Expr]:
    """Deduplicate and combine imports, leaving smallest possible set.

    ImportFrom statements with the same (module, level) are merged into a
    single statement with sorted names; plain Import statements are passed
    through after deduplication.
    """
    # Deduplicate by structural equality (ast.dump).
    deduped_imports = [
        node for repr_, node in sorted(
            {ast.dump(node): node for node in imports}.items())
    ]

    def group_key(node):
        module = getattr(node, 'module', None)
        level = getattr(node, 'level', None)
        return module, level

    # itertools.groupby only merges *adjacent* items, so the list must be
    # ordered by the grouping key itself; sorting by ast.dump alone can
    # interleave different (module, level) pairs between same-key nodes and
    # leave duplicates unconsolidated.  repr() makes the key sortable even
    # though module may be None.
    deduped_imports.sort(key=lambda node: repr(group_key(node)))

    consolidated_imports = []
    for (module, level), group in itertools.groupby(deduped_imports,
                                                    key=group_key):
        if module is None:
            # ast.Import
            consolidated_imports.extend(group)
            continue
        # ast.ImportFrom
        all_names = sorted(
            name.name for node in group for name in node.names)
        combined_import = ast.ImportFrom(
            module=module,
            names=[ast.alias(name=name, asname=None) for name in all_names],
            level=level if level is not None else 0)
        consolidated_imports.append(combined_import)
    return [
        ast.Expr(value=import_node) for import_node in consolidated_imports
    ]
def test_fqdn_importfrom(self):
    # 'from os import path'
    plain_alias = ast.alias(name="path", asname=None)
    plain_node = ast.ImportFrom(module="os", names=[plain_alias])
    assert pi.ImportedName("path", plain_node, plain_alias).canonical_name == "os.path"

    # 'from one.two.three import four'
    nested_alias = ast.alias(name="four", asname=None)
    nested_node = ast.ImportFrom(module="one.two.three", names=[nested_alias])
    assert (
        pi.ImportedName("four", nested_node, nested_alias).canonical_name
        == "one.two.three.four"
    )

    # 'from one.two.three import fourth_module as four'
    renamed_alias = ast.alias(name="fourth_module", asname="four")
    renamed_node = ast.ImportFrom(module="one.two.three", names=[renamed_alias])
    assert (
        pi.ImportedName("four", renamed_node, renamed_alias).canonical_name
        == "one.two.three.fourth_module"
    )