def visit_Module(self, node):
    """Instrument a module for tracing.

    Rebuilds `node.body` as: original imports, `import sys`, a
    `sys.path.append(path_to_tracer)` call, `import Tracer`, the collected
    global variables and methods, then the remaining original statements.
    Returns the mutated module node.
    """
    # Lazy %-style args avoid building the message when INFO is disabled.
    logging.info("Adding %d global variables, %d methods",
                 len(self.globalvars), len(self.methods))
    # Partition in two passes instead of removing while iterating: the
    # original `node_body.remove(n)` inside `for n in node_body` skipped
    # the statement immediately following every hoisted import.
    imports = [n for n in node.body
               if isinstance(n, (ast.Import, ast.ImportFrom))]
    rest = [n for n in node.body
            if not isinstance(n, (ast.Import, ast.ImportFrom))]
    node.body = imports
    node.body.append(Import(names=[alias(name='sys', asname=None)]))
    # Emit `sys.path.append(path_to_tracer)` so `import Tracer` resolves.
    node.body.append(Expr(value=Call(
        func=Attribute(
            value=Attribute(value=Name(id='sys', ctx=Load()),
                            attr='path', ctx=Load()),
            attr='append', ctx=Load()),
        args=[Constant(value=path_to_tracer, kind=None)],
        keywords=[])))
    node.body.append(Import(names=[alias(name='Tracer', asname=None)]))
    node.body.extend(self.globalvars)
    node.body.extend(self.methods)
    node.body.extend(rest)
    return node
def find_macros(tree, *, filename, reload=False):
    '''Establish macro bindings from `tree`. Top-level entrypoint.

    Collect bindings from each macro-import statement
    (`from ... import macros, ...`) at the top level of `tree.body`.
    Transform each macro-import into `import ...`, where `...` is the
    absolute module name the macros are being imported from.

    As a side effect, import the macro definition modules.

    Primarily meant to be called with `tree` the AST of a module that
    uses macros, but works with any `tree` that has a `body` attribute.

    `filename`: str, full path to the `.py` being macroexpanded, for
                resolving relative macro-imports and for error reporting.
                In interactive use, can be an arbitrary label.

    `reload`:   enable only if implementing a REPL. Will refresh modules,
                causing different uses of the same macros to point to
                different function objects.

    Return value is a dict `{macroname: function, ...}` with all
    collected bindings.
    '''
    bindings = {}
    for index, statement in enumerate(tree.body):
        if not ismacroimport(statement):
            continue
        module_absname, more_bindings = get_macros(statement,
                                                   filename=filename,
                                                   reload=reload)
        bindings.update(more_bindings)
        # Strip the imported names so the macros cannot be used as regular
        # run-time objects. Always use an absolute import, for the
        # unhygienic expose API guarantee.
        replacement = Import(names=[alias(name=module_absname, asname=None)])
        tree.body[index] = copy_location(replacement, statement)
    return bindings
def get_replacing_node_body(self) -> 'list':
    """Return the replacing node body.

    The result is `[]` when `import types` has already been emitted, or
    `[ImportNode]` (an `import types` statement) the first time around;
    used for further imports transformation, and imported exactly once.
    """
    if self.types_imported:
        return []
    # First call: emit `import types` and remember that we did.
    self.types_imported = True
    return [Import(names=[alias(name='types', asname=None)])]
def letimport(self, tree):
    """Split `tree` into alias nodes plus a trailing continuation.

    Builds a single `Import` from every element of `tree` except the
    last, records the continuation and then the import on
    `self.statements`, and returns the `Import` node.
    """
    from ast import Import
    # The original `*imps, cont = tree[0:-1], tree[-1]` unpacked a 2-tuple
    # and then took `imps[0]` back out — this is the same split, directly.
    imps, cont = tree[:-1], tree[-1]
    imp = Import(names=imps)
    self.statements.append(cont)
    self.statements.append(imp)
    return imp
def _exec_module(self, module):
    # """Execute the module."""
    # Parse the module's own source so it can be instrumented before exec.
    tree = parse(inspect.getsource(module))
    ####################################################################
    # special treatment for statements like "from __future__ import ..."
    # Drop everything before the first `from __future__ import ...`
    # (e.g. the module docstring); the `next(..., 0)` default makes this
    # a no-op slice when there is no __future__ import at all.
    tree.body = tree.body[next((
        i for i, x in enumerate(tree.body)
        if isinstance(x, ImportFrom) and x.module == '__future__'), 0):]
    # Advance `i` past the run of consecutive __future__ imports: they
    # must stay first, so our injected imports go right after them.
    i = 0
    while i < len(tree.body) and isinstance(
            tree.body[i], ImportFrom) and tree.body[i].module == '__future__':
        i += 1
    ####################################################################
    # Each insert at the same index `i` pushes the previous ones down,
    # so the final order is the REVERSE of insertion order
    # (libct.utils ends up first, libct.concolic.bool last).
    tree.body.insert(
        i, Import(names=[alias(name='libct.concolic.bool', asname=None)]))
    tree.body.insert(
        i, Import(names=[alias(name='libct.concolic.float', asname=None)]))
    tree.body.insert(
        i, Import(names=[alias(name='libct.concolic.int', asname=None)]))
    tree.body.insert(
        i, Import(names=[alias(name='libct.concolic.str', asname=None)]))
    tree.body.insert(
        i, Import(names=[alias(name='libct.concolic.range', asname=None)]))
    tree.body.insert(i, Import(names=[alias(name='libct.utils', asname=None)]))
    # Rewrite calls, constants, and assignments to their concolic
    # wrappers; the commented-out transformers are deliberately disabled.
    tree = ConcolicWrapperCall().visit(tree)
    tree = ConcolicWrapperConstant().visit(tree)
    # tree = ConcolicWrapperCompare().visit(tree)
    tree = ConcolicWrapperAssign().visit(tree)
    # tree = ConcolicWrapperFunctionDef().visit(tree)
    # tree = ConcolicWrapperClassDef().visit(tree) # unwrap classes' docstrings
    # Fill in the location info the inserted nodes lack, then compile and
    # execute the instrumented code in the module's own namespace.
    fix_missing_locations(tree)
    code = compile(tree, module.__file__, 'exec')
    importlib._bootstrap._call_with_frames_removed(exec, code, module.__dict__)
def visit_Import(self, node: ast.Import) -> Any:
    """Strip ipython2cwl names from an import statement.

    Drops every alias that is `ipython2cwl` or one of its submodules.
    Returns the node with the surviving aliases, or `None` (removing the
    statement entirely) when nothing is left.
    """
    kept = [
        name for name in node.names
        if not (name.name == 'ipython2cwl'
                or name.name.startswith('ipython2cwl.'))
    ]
    if not kept:
        return None
    node.names = kept
    return node
def find_macros(tree, name):
    """Collect macro bindings from `from ... import macros, ...` statements.

    Scans the top level of the module body and returns a dict mapping
    macro names to their implementations (empty if no macros are used).
    Each macro-import is rewritten in place as a plain `import <module>`
    so the macro names cannot be used at run time.
    """
    bindings = {}
    for index, statement in enumerate(tree.body):
        if not _is_macro_import(statement):
            continue
        bindings.update(_get_macros(statement, name))
        # Replace with a bare import to prevent macro names being used
        # as ordinary run-time objects.
        replacement = Import(names=[alias(name=statement.module, asname=None)])
        tree.body[index] = copy_location(replacement, statement)
    return bindings
def visit_Module(self, n: Module) -> Module:
    """ Adds: 1. a global "__depth". TODO: make it a thread-local variable,
    instead of a global. 2. "import sys" """
    self.generic_visit(n)
    # Prepend the depth counter AFTER the recursive visit, so the
    # variable itself never gets traced.
    depth_init = Assign([Name(self._DEPTH_VAR, Store())], Constant(0))
    n.body.insert(0, self._fix_location(depth_init, n.body[0]))
    # Prepend the import; anchored to the current first statement.
    sys_import = Import([alias("sys")])
    n.body.insert(0, self._fix_location(sys_import, n.body[0]))
    return n
def sort_imports(node):
    """Return a copy of `node` whose top-level imports are normalized.

    `from`-imports (merged per module, names sorted) come first, ordered
    by module name; plain imports follow, one name per statement, sorted
    by name; every other statement keeps its original relative order.

    NOTE(review): merging keys ImportFrom statements by `module` only and
    re-emits them with `level=0`, so relative imports (level > 0) would
    lose their level — confirm callers only pass absolute imports.
    """
    plain_imports = []
    import_froms = defaultdict(list)
    remainder = []
    for stmnt in node.body:
        if isinstance(stmnt, Import):
            plain_imports.extend(stmnt.names)
        elif isinstance(stmnt, ImportFrom):
            import_froms[stmnt.module].extend(stmnt.names)
        else:
            remainder.append(stmnt)
    new_body = []
    for module in sorted(import_froms):
        names = sorted(import_froms[module], key=lambda a: a.name)
        new_body.append(ImportFrom(module=module, names=names, level=0))
    # Renamed from `alias`: the original loop variable shadowed the
    # `ast.alias` class used elsewhere in this module.
    for imported_name in sorted(plain_imports, key=lambda a: a.name):
        new_body.append(Import(names=[imported_name]))
    new_body.extend(remainder)
    # Copy the module shell; the new body still references the original
    # child nodes (as before — only the container is fresh).
    out = deepcopy(node)
    out.body = new_body
    return out
asname=None, identifier=None, identifier_name=None, ), ], level=1, identifier=None, ) _import_star_from_input_str = to_code(_import_star_from_input_ast) _import_gen_test_module_ast = Import( names=[ alias( name="gen_test_module", asname=None, identifier=None, identifier_name=None, ) ], alias=None, ) _import_gen_test_module_str = "{}\n".format( to_code(_import_gen_test_module_ast).rstrip("\n")) class TestGen(TestCase): """Test class for gen.py""" sys_path = deepcopy(sys.path) tempdir = None
def _imported(self): for imported in self.imported: yield Import(names=[alias(name=imported, asname=None)])
def find_macros(tree, *, filename, reload=False, self_module=None, transform=True):
    """Establish macro bindings from `tree`. Top-level entry point.

    Note that while this is a top-level entry point for the **macro**
    expander, expanding macros is only a part of the full import
    algorithm. See the function `mcpyrate.compiler.expand` for the
    30,000ft (9,144m) view.

    Collect bindings from each macro-import statement
    (`from ... import macros, ...`) at the top level of `tree.body`.

    As a side effect, import the macro definition modules. (We must do
    this in order to load the macro function definitions, so that we can
    bind to them.)

    Primarily meant to be called with `tree` the AST of a module that
    uses macros, but works with any `tree` that has a `body` attribute,
    where that `body` is a `list` of statement AST nodes.

    `filename`:    str, full path to the `.py` being macroexpanded, for
                   resolving relative macro-imports and for error
                   reporting. In interactive use, can be an arbitrary
                   label.

    `reload`:      If enabled, refresh modules, causing different uses of
                   the same macros to point to different function
                   objects. Enable only if implementing a REPL.

    `self_module`: str, optional, absolute dotted module name of the
                   module being expanded. Used for supporting
                   `from __self__ import macros, ...` for multi-phase
                   compilation (a.k.a. staging).

    `transform`:   If enabled, transform each macro-import into
                   `import ...`, where `...` is the absolute module name
                   the macros are being imported from. Usually this is
                   the Right Thing to do, to honor the unhygienic expose
                   API guarantee.

                   The notable exception is multi-phase compilation,
                   which needs to produce two versions of the code: one
                   to run immediately (to produce the temporary module
                   for the current phase), and another to be lifted into
                   the next phase. The code for the next phase needs to
                   have the original macro-imports so we can establish
                   the same bindings again in that phase; but in the code
                   to run immediately, macro-imports should be
                   transformed away so that the temporary module works as
                   expected.

    Return value is a dict `{macroname: function, ...}` with all
    collected bindings.
    """
    # Indices of statements to drop afterwards; deleting during the
    # enumerate loop would shift the remaining indices.
    stmts_to_delete = []
    bindings = {}
    for index, statement in enumerate(tree.body):
        if ismacroimport(statement):
            module_absname, more_bindings = get_macros(
                statement, filename=filename, reload=reload,
                self_module=self_module)
            bindings.update(more_bindings)
            if transform:
                if self_module and statement.module == "__self__":
                    # Remove self-macro-imports after establishing bindings.
                    # No need to import a module at run time; the importer
                    # lifts all the higher-phase code also into the code of
                    # the current phase.
                    #
                    # `statement` usually has location info, in which case
                    # we can replace the self-macro-import with a coverage
                    # dummy node. But it might not, if we are dealing with a
                    # dynamically generated module that's being multi-phase
                    # compiled. In that case it's best to just delete the
                    # statement now that it's done its job.
                    dummies = _insert_coverage_dummy_stmt(
                        None, statement, "<self-macro-import>", filename)
                    if dummies is not None:  # had location info?
                        tree.body[index] = dummies[0]
                    else:
                        stmts_to_delete.append(index)
                else:
                    # Remove all names to prevent macros being used as
                    # regular run-time objects. Always use an absolute
                    # import, for the unhygienic expose API guarantee.
                    tree.body[index] = copy_location(
                        Import(names=[alias(name=module_absname, asname=None)]),
                        statement)
    # Pop from the back so earlier indices stay valid.
    for index in reversed(stmts_to_delete):
        tree.body.pop(index)
    return bindings
from ..config import __lang_name__
from .context import TranspilerContext
from .retokenizer import retokenize


def transpile_source(script_source: str,
                     context: Optional[TranspilerContext]) -> AST:
    """Retokenize `script_source`, parse it, and hand the resulting AST
    to `transpile_ast`.

    A fresh `TranspilerContext` is created when `context` is None.
    """
    if context is None:
        context = TranspilerContext()
    prepared_script = retokenize(script_source, context)
    return transpile_ast(parse(prepared_script), context)


# Alias under which `subprocess` is imported into transpiled modules,
# so it cannot collide with user-level names.
SUBPROCESS_NAME = '__sb__'
subprocess_import = Import(
    names=[alias(name='subprocess', asname=SUBPROCESS_NAME)])
# Star-import of the language's standard library into transpiled modules.
std_import = ImportFrom(
    module=f'{__lang_name__}.std',
    names=[alias(name='*', asname=None)],
    level=0
)


class ShellCallTransformer(NodeTransformer):
    """AST transformer that records each node's parent while visiting.

    NOTE(review): the shell-call rewriting itself presumably lives in
    visitor methods outside this chunk — confirm against the full file.
    """

    def __init__(self, context: TranspilerContext):
        self.context = context

    def generic_visit(self, node: AST) -> Optional[AST]:
        # populate 'parent' on every direct child before recursing,
        # so visitor methods can walk upwards.
        for child in iter_child_nodes(node):
            child.parent = node  # type:ignore
        return super().generic_visit(node)
def parseImport(parser):
    """Parse an `import <module id>` statement.

    Consumes the IMPORT keyword, delegates the dotted module name to
    `parseModuleId`, and returns an `Import` node (the project's own node
    type, not `ast.Import`) tagged with the keyword's line number.
    """
    parser.check(lexeme=keywords['IMPORT'])
    # currentToken[2] appears to hold the line number — TODO confirm the
    # token tuple layout against the lexer.
    lineo = parser.currentToken[2]
    parser.next()
    return Import(parseModuleId(parser), lineo=lineo)