def __init__(self, script=None, file=None, tree=None, globals=None, locals=None, **kwargs):
    """Prepare a runnable ayrton script.

    Exactly one source of code is used: an explicit ``script`` string, the
    contents of ``file``, or an already-parsed ``tree``.  The parsed tree is
    prefixed with ``from ayrton import CommandWrapper``, run through
    CrazyASTTransformer, and compiled into ``self.source``.
    """
    if script is None and file is not None:
        # it's a pity that compile() does not accept a file as input
        # so we could avoid reading the whole file.
        # Fix: close the handle deterministically instead of leaking it.
        with open(file) as script_file:
            script = script_file.read()
    else:
        # placeholder filename for compile() when the code came in as a string
        file = 'arg_to_main'
    self.environ = Environment(globals, locals, **kwargs)
    if tree is None:
        tree = ast.parse(script)
        # Inject:
        # ImportFrom(module='ayrton', names=[alias(name='CommandWrapper', asname=None)], level=0)
        node = ImportFrom(module='ayrton',
                          names=[alias(name='CommandWrapper', asname=None)],
                          level=0)
        node.lineno = 0
        node.col_offset = 0
        ast.fix_missing_locations(node)
        tree.body.insert(0, node)
        # NOTE(review): a caller-provided tree is assumed to be already
        # transformed; only freshly parsed trees go through the transformer —
        # confirm against the original (indentation was lost in this source).
        tree = CrazyASTTransformer(self.environ).visit(tree)
    self.options = {}
    self.source = compile(tree, file, 'exec')
def emit_import_fixed_modules(
    self, node: ImportFrom, mod_name: str, mod: Mapping[str, object]
) -> None:
    """Emit bytecode for a ``from mod_name import ...`` whose module is "fixed".

    Names found in the pre-resolved mapping ``mod`` are loaded from the
    ``<fixed-modules>`` table and stored as globals; any remaining names fall
    back to the regular ImportFrom code path.
    """
    new_names = [n for n in node.names if n.name not in mod]
    if new_names:
        # We have names we don't know about, keep them around...
        new_node = ImportFrom(mod_name, new_names, node.level)
        super().visitImportFrom(new_node)

    # Load the module into TOS...
    self.emit("LOAD_NAME", "<fixed-modules>")
    self.emit("LOAD_CONST", mod_name)
    self.emit("BINARY_SUBSCR")  # TOS = mod

    # Store all of the imported names from the module.
    # (The index was never used, so plain iteration replaces enumerate.)
    for name in node.names:
        var_name = name.name
        asname = name.asname or var_name
        value = mod.get(var_name)
        if value is not None:
            # duplicate TOS (mod)
            self.emit("DUP_TOP")
            # var name
            self.emit("LOAD_CONST", var_name)
            self.emit("BINARY_SUBSCR")
            self.emit("STORE_GLOBAL", asname)

    # remove TOS (mod)
    self.emit("POP_TOP")
def import_from(self, i):
    """Build an ast.ImportFrom from a parsed ``from ... import ...`` rule.

    ``i`` is the rule's children: either ``[module]`` (meaning ``import *``)
    or ``[module, names]``.  Module/name parts may arrive as lark Trees or
    Name nodes and are normalised to strings / ast.alias nodes.
    """
    if len(i) == 1:
        module = i[0]
        names = alias(name='*', asname=None)
    else:
        module, names = i
    if isinstance(module, Tree):
        # A dotted module path arrives as a Tree of Name nodes.
        module = module.children
        module = '.'.join([m.id for m in module])
    if names is not None:
        if isinstance(names, Tree):
            names = names.children
        if isinstance(names, list):
            temp = []
            for name in names:
                if isinstance(name, Tree):
                    base = name.children[0]
                    if isinstance(base, Name):
                        base = base.id
                    if len(name.children) > 1:
                        # second child is the "as" alias
                        asname = name.children[1]
                        if isinstance(asname, Name):
                            asname = asname.id
                    else:
                        asname = None
                    name = alias(name=base, asname=asname)
                temp.append(name)
            # Fix: `temp` is a throwaway local, no need for the defensive copy.
            names = temp
        else:
            names = [names]
    return ImportFrom(module=module, names=names, level=0)
def add_import(node: Module, module: str, cls: str, try_to_import: bool = True) -> None:
    """Add ``from <module> import <cls>`` to the beginning of the script.

    If an ImportFrom for ``module`` already exists, ``cls`` is appended to its
    names (unless already present); otherwise a new ImportFrom is inserted at
    the top of ``node.body``.  The module is mutated in place.

    Parameters
    ----------
    node
        Module AST to modify.
    module
        Dotted module name to import from.
    cls
        Name to import from ``module``.
    try_to_import
        When True, verify that ``module`` exists and exposes ``cls``.

    Raises
    ------
    SourceException
        If ``try_to_import`` is set and the module or attribute is missing.
    """

    class AddImportTransformer(NodeTransformer):
        # Appends `cls` to an existing `from module import ...`, if any.
        def __init__(self, module: str, cls: str) -> None:
            self.done = False
            self.module = module
            self.cls = cls

        def visit_ImportFrom(self, node: ImportFrom) -> ImportFrom:
            if node.module == self.module:
                for existing in node.names:
                    if existing.name == self.cls:
                        self.done = True
                        break
                else:
                    # for/else: no matching alias found, so add one
                    node.names.append(alias(name=self.cls, asname=None))
                    self.done = True
            return node

    if try_to_import:
        try:
            imported_mod = importlib.import_module(module)
        except ModuleNotFoundError as e:
            # chain the cause so the original traceback is preserved
            raise SourceException(e) from e
        try:
            getattr(imported_mod, cls)
        except AttributeError as e:
            raise SourceException(e) from e

    tr = AddImportTransformer(module, cls)
    node = tr.visit(node)
    if not tr.done:
        node.body.insert(
            0,
            ImportFrom(module=module, names=[alias(name=cls, asname=None)], level=0))
def __exit__(self, exc_type, exc_val, exc_tb):
    """On clean context exit, prepend the collected imports to the module.

    ``self.imports`` maps dotted-name tuples to local alias names; entries
    sharing a module prefix are merged into one ImportFrom each, emitted in
    sorted order at the top of ``self.module.body``.
    """
    if exc_type:
        # An exception is propagating: leave the module untouched.
        return
    grouped = defaultdict(list)
    for parts, bound_name in self.imports.items():
        grouped[parts[:-1]].append((parts[-1], bound_name))
    import_stmts = [
        ImportFrom(
            module='.'.join(prefix),
            names=[alias(name=n, asname=a) for n, a in sorted(pairs)],
            lineno=1,
            col_offset=0,
            level=0,
        )
        for prefix, pairs in sorted(grouped.items())
    ]
    self.module.body[0:0] = import_stmts
def _do_wrap_class_register(tree, mod, base_class):
    """Surround a ClassDef with a relative import of *base_class* and a
    ``<base_class>.register(<name.lower()>, <name>)`` call after it."""
    if not isinstance(tree, ClassDef):
        # Just in case somebody applies it to anything other than a class
        return tree  # pragma: no cover
    cls_name = tree.name
    # <base_class>.register member access
    register_attr = Attribute(value=Name(id=base_class, ctx=Load()),
                              attr='register', ctx=Load())
    # Function call to it passing the lowercased name and the class itself
    register_call = Expr(value=Call(
        func=register_attr,
        args=[Str(s=cls_name.lower()), Name(id=cls_name, ctx=Load())],
        keywords=[],
    ))
    # Relative import bringing the base class into scope
    base_import = ImportFrom(module=mod,
                             names=[alias(name=base_class, asname=None)],
                             level=1)
    return [base_import, tree, register_call]
def test_skips_existent():
    """Requesting the same (package, module, name) twice must reuse the
    previously minted alias instead of generating a new one."""
    module = check_mod()
    with Names(module, prefix='') as subj:
        assert subj.dotted('test', 'mod', 'func_name') == 'func_name0'
        assert subj.dotted('test', 'mod2', 'func_name') == 'func_name1'
        # Repeated lookups: existing aliases are returned, not re-numbered.
        assert subj.dotted('test', 'mod', 'func_name') == 'func_name0'
        assert subj.dotted('test', 'mod2', 'func_name') == 'func_name1'
    # Pattern for match_ast: the set(...) wrappers presumably mark capture
    # slots keyed by the contained string — confirm against match_ast's docs.
    expect = ImportFrom(
        module=set(['mod']),
        names=[alias(name=set(['name']), asname=set(['asname']))])
    print(dump(module))
    assert match_ast(expect, module.body[0]) == {
        'asname': 'func_name0',
        'mod': 'test.mod',
        'name': 'func_name'
    }
    assert match_ast(expect, module.body[1]) == {
        'asname': 'func_name1',
        'mod': 'test.mod2',
        'name': 'func_name'
    }
def _do_wrap_class_register(tree, mod, base_class):
    """Surround a ClassDef with a relative import of *base_class* (placed on
    the decorator line) and a ``register`` call (placed on the line after the
    class), keeping line numbers plausible for tracebacks."""
    if not isinstance(tree, ClassDef):
        # Just in case somebody applies it to anything other than a class
        return tree  # pragma: no cover
    cls_name = tree.name
    first_line = tree.lineno
    # Python 3.8 would give us tree.end_lineno directly; on Python 3.7,
    # one line past the start is good enough for our needs:
    last_line = first_line + 1
    # <base_class>.register member access
    register_attr = Attribute(value=Name(id=base_class, ctx=Load()),
                              attr='register', ctx=Load())
    # Call it with the lowercased name and the class itself.
    # Put it in the last line.
    register_call = Expr(
        value=Call(
            func=register_attr,
            args=[Str(s=cls_name.lower()), Name(id=cls_name, ctx=Load())],
            keywords=[],
        ),
        lineno=last_line,
        col_offset=0,
        end_lineno=last_line,
        end_col_offset=50,
    )
    # Create the import.
    # Put it in the decorator line.
    base_import = ImportFrom(
        module=mod,
        names=[alias(name=base_class, asname=None)],
        level=1,
        lineno=first_line - 1,
        col_offset=0,
        end_lineno=first_line - 1,
        end_col_offset=50,
    )
    return [base_import, tree, register_call]
def sort_imports(node):
    """Return a copy of *node* with imports sorted and hoisted to the top.

    ``from`` imports come first (grouped by module, modules sorted, names
    sorted within each group), then plain imports one-per-statement in name
    order, then every remaining statement in original order.  The input
    module is not modified.
    """
    imports = []
    import_froms = defaultdict(list)
    remainder = []
    for stmnt in node.body:
        if isinstance(stmnt, Import):
            imports.extend(stmnt.names)
        elif isinstance(stmnt, ImportFrom):
            import_froms[stmnt.module].extend(stmnt.names)
        else:
            remainder.append(stmnt)
    new_body = []
    for key in sorted(import_froms.keys()):
        aliases = sorted(import_froms[key], key=lambda i: i.name)
        new_body.append(ImportFrom(module=key, names=aliases, level=0))
    # Fix: the loop variable was named `alias`, shadowing ast.alias — renamed.
    for import_alias in sorted(imports, key=lambda i: i.name):
        new_body.append(Import(names=[import_alias]))
    new_body.extend(remainder)
    out = deepcopy(node)
    out.body = new_body
    return out
def new_object_type(parent: ObjectTypeMeta, child: ObjectTypeMeta) -> AST:
    """Build a module AST declaring *child* as a concrete subclass of *parent*.

    Built-in parents are imported from the abstract object-types module;
    user-defined parents from their own (depascalized) sibling module.
    """
    assert parent.type == child.base
    if parent.type in built_in_types_names():
        parent_module = arcor2.object_types.abstract.__name__
    else:
        parent_module = f".{humps.depascalize(parent.type)}"
    parent_import = ImportFrom(
        module=parent_module,
        names=[alias(name=parent.type, asname=None)],
        level=0,
    )
    # Concrete subclasses flip the _ABSTRACT flag off.
    abstract_flag = Assign(
        targets=[Name(id="_ABSTRACT", ctx=Store())],
        value=NameConstant(value=False, kind=None),
        type_comment=None,
    )
    cls_def = ClassDef(
        name=child.type,
        bases=[get_name(parent.type)],
        keywords=[],
        body=[abstract_flag],
        decorator_list=[],
    )
    # TODO add docstring with description (if provided)
    cls_def.body.append(Pass())
    return Module(body=[parent_import, cls_def], type_ignores=[])
def __init__(self, filename):
    """Initialise the compiler state for *filename*.

    Seeds ``self.module`` with the piglet runtime import plus bindings for
    ``Markup`` and ``__piglet_rtdata`` so generated code can reference them.
    """
    self.filename = filename
    self.uniquec = defaultdict(count)
    self.src_root = None
    runtime_import = ImportFrom(
        module='piglet',
        names=[alias(name='runtime', asname='__piglet_rt')],
        level=0)
    self.module = Module(body=[runtime_import])
    # Store references to generated functions corresponding to nested
    # py:blocks to facilitate retro-fitting the necessary function
    # arguments up the call chain.
    self.block_chain = []
    markup_binding = Assign(
        targets=[Name(id='Markup', ctx=Store())],
        value=Attribute(value=Name(id='__piglet_rt', ctx=Load()),
                        attr='Markup', ctx=Load()))
    rtdata_binding = Assign(
        targets=[StoreName('__piglet_rtdata')],
        value=LoadAttribute('__piglet_rt', 'data'))
    self.module.body.extend([markup_binding, rtdata_binding])
def letfromimport(self, tree):
    """Translate a parsed ``from ... import ...`` form into an ast.ImportFrom.

    ``tree`` holds: [0] the module (attribute chain or idalias Tree),
    [1:-1] the imported names (possibly with aliases), [-1] the continuation
    statement.  Both the continuation and the import are appended to
    ``self.statements``; the import node is returned.
    """
    from ast import ImportFrom, alias, Attribute
    from lark import Tree

    def flat_attrs(attr):
        # Flatten a (possibly nested) attribute chain into a dotted string.
        if isinstance(attr, str):
            return attr
        elif isinstance(attr, Attribute):
            name = flat_attrs(attr.value)
        elif hasattr(attr, "value"):
            # NOTE(review): assumes attr.value is a Name-like node with .id
            # and attr has .attr — confirm which node kinds reach this branch.
            name = attr.value.id
        return ".".join((name, attr.attr))

    def count_dots(fqmod):
        # Number of leading dots == relative-import level.
        count = 0
        for char in fqmod:
            if char == ".":
                count += 1
            else:
                break
        return count

    is_idalias = isinstance(tree[0], Tree) and tree[0].data == "idalias"
    modfqname = flat_attrs(
        tree[0]) if not is_idalias else tree[0].children[0].id
    # Two-child alias trees carry an "as" name; single-child ones do not.
    aliases = [
        alias(a.children[0].id, a.children[1].id)
        if len(a.children) == 2 else alias(a.children[0].id)
        for a in tree[1:-1]
    ]
    cont = tree[-1]
    level = count_dots(modfqname)
    fimp = ImportFrom(module=modfqname, names=aliases, level=level)
    # NOTE(review): continuation is appended before the import — presumably
    # self.statements is consumed in reverse; confirm before reordering.
    self.statements.append(cont)
    self.statements.append(fimp)
    return fimp
def import_module_all(name: str):
    """Return a ``from <name> import *`` AST node (absolute import)."""
    star = ast.alias(name='*', asname=None)
    return ImportFrom(module=name, names=[star], level=0)
def exmod(
    module,
    emit_name,
    blacklist,
    whitelist,
    output_directory,
    dry_run,
    filesystem_layout="as_input",
):
    """
    Expose module as `emit` types into `output_directory`

    :param module: Module name or path
    :type module: ```str```

    :param emit_name: What type(s) to generate.
    :type emit_name: ```List[Literal["argparse", "class", "function", "sqlalchemy", "sqlalchemy_table"]]```

    :param blacklist: Modules/FQN to omit. If unspecified will emit all (unless whitelist).
    :type blacklist: ```List[str]```

    :param whitelist: Modules/FQN to emit. If unspecified will emit all (minus blacklist).
    :type whitelist: ```List[str]```

    :param output_directory: Where to place the generated exposed interfaces to the given `--module`.
    :type output_directory: ```str```

    :param dry_run: Show what would be created; don't actually write to the filesystem
    :type dry_run: ```bool```

    :param filesystem_layout: Hierarchy of folder and file names generated. "java" is file per package per name.
    :type filesystem_layout: ```Literal["java", "as_input"]```
    """
    if dry_run:
        print("mkdir\t{output_directory!r}".format(
            output_directory=output_directory))
    elif not path.isdir(output_directory):
        makedirs(output_directory)

    # Filtering is declared in the signature but not implemented yet.
    if blacklist:
        raise NotImplementedError("blacklist")
    elif whitelist:
        raise NotImplementedError("whitelist")

    module_name, new_module_name = map(path.basename, (module, output_directory))
    # A directory is loaded from file; a dotted name through importlib.
    module = (partial(module_from_file, module_name=module_name)
              if path.isdir(module)
              else import_module)(module)
    module_root_dir = path.dirname(module.__file__) + path.sep

    _mkdir_and_emit_file = partial(
        mkdir_and_emit_file,
        emit_name=emit_name,
        module_name=module_name,
        new_module_name=new_module_name,
        filesystem_layout=filesystem_layout,
        output_directory=output_directory,
        dry_run=dry_run,
    )

    # Might need some `groupby` in case multiple files are in the one project; same for `get_module_contents`
    imports = list(
        map(
            _mkdir_and_emit_file,
            map(
                lambda name_source: (
                    name_source[0],
                    # Strip the leading "<module_name>." prefix, if present.
                    (lambda filename: filename[len(module_name) + 1:]
                     if filename.startswith(module_name)
                     else filename)(relative_filename(getfile(name_source[1]))),
                    # Dry runs skip parsing and use an empty IR skeleton.
                    {
                        "params": OrderedDict(),
                        "returns": OrderedDict()
                    } if dry_run else parse.class_(name_source[1]),
                ),
                # sorted(
                map(
                    lambda name_source: (
                        name_source[0][len(module_name) + 1:],
                        name_source[1],
                    ),
                    get_module_contents(
                        module, module_root_dir=module_root_dir).items(),
                ),
                # key=itemgetter(0),
                # ),
            ),
        ),
    )
    assert len(imports), "Module contents are empty"

    # Group the generated ImportFrom nodes by their destination sub-module.
    modules_names = tuple(
        map(
            lambda name_module: (
                name_module[0],
                tuple(map(itemgetter(1), name_module[1])),
            ),
            groupby(
                map(
                    lambda node_mod: (
                        node_mod[0],
                        node_mod[2].module,
                    ),
                    imports,
                ),
                itemgetter(0),
            ),
        ))

    init_filepath = path.join(output_directory, new_module_name,
                              "__init__{extsep}py".format(extsep=extsep))
    if dry_run:
        print("write\t{init_filepath!r}".format(init_filepath=init_filepath))
    else:
        # Write the package __init__ re-exporting everything plus __all__.
        emit.file(
            Module(
                body=list(
                    chain.from_iterable((
                        (Expr(set_value("\nExport internal imports\n")), ),
                        map(
                            lambda module_names: ImportFrom(
                                module=module_names[0],
                                names=list(
                                    map(
                                        lambda names: alias(
                                            names,
                                            None,
                                            identifier=None,
                                            identifier_name=None,
                                        ),
                                        module_names[1],
                                    )),
                                level=1,
                                identifier=None,
                            ),
                            modules_names,
                        ),
                        (Assign(targets=[Name("__all__", Store())],
                                value=List(
                                    ctx=Load(),
                                    elts=list(
                                        map(
                                            set_value,
                                            sorted(
                                                frozenset(
                                                    chain.from_iterable(
                                                        map(
                                                            itemgetter(1),
                                                            modules_names,
                                                        )),
                                                )),
                                        )),
                                    expr=None,
                                ),
                                expr=None,
                                lineno=None,
                                **maybe_type_comment), ),
                    ))),
                stmt=None,
                type_ignores=[],
            ),
            init_filepath,
            mode="wt",
        )
slice=Index(value=Name(id='ChartCategoryData', ctx=Load())), ctx=Load() ) chart_bubble_data_value = Subscript( value=Name(id='Optional', ctx=Load()), slice=Index(value=Name(id='ChartBubbleData', ctx=Load())), ctx=Load() ) table_data_value = Subscript( value=Name(id='Optional', ctx=Load()), slice=Index(value=Name(id='TableData', ctx=Load())), ctx=Load() ) clone = Module( body=[ ImportFrom(module='dataclasses', names=[alias(name='dataclass', asname=None)], level=0), ImportFrom(module='typing', names=[alias(name='Optional', asname=None)], level=0), ImportFrom(module='PPTT.type', names=[ alias(name='SlideData', asname=None), alias(name='TextData', asname=None), alias(name='ChartCategoryData', asname=None), alias(name='ChartBubbleData', asname=None), alias(name='ChartXYData', asname=None), alias(name='TableData', asname=None), ], level=0), ClassDef( name='Slide1Content', bases=[], keywords=[], body=[
def mkdir_and_emit_file(
    name_orig_ir,
    emit_name,
    module_name,
    new_module_name,
    filesystem_layout,
    output_directory,
    dry_run,
):
    """
    Generate Java-package—or match input—style file hierarchy from fully-qualified module name

    :param name_orig_ir: FQ module name, original filename path, IR
    :type name_orig_ir: ```Tuple[str, str, dict]```

    :param emit_name: What type(s) to generate.
    :type emit_name: ```List[Literal["argparse", "class", "function", "sqlalchemy", "sqlalchemy_table"]]```

    :param module_name: Name of [original] module
    :type module_name: ```str```

    :param new_module_name: Name of [new] module
    :type new_module_name: ```str```

    :param filesystem_layout: Hierarchy of folder and file names generated. "java" is file per package per name.
    :type filesystem_layout: ```Literal["java", "as_input"]```

    :param output_directory: Where to place the generated exposed interfaces to the given `--module`.
    :type output_directory: ```str```

    :param dry_run: Show what would be created; don't actually write to the filesystem
    :type dry_run: ```bool```

    :returns: Import to generated module
    :rtype: ```ImportFrom```
    """
    mod_name, _, name = name_orig_ir[0].rpartition(".")
    original_relative_filename_path, ir = name_orig_ir[1], name_orig_ir[2]
    mod_path = path.join(
        output_directory,
        new_module_name,
        mod_name.replace(".", path.sep),
    )
    if not path.isdir(mod_path):
        if dry_run:
            print("mkdir\t{mod_path!r}".format(mod_path=mod_path))
        else:
            makedirs(mod_path)
    init_filepath = path.join(
        path.dirname(mod_path), "__init__{extsep}py".format(extsep=extsep)
    )
    if dry_run:
        print("touch\t{init_filepath!r}".format(init_filepath=init_filepath))
    else:
        # Touch the package marker so the directory imports cleanly.
        open(init_filepath, "a").close()

    # Dispatch to emit.<emit_name>; "class" maps to emit.class_.
    gen_node = getattr(emit, emit_name.replace("class", "class_"))(
        ir,
        **dict(
            **{"{emit_name}_name".format(emit_name=emit_name): name},
            **{} if emit_name == "class" else {"function_type": "static"}
        )
    )
    # `__all__ = ["<name>"]` statement appended to generated modules.
    __all___node = Assign(
        targets=[Name("__all__", Store())],
        value=List(
            ctx=Load(),
            elts=[set_value(name)],
            expr=None,
        ),
        expr=None,
        lineno=None,
        **maybe_type_comment
    )
    if not isinstance(gen_node, Module):
        # Wrap a bare node in a Module with a provenance docstring + imports.
        gen_node = Module(
            body=list(
                chain.from_iterable(
                    (
                        (
                            Expr(
                                set_value(
                                    "\nGenerated from {module_name}.{name}\n".format(
                                        module_name=module_name,
                                        name=name_orig_ir[0],
                                    )
                                )
                            ),
                        ),
                        ast.parse(imports_header).body,
                        (gen_node, __all___node),
                    )
                )
            ),
            stmt=None,
            type_ignores=[],
        )
    # "as_input" mirrors the original layout; otherwise one file per name.
    emit_filename, init_filepath = (
        map(
            partial(path.join, output_directory, new_module_name),
            (
                original_relative_filename_path,
                path.join(
                    path.dirname(original_relative_filename_path),
                    "__init__{extsep}py".format(extsep=extsep),
                ),
            ),
        )
        if filesystem_layout == "as_input"
        else map(
            partial(path.join, mod_path),
            (
                "{name}{extsep}py".format(name=name, extsep=extsep),
                "__init__{extsep}py".format(extsep=extsep),
            ),
        )
    )
    if path.isfile(emit_filename):
        # Merge into an existing file rather than clobbering it.
        with open(emit_filename, "rt") as f:
            mod = ast.parse(f.read())
        gen_node = merge_modules(mod, gen_node)
        merge_assignment_lists(gen_node, "__all__")
    if dry_run:
        print("write\t{emit_filename!r}".format(emit_filename=emit_filename))
    else:
        emit.file(gen_node, filename=emit_filename, mode="wt")
    if name != "__init__" and not path.isfile(init_filepath):
        if dry_run:
            # NOTE(review): prints emit_filename, not init_filepath, in this
            # branch — looks like a copy/paste slip; confirm upstream.
            print("write\t{emit_filename!r}".format(emit_filename=emit_filename))
        else:
            emit.file(
                Module(
                    body=[
                        Expr(
                            set_value("\n__init__ to expose internals of this module\n")
                        ),
                        ImportFrom(
                            module=name,
                            names=[
                                alias(
                                    name=name,
                                    asname=None,
                                    identifier=None,
                                    identifier_name=None,
                                ),
                            ],
                            level=1,
                            identifier=None,
                        ),
                        __all___node,
                    ],
                    stmt=None,
                    type_ignores=[],
                ),
                filename=init_filepath,
                mode="wt",
            )
    return (
        mod_name,
        original_relative_filename_path,
        ImportFrom(
            module=name,
            names=[
                alias(
                    name=name,
                    asname=None,
                    identifier=None,
                    identifier_name=None,
                ),
            ],
            level=1,
            identifier=None,
        ),
    )
from ast import (
    NodeTransformer,
    ImportFrom,
    alias,
)

from ..import_hook import create_hook

# Statement prepended to every transformed module: `from math import *`.
import_math = ImportFrom(
    module="math",
    names=[alias(name="*")],
    level=0,
)


class RewriteAst(NodeTransformer):
    """Prepends the math star-import to a module's body."""

    def visit_Module(self, node):
        node.body = [import_math] + [self.generic_visit(n) for n in node.body]
        return node


def transform_ast(filename, node):
    # `filename` is unused but required by the hook's callback signature.
    return RewriteAst().visit(node)


def test_trigo():
    # NOTE(review): only installs the hook; there is no assertion here —
    # presumably later tests import a .tr file through it. Confirm.
    create_hook(
        extensions=[".tr"],
        hook_name="trigo",
        transform_ast=transform_ast,
    )
def global_action_points_class(project: CachedProject) -> str:
    """Generate source for an ``ActionPoints`` class mirroring *project*.

    For every action point a class is emitted with a copy-returning
    ``position`` property plus nested ``...Joints`` / ``...Poses`` classes
    (when the AP has joints/orientations); ``ActionPoints.__init__``
    instantiates one attribute per AP.  Returns the rendered source string.
    """
    tree = Module(body=[])
    # Imports needed by the generated module itself.
    tree.body.append(
        ImportFrom(
            module=arcor2.data.common.__name__,
            names=[
                alias(name=ActionPoint.__name__, asname=None),
                alias(name=Position.__name__, asname=None),
                alias(name=Pose.__name__, asname=None),
                alias(name=ProjectRobotJoints.__name__, asname=None),
            ],
            level=0,
        ))
    tree.body.append(
        ImportFrom(
            module=copy.__name__,
            names=[alias(name=copy.deepcopy.__name__, asname=None)],
            level=0,
        ))
    tree.body.append(
        ImportFrom(
            module=RES_MODULE,
            names=[alias(name=RES_CLS, asname=None)],
            level=0,
        ))

    aps_init_body: List[Union[Assign, Pass]] = []

    for ap in project.action_points:
        # self._position = res.project.bare_action_point(<ap.id>).position
        ap_cls_body: List[Assign] = [
            Assign(
                targets=[
                    Attribute(value=Name(id="self", ctx=Load()),
                              attr="_position",
                              ctx=Store())
                ],
                value=Attribute(
                    value=Call(
                        func=Attribute(
                            value=Attribute(value=Name(id="res", ctx=Load()),
                                            attr="project",
                                            ctx=Load()),
                            attr=CachedProject.bare_action_point.__name__,
                            ctx=Load(),
                        ),
                        args=[Str(s=ap.id, kind="")],
                        keywords=[],
                    ),
                    attr="position",
                    ctx=Load(),
                ),
                type_comment=None,
            )
        ]

        ap_type_name = humps.pascalize(ap.name)

        # self._<joints.name> = res.project.joints(<joints.id>)
        ap_joints_init_body: List[Assign] = []
        for joints in project.ap_joints(ap.id):
            ap_joints_init_body.append(
                Assign(
                    targets=[
                        Attribute(value=Name(id="self", ctx=Load()),
                                  attr=f"_{joints.name}",
                                  ctx=Store())
                    ],
                    value=Call(
                        func=Attribute(
                            value=Attribute(value=Name(id="res", ctx=Load()),
                                            attr="project",
                                            ctx=Load()),
                            attr="joints",
                            ctx=Load(),
                        ),
                        args=[Str(s=joints.id, kind="")],
                        keywords=[],
                    ),
                    type_comment=None,
                ))

        if ap_joints_init_body:
            ap_joints_cls_def = ClassDef(
                name=f"{ap_type_name}Joints",
                bases=[],
                keywords=[],
                body=[
                    FunctionDef(
                        name="__init__",
                        args=arguments(
                            args=[
                                arg(arg="self", annotation=None, type_comment=None),
                                arg(arg="res",
                                    annotation=Name(id=RES_CLS, ctx=Load()),
                                    type_comment=None),
                            ],
                            vararg=None,
                            kwonlyargs=[],
                            kw_defaults=[],
                            kwarg=None,
                            defaults=[],
                        ),
                        body=ap_joints_init_body,
                        decorator_list=[],
                        returns=None,
                        type_comment=None,
                    )
                ],
                decorator_list=[],
            )

            # One deepcopy-returning property per joints entry.
            for joints in project.ap_joints(ap.id):
                ap_joints_cls_def.body.append(
                    FunctionDef(
                        name=joints.name,
                        args=arguments(
                            args=[
                                arg(arg="self", annotation=None, type_comment=None)
                            ],
                            vararg=None,
                            kwonlyargs=[],
                            kw_defaults=[],
                            kwarg=None,
                            defaults=[],
                        ),
                        body=[
                            Return(value=Call(
                                func=Name(id=copy.deepcopy.__name__, ctx=Load()),
                                args=[
                                    Attribute(value=Name(id="self", ctx=Load()),
                                              attr=f"_{joints.name}",
                                              ctx=Load())
                                ],
                                keywords=[],
                            ))
                        ],
                        decorator_list=[Name(id="property", ctx=Load())],
                        returns=Name(id=ProjectRobotJoints.__name__, ctx=Load()),
                        type_comment=None,
                    ))

            tree.body.append(ap_joints_cls_def)
            # self.joints = <Ap>Joints(res)
            ap_cls_body.append(
                Assign(
                    targets=[
                        Attribute(value=Name(id="self", ctx=Load()),
                                  attr="joints",
                                  ctx=Store())
                    ],
                    value=Call(
                        func=Name(id=f"{ap_type_name}Joints", ctx=Load()),
                        args=[Name(id="res", ctx=Load())],
                        keywords=[],
                    ),
                    type_comment=None,
                ))

        # self._<ori.name> = res.project.pose(<ori.id>)
        ap_orientations_init_body: List[Assign] = []
        for ori in project.ap_orientations(ap.id):
            ap_orientations_init_body.append(
                Assign(
                    targets=[
                        Attribute(value=Name(id="self", ctx=Load()),
                                  attr=f"_{ori.name}",
                                  ctx=Store())
                    ],
                    value=Call(
                        func=Attribute(
                            value=Attribute(value=Name(id="res", ctx=Load()),
                                            attr="project",
                                            ctx=Load()),
                            attr="pose",
                            ctx=Load(),
                        ),
                        args=[Str(s=ori.id, kind="")],
                        keywords=[],
                    ),
                    type_comment=None,
                ))

        if ap_orientations_init_body:
            ap_orientations_cls_def = ClassDef(
                name=f"{ap_type_name}Poses",
                bases=[],
                keywords=[],
                body=[
                    FunctionDef(
                        name="__init__",
                        args=arguments(
                            args=[
                                arg(arg="self", annotation=None, type_comment=None),
                                arg(arg="res",
                                    annotation=Name(id=RES_CLS, ctx=Load()),
                                    type_comment=None),
                            ],
                            vararg=None,
                            kwonlyargs=[],
                            kw_defaults=[],
                            kwarg=None,
                            defaults=[],
                        ),
                        body=ap_orientations_init_body,
                        decorator_list=[],
                        returns=None,
                        type_comment=None,
                    )
                ],
                decorator_list=[],
            )

            # One deepcopy-returning property per orientation.
            for ori in project.ap_orientations(ap.id):
                ap_orientations_cls_def.body.append(
                    FunctionDef(
                        name=ori.name,
                        args=arguments(
                            args=[
                                arg(arg="self", annotation=None, type_comment=None)
                            ],
                            vararg=None,
                            kwonlyargs=[],
                            kw_defaults=[],
                            kwarg=None,
                            defaults=[],
                        ),
                        body=[
                            Return(value=Call(
                                func=Name(id=copy.deepcopy.__name__, ctx=Load()),
                                args=[
                                    Attribute(value=Name(id="self", ctx=Load()),
                                              attr=f"_{ori.name}",
                                              ctx=Load())
                                ],
                                keywords=[],
                            ))
                        ],
                        decorator_list=[Name(id="property", ctx=Load())],
                        returns=Name(id=Pose.__name__, ctx=Load()),
                        type_comment=None,
                    ))

            tree.body.append(ap_orientations_cls_def)
            # self.poses = <Ap>Poses(res)
            ap_cls_body.append(
                Assign(
                    targets=[
                        Attribute(value=Name(id="self", ctx=Load()),
                                  attr="poses",
                                  ctx=Store())
                    ],
                    value=Call(
                        func=Name(id=f"{ap_type_name}Poses", ctx=Load()),
                        args=[Name(id="res", ctx=Load())],
                        keywords=[],
                    ),
                    type_comment=None,
                ))

        ap_cls_def = ClassDef(
            name=ap_type_name,
            bases=[],
            keywords=[],
            body=[
                FunctionDef(
                    name="__init__",
                    args=arguments(
                        args=[
                            arg(arg="self", annotation=None, type_comment=None),
                            arg(arg="res",
                                annotation=Name(id=RES_CLS, ctx=Load()),
                                type_comment=None),
                        ],
                        vararg=None,
                        kwonlyargs=[],
                        kw_defaults=[],
                        kwarg=None,
                        defaults=[],
                    ),
                    body=ap_cls_body,
                    decorator_list=[],
                    returns=None,
                    type_comment=None,
                )
            ],
            decorator_list=[],
        )

        # add copy property for position
        ap_cls_def.body.append(
            FunctionDef(
                name="position",
                args=arguments(
                    args=[arg(arg="self", annotation=None, type_comment=None)],
                    vararg=None,
                    kwonlyargs=[],
                    kw_defaults=[],
                    kwarg=None,
                    defaults=[],
                ),
                body=[
                    Return(value=Call(
                        func=Name(id=copy.deepcopy.__name__, ctx=Load()),
                        args=[
                            Attribute(value=Name(id="self", ctx=Load()),
                                      attr="_position",
                                      ctx=Load())
                        ],
                        keywords=[],
                    ))
                ],
                decorator_list=[Name(id="property", ctx=Load())],
                returns=Name(id=Position.__name__, ctx=Load()),
                type_comment=None,
            ))

        tree.body.append(ap_cls_def)

        # self.<ap.name> = <ApTypeName>(res)
        aps_init_body.append(
            Assign(
                targets=[
                    Attribute(value=Name(id="self", ctx=Load()),
                              attr=ap.name,
                              ctx=Store())
                ],
                value=Call(func=Name(id=ap_type_name, ctx=Load()),
                           args=[Name(id="res", ctx=Load())],
                           keywords=[]),
                type_comment=None,
            ))

    if not aps_init_body:  # there are no action points
        aps_init_body.append(Pass())

    aps_cls_def = ClassDef(
        name="ActionPoints",
        bases=[],
        keywords=[],
        body=[
            FunctionDef(
                name="__init__",
                args=arguments(
                    args=[
                        arg(arg="self", annotation=None, type_comment=None),
                        arg(arg="res",
                            annotation=Name(id=RES_CLS, ctx=Load()),
                            type_comment=None),
                    ],
                    vararg=None,
                    kwonlyargs=[],
                    kw_defaults=[],
                    kwarg=None,
                    defaults=[],
                ),
                body=aps_init_body,
                decorator_list=[],
                returns=None,
                type_comment=None,
            )
        ],
        decorator_list=[],
    )

    tree.body.append(aps_cls_def)
    return tree_to_str(tree)
def get_future_imports(self) -> Iterable[stmt]:
    """Yield a single ``from __future__ import ...`` statement covering all
    future-imports the visitor collected; yield nothing when there are none."""
    future_names = self.visitor.future_imports
    if not future_names:
        return
    yield lineinfo(ImportFrom("__future__", list(future_names), 0))
def import_symbols(name: str, symbols):
    """Return a ``from <name> import <symbols>`` node (absolute import).

    ``symbols`` is a list of ast.alias nodes, used as-is.
    """
    node = ImportFrom(module=name, names=symbols, level=0)
    return node
with open(input_filename, "wt") as f: f.write(input_module_str) return input_filename, input_module_ast, input_class_ast, expected_class_ast _import_star_from_input_ast = ImportFrom( module="input", names=[ alias( name="input_map", asname=None, identifier=None, identifier_name=None, ), alias( name="Foo", asname=None, identifier=None, identifier_name=None, ), ], level=1, identifier=None, ) _import_star_from_input_str = to_code(_import_star_from_input_ast) _import_gen_test_module_ast = Import( names=[ alias( name="gen_test_module",
ast.Or: "or",
ast.Eq: "==",
ast.NotEq: "!=",
ast.Lt: "<",
ast.LtE: "<=",
ast.Gt: ">",
ast.GtE: ">=",
ast.Is: "is",
ast.IsNot: "is not",
ast.In: "in",
ast.NotIn: "not in",
}
# NOTE(review): the opening of this operator-to-token mapping lies outside
# this chunk; entries above are reproduced verbatim.

# Statements injected into every transformed module: expose the worm
# runtime magics under the `_w` alias.
prelude = [
    ImportFrom(module="worm", names=[alias(name="magics", asname="_w")], level=0),
]


def hook(debug=False):
    """Create the import hook that transpiles `.wm` files via transform_ast."""
    return create_hook(
        extensions=[".wm"],
        hook_name="worm",
        transform_ast=transform_ast,
        debug=debug,
    )


def transform_ast(filename, node):
    """Rewrite the module AST through RewriteTopLevel, then repair locations."""
    tree = RewriteTopLevel(filename).visit(node)
    return ast.fix_missing_locations(tree)
from .context import TranspilerContext
from .retokenizer import retokenize


def transpile_source(script_source: str, context: Optional[TranspilerContext]) -> AST:
    """Retokenize shell-style source and transpile it into a Python AST.

    A fresh TranspilerContext is created when none is supplied.
    """
    if context is None:
        context = TranspilerContext()
    prepared_script = retokenize(script_source, context)
    return transpile_ast(parse(prepared_script), context)


# Alias under which the subprocess module is imported into transpiled code.
SUBPROCESS_NAME = '__sb__'
subprocess_import = Import(names=[alias(name='subprocess', asname=SUBPROCESS_NAME)])
# `from <lang>.std import *` — the transpiler's runtime standard library.
std_import = ImportFrom(
    module=f'{__lang_name__}.std',
    names=[alias(name='*', asname=None)],
    level=0
)


class ShellCallTransformer(NodeTransformer):
    # NOTE(review): this class is cut off at the end of this chunk; the
    # remainder of visit_Module (and any further methods) is not visible here.

    def __init__(self, context: TranspilerContext):
        self.context = context

    def generic_visit(self, node: AST) -> Optional[AST]:
        # populate 'parent' back-references before the default traversal
        for child in iter_child_nodes(node):
            child.parent = node  # type:ignore
        return super().generic_visit(node)

    def visit_Module(self, node: Module) -> Any:
        # Inject the subprocess import at the top of the transpiled module.
        # NOTE(review): chunk ends here — presumably std_import is inserted
        # and the node returned in the unseen continuation; confirm.
        node.body.insert(0, subprocess_import)