def _if_digit(): return [ If( test=UnaryOp( op=Not(), operand=Call( func=Attribute( value=Name(id='value', ctx=Load()), attr='isdigit', ctx=Load(), ), args=[], keywords=[], ), ), body=[ Return(value=Constant(value=False, kind=None), ), ], orelse=[], ) ]
def test_emit_arg(self) -> None:
    """Tests that `arg` is emitted from `emit_arg`"""
    # An `arg` node taken from the fixture should pass through unchanged.
    method_arg = class_with_method_and_body_types_ast.body[1].args.args[1]
    self.assertIsInstance(method_arg, arg)
    self.assertIsInstance(emit_arg(method_arg), arg)

    # An `Assign` should be converted into an `arg` named after its target.
    assignment = Assign(
        targets=[Name("yup", Store())],
        value=set_value("nup"),
        expr=None,
        **maybe_type_comment
    )
    generated = emit_arg(assignment)
    self.assertIsInstance(generated, arg)
    run_ast_test(
        self,
        gen_ast=generated,
        gold=set_arg("yup"),
    )
def test_param2argparse_param_none_default(self) -> None:
    """
    Tests that param2argparse_param works to reparse the default
    """
    expected = Expr(
        Call(
            args=[set_value("--yup")],
            func=Attribute(
                Name("argument_parser", Load()),
                "add_argument",
                Load(),
            ),
            keywords=[],
            expr=None,
            expr_func=None,
        )
    )
    run_ast_test(
        gen_ast=param2argparse_param(("yup", {"default": NoneStr})),
        gold=expected,
        test_case_instance=self,
    )
def create_match_clause_body(self, pattern: Pattern, body: Expr):
    """Given a match clause pattern and a clause body,
    generates a Python function that when called with an ADT
    that matches the pattern, returns the result of evaluating
    the clause body. This function returns a function definition
    and the name of the generated function."""

    def gather_bindings(pat, val):
        """Walk the pattern and emit one assignment per PatternVar so every
        bound subfield of the matched value is available in the clause body.

        E.g., for PatternConstructor(A, PatternVar(v), PatternWildcard(),
        PatternConstructor(B, PatternVar(w))) we produce:
            v = a.fields[0]
            w = a.fields[2].fields[0]
        """
        if isinstance(pat, relay.PatternWildcard):
            return []
        if isinstance(pat, relay.PatternVar):
            return [Assign([self.include_var(pat.var, assign=True)], val)]
        # Constructor pattern: recurse into each subpattern against the
        # corresponding val.fields[i].
        bindings = []
        for i, sub_pattern in enumerate(pat.patterns):
            field = ast.Subscript(
                ast.Attribute(val, "fields", Load()),
                ast.Index(Num(i)),
                Load())
            bindings += gather_bindings(sub_pattern, field)
        return bindings

    func_name = self.generate_function_name("_match_clause_body")
    arg_name = self.generate_var_name("_match_clause_body")

    clause_body, defs = self.visit(body)
    assignments = gather_bindings(pattern, Name(arg_name, Load()))

    func_def = self.create_def(
        func_name, [arg_name], defs + assignments + [Return(clause_body)])
    return (func_def, func_name)
def test_param2argparse_param_default_ast_binop(self) -> None:
    """
    Tests that param2argparse_param works to change the type based on the default
    whence said default is a non specially handled ast.AST
    """
    param = (
        "byo",
        {
            "default": BinOp(
                set_value(5),
                Mult(),
                set_value(5),
            ),
            "typ": "str",
        },
    )
    expected = Expr(
        Call(
            args=[set_value("--byo")],
            func=Attribute(
                Name("argument_parser", Load()),
                "add_argument",
                Load(),
            ),
            keywords=[
                keyword(arg="required", value=set_value(True), identifier=None),
                keyword(
                    arg="default",
                    value=set_value("```(5 * 5)```"),
                    identifier=None,
                ),
            ],
            expr=None,
            expr_func=None,
        )
    )
    run_ast_test(
        gen_ast=param2argparse_param(param),
        gold=expected,
        test_case_instance=self,
    )
def _generate_assignment(arg_and_handle):
    # type: (Tuple[str, int]) -> If
    """
    We have a function that looks like:

    def do_something(param, model_=INJECTED):
        <...>

    We insert into its beginning a statement like

    ___INJECT_CONTEXT_INTERNAL_RESOURCES = ___INJECT_CONTEXT_INTERNAL.resources
    if model_ is INJECTED:
        model_ = ___INJECT_CONTEXT_INTERNAL_RESOURCES[3]
        if model is ___INJECT_CONTEXT_INTERNAL:
            # means that no resource is available
            ___INJECT_CONTEXT_INTERNAL_RESOURCES.flag_missing('model_')

    Code outside of this function sets a global variable _INJECTED__model
    to point at the right thing.

    :param arg_and_handle: ``(arg_name, arg_resource_handle)`` 2-tuple.
    :returns: the outer `If` node implementing the lookup above.
    """
    # BUG FIX: the original used a Python-2-only tuple parameter
    # (`def _generate_assignment((arg_name, arg_resource_handle))`), which is a
    # SyntaxError on Python 3 (PEP 3113). Unpacking inside the body keeps the
    # call signature identical on both Python 2 and 3.
    arg_name, arg_resource_handle = arg_and_handle

    # ___INJECT_CONTEXT_INTERNAL_RESOURCES[<handle>]
    target_attribute = Subscript(
        value=Name(id=INTERNAL_RESOURCES_NAME, ctx=Load()),
        slice=Index(value=Num(n=arg_resource_handle)),
        ctx=Load())

    consequence = [
        Assign(targets=[Name(id=arg_name, ctx=Store())],
               value=target_attribute),
        # If the looked-up resource is still the sentinel, flag it as missing.
        If(test=Compare(
            left=Name(id=arg_name, ctx=Load()),
            ops=[Is()],
            comparators=[Name(id=INTERNAL_CONTEXT_NAME, ctx=Load())]),
           body=[
               Expr(value=Call(
                   func=Attribute(
                       value=Name(id=INTERNAL_CONTEXT_NAME, ctx=Load()),
                       attr='flag_missing',
                       ctx=Load()),
                   keywords=[],
                   starargs=None,
                   kwargs=None,
                   args=[Str(s=arg_name)]))
           ],
           orelse=[])
    ]  # type: List[Union[Assign, If]]

    return If(test=Compare(
        left=Name(id=arg_name, ctx=Load()),
        ops=[Is()],
        comparators=[default_nodes_mapping[arg_name]]),
        body=consequence,
        orelse=[])
def convert(self, prog: Expr):
    """This method converts the passed Relay expression into a Python
    AST object with equivalent semantics.

    The Python AST can be executed using exec(); it can be turned
    into text and inspected using astor.
    """
    optimized = self.optimize(prog)

    # Conversion prelude (imports) followed by converted global definitions.
    module_body = []
    module_body += PROLOGUE
    module_body += self.convert_module()

    prog_body, extra_defs = self.visit(optimized)
    module_body += extra_defs

    # Bind the final expression to the output variable so callers can read
    # the result after exec().
    module_body.append(Assign([Name(OUTPUT_VAR_NAME, Store())], prog_body))
    return ast.fix_missing_locations(ast.Module(body=module_body))
def test_slice():
    """Check that deslicify turns the slice of `x[:44:3]` into a
    `__builtins__.slice(start, stop, step)` call with the expected values."""
    matched = match_ast(pattern, parse('x[:44:3]'))
    slice_call = deslicify(matched['slice'])
    check = match_ast(
        Call(func=Attribute(value=Name(id='__builtins__', ctx=Load()),
                            attr='slice',
                            ctx=Load()),
             args=[
                 name_constant(value=set(['start'])),
                 Num(n=set(['stop'])),
                 Num(n=set(['step']))
             ]),
        slice_call)
    # Depending on the Python version the missing start renders as None or 'None'.
    expected_none = {'start': None, 'stop': 44, 'step': 3}
    expected_str = {'start': 'None', 'stop': 44, 'step': 3}
    assert check == expected_none or check == expected_str
def test_find_in_ast_with_val(self) -> None:
    """Tests that `find_in_ast` correctly gives AST node from
    `def class C(object): def function_name(self,dataset_name: str='foo',…)`"""
    found = find_in_ast(
        "C.function_name.dataset_name".split("."),
        class_with_method_and_body_types_ast,
    )
    # `Constant` on 3.8+, `Str` before.
    self.assertIsInstance(found.default, Constant if PY_GTE_3_8 else Str)
    self.assertEqual(get_value(found.default), "~/tensorflow_datasets")
    run_ast_test(
        self,
        found,
        set_arg(
            annotation=Name("str", Load()),
            arg="dataset_name",
        ),
    )
def visit_Assign(self, node):
    """Instrument an assignment so the tracing context records its value.

    Wraps the original assignment in start/report/end context calls inside a
    try/finally. NOTE(review): relies on several project helpers
    (`_create_context_call`, `_wrap_assignment_targets`, ...) whose contracts
    are not visible here.
    """
    existing_node = self.generic_visit(node)
    # Skip targets the tracer deliberately does not follow.
    if any(map(self._is_untraceable_attribute, existing_node.targets)):
        return existing_node
    line_numbers = set()
    self._find_line_numbers(existing_node, line_numbers)
    first_line_number = min(line_numbers)
    last_line_number = max(line_numbers)
    new_nodes = []
    format_string = self._wrap_assignment_targets(existing_node.targets)
    if (len(existing_node.targets) == 1 and
            isinstance(existing_node.targets[0], Tuple)):
        # Tuple target: wrap the RHS in tuple(...) so the traced value is
        # materialized once before unpacking.
        existing_node.value = Call(func=Name(id='tuple', ctx=Load()),
                                   args=[existing_node.value],
                                   keywords=[],
                                   starargs=None,
                                   kwargs=None)
    existing_node.value = self._create_bare_context_call(
        'set_assignment_value', [existing_node.value])
    new_nodes.append(self._create_context_call('start_assignment'))
    try_body = [existing_node]
    if format_string is not None:
        try_body.append(self._create_context_call(
            'report_assignment',
            [Str(s=format_string), Num(n=existing_node.lineno)]))
    end_assignment = self._create_context_call('end_assignment')
    finally_body = [end_assignment]
    # `TryFinally` is the Python-2-era AST node; this code predates py3's `Try`.
    new_nodes.append(TryFinally(body=try_body,
                                finalbody=finally_body,
                                handlers=[],
                                orelse=[],
                                lineno=first_line_number))
    self._set_statement_line_numbers(try_body, first_line_number)
    self._set_statement_line_numbers(finally_body, last_line_number)
    return new_nodes
def visit_Subscript(self, node: Subscript) -> AST:
    """Implementation of PEP585 and PEP604.

    Rewrites `typing.Union[...]` into a chain of `X | Y` BinOps and
    `typing.Optional[X]` into `X | None`; warns about `typing` generics
    deprecated by PEP 585 and substitutes the builtin equivalent.
    """
    if not isinstance(node.value, Name):
        return node
    name = node.value.id
    # Resolve aliases (e.g. `from typing import Union as U`).
    idf = self.alias.get(_m(self.root, name), name)
    if idf == 'typing.Union':
        # `Union[X]` with a single argument is just X itself.
        if not isinstance(node.slice, Tuple):
            return node.slice
        # Fold Union[a, b, c] left-to-right into a | b | c.
        folded = node.slice.elts[0]
        for e in node.slice.elts[1:]:
            folded = BinOp(folded, BitOr(), e)
        return folded
    elif idf == 'typing.Optional':
        # Optional[X] -> X | None
        return BinOp(node.slice, BitOr(), Constant(None))
    elif idf in PEP585:
        logger.warning(f"{node.lineno}:{node.col_offset}: "
                       f"find deprecated name {idf}, "
                       f"recommended to use {PEP585[idf]}")
        # BUG FIX: `ctx` must be a `Load()` instance, not the `Load` class;
        # passing the class produces a malformed node that fails on
        # compile/unparse.
        return Subscript(Name(PEP585[idf], Load()), node.slice, node.ctx)
    else:
        return node
def test_param2argparse_param_default_ast_list(self) -> None:
    """
    Tests that param2argparse_param works to change the type based on the default
    whence said default is an ast.List
    """
    param = (
        "byo",
        {
            "default": List(
                elts=[],
                ctx=Load(),
                expr=None,
            ),
            "typ": "str",
        },
    )
    expected = Expr(
        Call(
            args=[set_value("--byo")],
            func=Attribute(
                Name("argument_parser", Load()),
                "add_argument",
                Load(),
            ),
            keywords=[
                keyword(arg="action", value=set_value("append"), identifier=None),
                keyword(arg="required", value=set_value(True), identifier=None),
            ],
            expr=None,
            expr_func=None,
        )
    )
    run_ast_test(
        gen_ast=param2argparse_param(param),
        gold=expected,
        test_case_instance=self,
    )
def _compile_function(self, srcnode, parent, funcname,
                      append=True, descend=True):
    """
    Convenience function to create an ast.FunctionDef node and append it to
    the parent.

    :param srcnode: source intermediate node
    :param parent: parent ast node
    :param funcname: name of the function
    :param append: append the created function node to the parent's body?
    :param descend: compile srcnode.children into the created function
                    node's body?
    """
    # Every generated function is decorated with __piglet_rt.flatten.
    flatten_decorator = Attribute(
        value=Name(id='__piglet_rt', ctx=Load()),
        attr='flatten',
        ctx=Load())
    fn = FunctionDef(
        name=str(funcname),
        args=arguments(args=[],
                       defaults=[],
                       vararg=None,
                       kwonlyargs=[],
                       kwarg=None,
                       kw_defaults=[]),
        body=[],
        decorator_list=[flatten_decorator],
        returns=None)
    set_pos(fn, srcnode)
    if append:
        parent.body.append(fn)
    self.add_builtins(fn)
    if descend:
        for child in srcnode.children:
            self._compile(child, fn)
    return fn
def to_annotation(typ):
    """
    Converts the typ to an annotation

    :param typ: A string representation of the type to annotate with.
        Else return give identity.
    :type typ: ```Union[str, AST]```

    :returns: The annotation as a `Name` (usually) or else some more
        complex type
    :rtype: ```AST```
    """
    # Already an AST node: pass through untouched.
    if isinstance(typ, AST):
        return typ
    # Simple types map straight to a bare Name node.
    if typ in simple_types:
        return Name(typ, Load())
    # Otherwise parse the string and extract the resulting expression.
    parsed = ast.parse(typ)
    node = parsed.body[0] if getattr(parsed, "body", None) else parsed
    return get_value(node)
def visit_For(self, node):
    """Visit a `for` loop, binding its target and, when iterating over an
    executable command, forcing capture of its output.

    # For(target=Name(id='x', ctx=Store()), iter=List(elts=[], ctx=Load()),
    #     body=[Pass()], orelse=[])
    # For(target=Tuple(elts=[Name(id='band', ctx=Store()),
    #                        Name(id='color', ctx=Store())], ctx=Store()),
    #     iter=Tuple(elts=[...], ctx=Load()),
    #     body=[Pass()], orelse=[])
    """
    self.bind(node.target)
    self.generic_visit(node)

    # If the iterable is a Command call, add _out=Capture and _bg=True so
    # `for line in ls(): ...` streams output as expected.
    # For(target=Name(id='line', ctx=Store()),
    #     iter=Call(func=Call(func=Name(id='Command', ctx=Load()), ...
    if is_executable(node.iter):
        capture = keyword(arg='_out', value=Name(id='Capture', ctx=Load()))
        update_keyword(node.iter, capture)
        update_keyword(node.iter,
                       keyword(arg='_bg', value=NameConstant(value=True)))
    return node
def visit_Await(self, node):
    """Rewrite `await expr` into
    `_asyncio.get_event_loop().run_until_complete(expr)`."""
    self.generic_visit(node)
    # _asyncio.get_event_loop()
    loop_call = Call(
        func=Attribute(
            value=Name(id='_asyncio', ctx=Load()),
            attr='get_event_loop',
            ctx=Load()),
        args=[],
        keywords=[])
    # <loop>.run_until_complete(<awaited expression>)
    replacement = Call(
        func=Attribute(
            value=loop_call,
            attr='run_until_complete',
            ctx=Load()),
        args=[node.value],
        keywords=[])
    return ast.copy_location(replacement, node)
def new_object_type(parent: ObjectTypeMeta, child: ObjectTypeMeta) -> AST:
    """Build a module AST that declares `child` as a concrete subclass of
    `parent` (sets `_ABSTRACT = False`)."""
    assert parent.type == child.base

    tree = Module(body=[], type_ignores=[])

    # Built-in parents are imported from arcor2.object_types.abstract;
    # user-defined ones from their own (depascalized) relative module.
    if parent.type in built_in_types_names():
        import_from = arcor2.object_types.abstract.__name__
    else:
        import_from = f".{humps.depascalize(parent.type)}"

    tree.body.append(
        ImportFrom(
            module=import_from,
            names=[alias(name=parent.type, asname=None)],
            level=0))

    child_class = ClassDef(
        name=child.type,
        bases=[get_name(parent.type)],
        keywords=[],
        body=[
            Assign(
                targets=[Name(id="_ABSTRACT", ctx=Store())],
                value=NameConstant(value=False, kind=None),
                type_comment=None,
            )
        ],
        decorator_list=[],
    )

    # TODO add docstring with description (if provided)
    child_class.body.append(Pass())
    tree.body.append(child_class)
    return tree
def make_slide_class(slide: Slide, slide_index: int) -> list:
    """Generate the dataclass ClassDefs for one slide: a content class, a
    SlideData subclass, and (when the notes name the slide) alias classes."""
    slide_class_name = f"Slide{slide_index}"
    content_class_name = f"{slide_class_name}Content"

    contents_field = AnnAssign(
        target=Name(id='contents', ctx=Store()),
        annotation=ast_optional_subscript(content_class_name),
        value=ast_default_field(None),
        simple=1)
    position_field = AnnAssign(
        target=Name(id='slide_pos', ctx=Store()),
        annotation=Name(id='int', ctx=Load()),
        value=Constant(value=slide_index, kind=None),
        simple=1)

    klass = [
        make_slide_content_class(content_class_name, slide.shapes),
        ClassDef(
            name=slide_class_name,
            bases=[Name(id='SlideData', ctx=Load())],
            keywords=[],
            body=[contents_field, position_field],
            decorator_list=[dataclass_decorator]),
    ]

    if slide.has_notes_slide:
        note = slide.notes_slide.notes_text_frame.text
        # A note matching the name pattern creates named alias classes.
        if note_name_regx.match(note):
            name = note_name_regx.search(note).group('name')
            alias_slide_class = ClassDef(
                name=f'{name}Slide',
                bases=[Name(id=slide_class_name, ctx=Load())],
                keywords=[],
                body=[Pass()],
                decorator_list=[],
            )
            alias_content_class = ClassDef(
                name=f'{name}Content',
                bases=[Name(id=content_class_name, ctx=Load())],
                keywords=[],
                body=[Pass()],
                decorator_list=[],
            )
            klass += [alias_slide_class, alias_content_class]
    return klass
def visit_FunctionDef(self, node):
    """ Instrument a function definition by creating a new report builder
    for this stack frame and putting it in a local variable. The local
    variable has the same name as the global variable so all calls can
    use the same CONTEXT_NAME symbol, but it means that I had to use
    this:
    x = globals()['x'].start_frame()
    Kind of ugly, but I think it was worth it to handle recursive calls.
    """
    new_node = self.generic_visit(node)

    line_numbers = set()
    self._find_line_numbers(new_node, line_numbers)
    first_line_number = min(line_numbers)
    last_line_number = max(line_numbers)
    args = [Num(n=first_line_number),
            Num(n=last_line_number)]
    try_body = new_node.body
    # Build: CONTEXT_NAME = globals()[CONTEXT_NAME].start_frame(first, last)
    # (`starargs`/`kwargs` mark this as Python-2-era ast.Call usage.)
    globals_call = Call(func=Name(id='globals', ctx=Load()),
                        args=[],
                        keywords=[],
                        starargs=None,
                        kwargs=None)
    global_context = Subscript(value=globals_call,
                               slice=Index(value=Str(s=CONTEXT_NAME)),
                               ctx=Load())
    start_frame_call = Call(func=Attribute(value=global_context,
                                           attr='start_frame',
                                           ctx=Load()),
                            args=args,
                            keywords=[],
                            starargs=None,
                            kwargs=None)
    context_assign = Assign(targets=[Name(id=CONTEXT_NAME, ctx=Store())],
                            value=start_frame_call)
    new_node.body = [context_assign]
    if isinstance(try_body[0], Expr) and isinstance(try_body[0].value, Str):
        # Move docstring back to top of function.
        new_node.body.insert(0, try_body.pop(0))

    # trace function parameter values
    # (`arg` may be None on interpreters lacking ast.arg — presumably a
    # py2/py3 compatibility shim; TODO confirm where `arg` is set.)
    for target in new_node.args.args:
        if isinstance(target, Name) and target.id == 'self':
            continue
        if arg and isinstance(target, arg) and target.arg == 'self':
            continue
        new_node.body.append(self._trace_assignment(target, node.lineno))
    if new_node.args.vararg is not None:
        new_node.body.append(
            self._trace_assignment(new_node.args.vararg, node.lineno))
    if new_node.args.kwarg is not None:
        new_node.body.append(
            self._trace_assignment(new_node.args.kwarg, node.lineno))

    if try_body:
        # Wrap the original body so any exception is reported then re-raised.
        handler_body = [self._create_context_call('exception'),
                        Raise()]
        new_node.body.append(
            TryExcept(body=try_body,
                      handlers=[ExceptHandler(body=handler_body)],
                      orelse=[],
                      finalbody=[]))
        self._set_statement_line_numbers(try_body, first_line_number)
        self._set_statement_line_numbers(handler_body, last_line_number)
    return new_node
def visit_FunctionDef(self, node: FunctionDef):
    """Rewrite the target function so its parameters are pulled from
    iterators/constants captured in the enclosing `context`, and wrap the
    whole body in a `for` loop over the inlined iterator.

    NOTE(review): closure over `function_head`, `method`, `inline_args`,
    `iterator` and the `as_arg`/`as_var`/`as_tmp` helpers from the enclosing
    scope — their exact contracts are not visible here.
    """
    nonlocal function_name, context
    # Only rewrite the specific function we were asked to inline.
    if node is not function_head:
        return node
    function_body = []
    function_name = node.name
    function_args = [arg.arg for arg in node.args.args]
    # Skip `self` (and the bound receiver for methods) before the iterator arg.
    inlined_start = 1
    if inspect.ismethod(method):
        inlined_start += 1
    iterator_name = function_args[inlined_start - 1]
    function_args[:inlined_start] = []
    arity = len(function_args)
    try:
        vararg = as_arg(node.args.vararg.arg)
    except Exception:
        # No *args on the function: positional counts must match exactly.
        if arity != len(inline_args):
            raise ArgumentCountMismatch
    else:
        # Surplus inline_args feed the *args parameter.
        context[vararg] = inline_args[arity:]
    for name, value in zip(function_args, inline_args):
        targets = [Name(id=as_var(name), ctx=Store())]
        if isinstance(value, PassAsConstant):
            # Explicitly requested constant: bind directly in context.
            context[as_var(name)] = value.value
            continue
        if isinstance(value, (int, str, bytes)):
            # Plain immutable literals are bound as constants too.
            context[as_var(name)] = value
            continue
        # Everything else is an iterator: emit `var = next(arg_iter)`.
        context[as_arg(name)] = value
        function_body.append(
            Assign(targets=targets,
                   value=Call(func=Name(id='next', ctx=Load()),
                              args=[Name(id=as_arg(name), ctx=Load())],
                              keywords=[])))
    if node.args.vararg:
        # *args: materialize one value from each surplus iterator via
        # tuple(next(tmp) for tmp in arg_iters).
        name = node.args.vararg.arg
        function_body.append(
            Assign(targets=[Name(id=as_var(name), ctx=Store())],
                   value=Call(
                       func=Name(id='tuple', ctx=Load()),
                       args=[
                           GeneratorExp(
                               elt=Call(func=Name(id='next', ctx=Load()),
                                        args=[
                                            Name(id=as_tmp(name), ctx=Load())
                                        ],
                                        keywords=[]),
                               generators=[
                                   comprehension(
                                       is_async=0,
                                       target=Name(id=as_tmp(name), ctx=Store()),
                                       iter=Name(id=as_arg(name), ctx=Load()),
                                       ifs=[])
                               ])
                       ],
                       keywords=[])))
    function_body.extend(node.body)
    context[as_arg(iterator_name)] = iterator
    # Wrap everything in: for <iterator var> in <iterator arg>: ...
    function_body = [
        For(target=Name(id=as_var(iterator_name), ctx=Store()),
            iter=Name(id=as_arg(iterator_name), ctx=Load()),
            body=function_body,
            orelse=[])
    ]
    node.body = function_body
    node.args.args = [arg(arg=as_var('self'))]
    node.args.vararg = None
    node.decorator_list = []
    return node
def document(sentences, **kw):
    """
    This macro takes literal strings and converts them into:
    _help_ID = type_hint+STRING
    where:
    ID is the first target of the last assignment.
    type_hint is the assigned type and default value (only works for a few types)
    STRING is the literal string
    """
    for n in range(len(sentences)):
        s = sentences[n]
        if not n:
            prev = s
            continue
        # The whole sentence is a string?
        if (isinstance(s, Expr) and isinstance(s.value, Str) and
                # and the previous is an assign
                isinstance(prev, Assign)):  # noqa: E128
            # Apply it to the first target
            target = prev.targets[0]
            value = prev.value
            # Extract its name
            # variables and attributes are supported
            if isinstance(target, Name):  # pragma: no cover (Internal)
                # Note: The support for variables isn't currently used
                name = target.id
                is_attr = False
            elif isinstance(target, Attribute):
                name = target.attr
                is_attr = True
            else:
                # Just in case we put anything other than an attr/var assignment
                continue  # pragma: no cover (Internal)
            # Remove starting underscore
            if name[0] == '_':
                name = name[1:]
            # Create a _help_ID
            doc_id = '_help_' + name
            # Create the type hint for numbers, strings and booleans
            type_hint = ''
            post_hint = ''
            if isinstance(value, Num):
                type_hint = '[number={}]'.format(value.n)
            elif isinstance(value, UnaryOp) and isinstance(
                    value.operand, Num) and isinstance(value.op, USub):
                # -Num
                type_hint = '[number={}]'.format(-value.operand.n)
            elif isinstance(value, Str):
                type_hint = "[string='{}']".format(value.s)
            elif isinstance(value, NameConstant) and isinstance(
                    value.value, bool):
                type_hint = '[boolean={}]'.format(str(value.value).lower())
            elif isinstance(value, Attribute):
                # Used for the default options. I.e. GS.def_global_option
                # NOTE(review): eval of unparsed source — safe only because
                # this macro runs on the project's own sources.
                val = eval(unparse(value))
                if isinstance(val, bool):
                    # Not used yet
                    type_hint = '[boolean={}]'.format(
                        str(val).lower())  # pragma: no cover (Internal)
                elif isinstance(val, (int, float)):
                    # Not used yet
                    type_hint = '[number={}]'.format(
                        val)  # pragma: no cover (Internal)
                elif isinstance(val, str):
                    type_hint = "[string='{}']".format(val)
                post_hint += '. Affected by global options'
            # Transform the string into an assign for _help_ID
            if is_attr:
                target = Attribute(value=Name(id='self', ctx=Load()),
                                   attr=doc_id, ctx=Store())
            else:  # pragma: no cover (Internal)
                target = Name(id=doc_id, ctx=Store())
            # Reuse the s.value Str
            help_str = s.value
            help_str.s = type_hint + s.value.s.rstrip() + post_hint
            sentences[n] = Assign(targets=[target], value=help_str)
            # Copy the line number from the original docstring
            copy_location(target, s)
            copy_location(sentences[n], s)
        prev = s
    # Return the modified AST
    return sentences
from cdd.pure_utils import tab

# Fixture: docstring Expr for a mock class with two documented cvars.
_class_doc_str_expr = Expr(
    set_value(
        "\n"
        "Class mock"
        "\n\n"
        ":cvar a: One swell num"
        "\n\n"
        ":cvar b: Unlucky num"
        "\n"
    )
)

# Fixture: `res = a + b  # type: int`
assign_with_type_comment = Assign(
    targets=[Name("res", Store())],
    value=BinOp(
        left=Name("a", Load()),
        op=Add(),
        right=Name("b", Load()),
    ),
    type_comment=Name("int", Load()),
    lineno=None,
)

# Fixture: the same assignment expressed as `res: int = a + b`.
# BUG FIX: the call was missing its closing parenthesis (SyntaxError).
ann_assign_with_annotation = AnnAssign(
    annotation=assign_with_type_comment.type_comment,
    value=assign_with_type_comment.value,
    simple=1,
    target=assign_with_type_comment.targets[0],
    type_comment=None,
    expr=None,
)
def visit_ClassDef(self, node):
    """Process property defaults for Scenic classes."""
    if node.name in self.constructors:
        # constructor definition
        newBody = []
        for child in node.body:
            child = self.visit(child)
            if isinstance(child, AnnAssign):
                # default value for property
                origValue = child.annotation
                target = child.target
                # extract any attributes for this property
                metaAttrs = []
                if isinstance(target, Subscript):
                    sl = target.slice
                    if not isinstance(sl, Index):
                        # pre-3.9 AST wraps slices in Index; anything else
                        # means the attribute list is malformed
                        self.parseError(
                            sl, 'malformed attributes for property default')
                    sl = sl.value
                    if isinstance(sl, Name):
                        metaAttrs.append(sl.id)
                    elif isinstance(sl, Tuple):
                        for elt in sl.elts:
                            if not isinstance(elt, Name):
                                self.parseError(
                                    elt,
                                    'malformed attributes for property default'
                                )
                            metaAttrs.append(elt.id)
                    else:
                        self.parseError(
                            sl, 'malformed attributes for property default')
                    # Replace `prop[attrs]` target with plain `prop`.
                    newTarget = Name(target.value.id, Store())
                    copy_location(newTarget, target)
                    target = newTarget
                # find dependencies of the default value
                properties = AttributeFinder.find('self', origValue)
                # create default value object
                args = [
                    Set([Str(prop) for prop in properties]),
                    Set([Str(attr) for attr in metaAttrs]),
                    Lambda(selfArg, origValue)
                ]
                value = Call(Name(createDefault, Load()), args, [])
                copy_location(value, origValue)
                newChild = AnnAssign(
                    target=target, annotation=value, value=None, simple=True)
                child = copy_location(newChild, child)
            newBody.append(child)
        node.body = newBody
        return node
    else:  # ordinary Python class
        # catch some mistakes where 'class' was used instead of 'constructor'
        for base in node.bases:
            name = None
            if isinstance(base, Call):
                name = base.func.id
            elif isinstance(base, Name):
                name = base.id
            if name is not None and name in self.constructors:
                self.parseError(
                    node,
                    f'must use "{constructorStatement}" to subclass objects'
                )
        return self.generic_visit(node)
def gen(
    name_tpl,
    input_mapping,
    type_,
    output_filename,
    prepend=None,
    imports_from_file=None,
    emit_call=False,
    emit_default_doc=True,
    decorator_list=None,
):
    """
    Generate classes, functions, and/or argparse functions from the input mapping

    :param name_tpl: Template for the name, e.g., `{name}Config`.
    :type name_tpl: ```str```

    :param input_mapping: Import location of dictionary/mapping/2-tuple collection.
    :type input_mapping: ```str```

    :param type_: What type to generate.
    :type type_: ```Literal["argparse", "class", "function"]```

    :param output_filename: Output file to write to
    :type output_filename: ```str```

    :param prepend: Prepend file with this. Use '\n' for newlines.
    :type prepend: ```Optional[str]```

    :param imports_from_file: Extract imports from file and append to `output_file`.
        If module or other symbol path given, resolve file then use it.
    :type imports_from_file: ```Optional[str]```

    :param emit_call: Whether to emit a `__call__` method from the `_internal` IR subdict
    :type emit_call: ```bool```

    :param emit_default_doc: Whether help/docstring should include 'With default' text
    :type emit_default_doc: ```bool```

    :param decorator_list: List of decorators
    :type decorator_list: ```Optional[Union[List[Str], List[]]]```
    """
    extra_symbols = {}
    if imports_from_file is None:
        imports = ""
    else:
        if prepend:
            # Execute the prepended import statements so the module named by
            # `imports_from_file` can be resolved below.
            prepend_imports = get_at_root(
                ast.parse(prepend.strip()), (Import, ImportFrom)
            )

            # def rewrite_typings(node):
            #     """
            #     Python < 3.8 must use `typings_extensions` for `Literal`
            #
            #     :param node: import node
            #     :type node: ```Union[Import, ImportFrom]```
            #
            #     :returns: The import potentially rewritten or None
            #     :rtype: ```Optional[Union[Import, ImportFrom]]```
            #     """
            #     if isinstance(node, ImportFrom) and node.module == "typing":
            #         len_names = len(node.names)
            #         if len_names == 1 and node.names[0].name == "Literal":
            #             rewrite_typings.found_literal = True
            #             return None
            #         else:
            #             node.names = list(
            #                 filter(
            #                     None,
            #                     map(
            #                         lambda _alias: None
            #                         if _alias.name == "Literal"
            #                         else _alias,
            #                         node.names,
            #                     ),
            #                 )
            #             )
            #             if len(node.names) != len_names:
            #                 rewrite_typings.found_literal = True
            #     return node
            #
            # rewrite_typings.found_literal = False
            # prepend_imports = list(filter(None, map(rewrite_typings, prepend_imports)))
            # if rewrite_typings.found_literal:
            #     prepend_imports.append(
            #         ImportFrom(
            #             level=0,
            #             module="typing_extensions"
            #             if sys.version_info[:2] < (3, 8)
            #             else "typing",
            #             names=[alias(asname=None, name="Literal")],
            #             lineno=None,
            #             col_offset=None,
            #         )
            #     )

            eval(
                compile(
                    to_code(
                        ast.fix_missing_locations(
                            Module(body=prepend_imports, stmt=None, type_ignores=[])
                        )
                    ),
                    filename="<string>",
                    mode="exec",
                ),
                extra_symbols,
            )
            # This leaks to the global scope
            globals().update(extra_symbols)
        with open(
            imports_from_file
            if path.isfile(imports_from_file)
            else getfile(get_module(imports_from_file, extra_symbols=extra_symbols)),
            "rt",
        ) as f:
            imports = "".join(
                map(to_code, get_at_root(ast.parse(f.read()), (Import, ImportFrom)))
            )

    module_path, _, symbol_name = input_mapping.rpartition(".")
    input_mapping = getattr(
        get_module(module_path, extra_symbols=extra_symbols), symbol_name
    )
    input_mapping_it = (
        input_mapping.items() if hasattr(input_mapping, "items") else input_mapping
    )

    global__all__ = []
    # Build the full output source: prepend + imports + emitted symbols + __all__.
    # The `print(...) or append(...) or to_code(...)` chain abuses `or` to run
    # side effects per generated symbol inside the genexpr.
    content = "{prepend}{imports}\n{functions_and_classes}\n{__all}".format(
        prepend="" if prepend is None else prepend,
        imports=imports,  # TODO: Optimize imports programmatically (akin to `autoflake --remove-all-unused-imports`)
        functions_and_classes="\n\n".join(
            print("Generating: {!r}".format(name))
            or global__all__.append(name_tpl.format(name=name))
            or to_code(
                getattr(
                    emit,
                    type_.replace("class", "class_").replace(
                        "argparse", "argparse_function"
                    ),
                )(
                    (
                        lambda is_func: getattr(
                            parse,
                            "function" if is_func else "class_",
                        )(
                            obj,
                            **{} if is_func else {"merge_inner_function": "__init__"}
                        )
                    )(
                        isinstance(obj, FunctionDef) or isfunction(obj)
                    ),  # TODO: Figure out if it's a function or argparse function
                    emit_default_doc=emit_default_doc,
                    **(
                        lambda _name: {
                            "class": {
                                "class_name": _name,
                                "decorator_list": decorator_list,
                                "emit_call": emit_call,
                            },
                            "function": {
                                "function_name": _name,
                            },
                            "argparse": {"function_name": _name},
                        }[type_]
                    )(name_tpl.format(name=name))
                )
            )
            for name, obj in input_mapping_it
        ),
        __all=to_code(
            Assign(
                targets=[Name("__all__", Store())],
                value=ast.parse(
                    # `TypeError: Type List cannot be instantiated; use list() instead`
                    str(
                        list(
                            map(
                                lambda s: s.rstrip("\n").strip("'").strip('"'),
                                map(to_code, map(set_value, global__all__)),
                            )
                        )
                    )
                )
                .body[0]
                .value,
                expr=None,
                lineno=None,
                **maybe_type_comment
            )
        ),
    )

    parsed_ast = ast.parse(content)
    # TODO: Shebang line first, then docstring, then imports
    doc_str = ast.get_docstring(parsed_ast)
    # Split body into (import, None) / (None, other) pairs so imports can be
    # hoisted (with `__future__` first) ahead of everything else.
    whole = tuple(
        map(
            lambda node: (node, None)
            if isinstance(node, (Import, ImportFrom))
            else (None, node),
            parsed_ast.body,
        )
    )
    parsed_ast.body = list(
        filter(
            None,
            chain.from_iterable(
                (
                    parsed_ast.body[:1] if doc_str else iter(()),
                    sorted(
                        map(itemgetter(0), whole),
                        key=lambda import_from: getattr(
                            import_from, "module", None
                        )
                        == "__future__",
                        reverse=True,
                    ),
                    map(itemgetter(1), whole[1:] if doc_str else whole),
                ),
            ),
        )
    )

    with open(output_filename, "a") as f:
        f.write(to_code(parsed_ast))
def pack(self):
    """Bundle all registered scriptlets into one compiled blob.

    Python 2 code (`iteritems`, `long`, `unicode`, `TryExcept`) — do not
    port pieces of it to Python 3 in isolation.
    """
    compiler = AstCompiler()

    # Collect dependency requirements across all scriptlets; dict-shaped
    # dependencies are keyed by OS ('all' plus the target OS).
    requirements = set()
    for scriptlet in self.scriptlets:
        if type(scriptlet.dependencies) == dict:
            for dependency in scriptlet.dependencies.get('all', []):
                requirements.add(dependency)
            for dependency in scriptlet.dependencies.get(self.os, []):
                requirements.add(dependency)
        else:
            for dependency in scriptlet.dependencies:
                requirements.add(dependency)

    if requirements:
        compiler.add_ast(
            parse('\n'.join([
                'import pupyimporter',
                dependencies.importer(requirements, os=self.os)
            ]) + '\n'))

    for scriptlet, kwargs in self.scriptlets.iteritems():
        template = WRAPPING_TEMPLATE.format(scriptlet=scriptlet.name)

        # Select part with proper OS if any
        # Should be top-level if statement if string test
        while True:
            os_selection_idx = None

            for idx, item in enumerate(scriptlet.ast.body):
                if not (type(item) == If and type(item.test) == Str and
                        item.test.s.startswith('__os:') and
                        item.test.s.endswith('__')):
                    continue

                os_selection_idx = idx
                break

            if os_selection_idx is None:
                break

            new_body = select_body_by_os(
                scriptlet.ast.body[os_selection_idx], self.os)

            scriptlet.ast.body = \
                scriptlet.ast.body[:os_selection_idx] + \
                new_body + scriptlet.ast.body[os_selection_idx+1:]

        # Bind args
        # There should be top level function main
        main_found = False
        shadow_kwargs = {'logger', 'pupy'}

        for item in scriptlet.ast.body:
            if not (type(item) == FunctionDef and item.name == 'main'):
                continue

            main_found = True
            lineno = 0
            col_offset = 0
            item.name = scriptlet.name + '_main'

            # Rewrite main()'s default values using the user-supplied kwargs.
            for idx, (arg, value) in enumerate(
                    zip(item.args.args, item.args.defaults)):
                lineno = value.lineno
                col_offset = value.col_offset
                vtype = type(value)

                if arg.id in shadow_kwargs:
                    shadow_kwargs.remove(arg.id)
                elif arg.id in kwargs:
                    default = kwargs[arg.id]
                    if vtype == Num:
                        if type(default) not in (int, long):
                            default = str_to_int(default)
                        value.n = default
                    elif vtype == Str:
                        if type(default) not in (str, unicode):
                            default = str(default)
                        value.s = default
                    elif vtype == Name:
                        # py2 AST: True/False are Name nodes, not constants.
                        if value.id in ('True', 'False'):
                            if default.lower() in ('true', 'yes', 'on', '1'):
                                value.id = 'True'
                            elif default.lower() in ('false', 'no', 'off', '0'):
                                value.id = 'False'
                            else:
                                raise ValueError(
                                    'Expect True/False value for {}'.format(
                                        arg.id))
                        else:
                            # Replace other Name defaults with a literal.
                            new_value = None
                            try:
                                new_value = Num(str_to_int(default))
                            except ValueError:
                                new_value = Str(default)

                            new_value.lineno = value.lineno
                            new_value.col_offset = value.col_offset
                            item.args.defaults[idx] = new_value
                elif vtype == Str and value.s.startswith(
                        '__global:') and value.s.endswith('__'):
                    # '__global:NAME__' defaults become references to NAME.
                    global_name = value.s[9:-2]
                    global_ref = Name(global_name, Load())
                    global_ref.lineno = value.lineno
                    global_ref.col_offset = value.col_offset
                    item.args.defaults[idx] = global_ref

            # Append any still-unbound shadow kwargs as `name=None` params.
            for idx, shadow_kwarg in enumerate(shadow_kwargs):
                shadow_name = Name(shadow_kwarg, Param())
                shadow_name.lineno = lineno
                shadow_name.col_offset = col_offset + (idx * 16)
                item.args.args.append(shadow_name)
                shadow_value = Name('None', Load())
                shadow_value.lineno = lineno
                shadow_value.col_offset = col_offset + (idx * 16) + 7
                item.args.defaults.append(shadow_value)
            break

        if not main_found:
            raise ValueError('Scriptlet {} - Invalid source code. '
                             '"def main():" not found'.format(scriptlet.name))

        placeholder_idx = None

        # Wrap in try/except, and other things
        template_ast = parse(template)
        for item in template_ast.body:
            if not (type(item) == FunctionDef and
                    item.name == '__{}_closure__'.format(scriptlet.name)):
                continue

            assert (len(item.body) == 1 and type(item.body[0]) == TryExcept)

            closure = item.body[0]

            # Splice the scriptlet body over the 'PLACEHOLDER' docstring Expr.
            for idx, payload in enumerate(closure.body):
                if type(payload) is not Expr:
                    continue

                if type(payload.value) is Str and \
                        payload.value.s == 'PLACEHOLDER':
                    placeholder_idx = idx
                    break

            assert (placeholder_idx is not None)

            closure.body = closure.body[:placeholder_idx] + \
                scriptlet.ast.body + closure.body[placeholder_idx+1:]
            break

        if placeholder_idx is None:
            raise ValueError(
                'Template placeholder not found. Fill the bug report')

        compiler.add_ast(template_ast)

    return compiler.compile('sbundle', raw=True)
def update_loop(self, lineno, loop_start, loop_end):
    """Build the Expr calling the environment's loop-update hook, then
    advance the tracked line number to `lineno + 1`."""
    call_args = [
        Num(n=self.lineno),
        Num(n=lineno),
        Num(n=loop_start),
        Num(n=loop_end),
        Name(id=Environment.iter_num, ctx=Load()),
    ]
    update_call = Expr(value=Call(
        func=Name(id=Environment.update_loop_func, ctx=Load()),
        args=call_args))
    # Must run after the call node captured the previous self.lineno.
    self.lineno = lineno + 1
    return update_call
def init_loop(self, loop_start):
    """Build the Expr calling the environment's loop-init hook with the
    loop's starting line number."""
    init_call = Call(
        func=Name(id=Environment.init_loop_func, ctx=Load()),
        args=[Num(n=loop_start)])
    return Expr(value=init_call)
def __delete_var(self, lineno, var):
    """Build `del <env>[lineno][var]` against the environment mapping."""
    line_entry = Subscript(
        value=Name(id=Environment.name, ctx=Load()),
        slice=Index(value=Num(n=lineno)),
        ctx=Load())
    var_entry = Subscript(
        value=line_entry,
        slice=Index(value=Str(s=var)),
        ctx=Del())
    return Delete(targets=[var_entry])
def __assign_var(self, lineno, var, loop=False):
    """Build `<env>[lineno][var] = var` (or, inside a loop,
    `<env>[lineno][<iter>][var] = var`)."""
    entry = Subscript(
        value=Name(id=Environment.name, ctx=Load()),
        slice=Index(value=Num(n=lineno)),
        ctx=Load())
    if loop:
        # One extra level, keyed by the current iteration counter.
        entry = Subscript(
            value=entry,
            slice=Index(value=Name(id=Environment.iter_num, ctx=Load())),
            ctx=Load())
    store_target = Subscript(
        value=entry,
        slice=Index(value=Str(s=var)),
        ctx=Store())
    return Assign(targets=[store_target],
                  value=Name(id=var, ctx=Load()))
def init(self, lineno, args):
    """Build the statements initializing `<env>[lineno]` to an empty dict
    and recording the given args; advances the tracked line number."""
    env_slot = Subscript(
        value=Name(id=Environment.name, ctx=Load()),
        slice=Index(value=Num(n=lineno)),
        ctx=Store())
    init_call = Assign(targets=[env_slot], value=Dict())
    param_init = self.assign(lineno, args)
    self.lineno = lineno + 1
    return [init_call] + param_init