def _parse_return(e, intermediate_repr, function_def, emit_default_doc):
    """
    Parse return into a param dict

    :param e: Return AST node
    :type e: ```Return```

    :param intermediate_repr: a dictionary of form {
        "name": Optional[str],
        "type": Optional[str],
        "doc": Optional[str],
        "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
        "returns": Optional[OrderedDict[Literal['return_type'],
                                        {'typ': str, 'doc': Optional[str], 'default': Any}]]
    }
    :type intermediate_repr: ```dict```

    :param function_def: AST node for function definition
    :type function_def: ```FunctionDef```

    :param emit_default_doc: Whether help/docstring should include 'With default' text
    :type emit_default_doc: ```bool```

    :returns: Name, dict with keys: 'typ', 'doc', 'default'
    :rtype: ```Tuple[str, dict]```
    """
    assert isinstance(e, Return)

    return set_default_doc(
        (
            "return_type",
            {
                "doc": extract_default(
                    next(
                        line.partition(",")[2].lstrip()
                        for line in get_value(function_def.body[0].value).split("\n")
                        if line.lstrip().startswith(":return")
                    ),
                    emit_default_doc=emit_default_doc,
                )[0],
                "default": to_code(e.value.elts[1]).rstrip("\n"),
                "typ": to_code(
                    get_value(
                        ast.parse(
                            intermediate_repr["returns"]["return_type"]["typ"]
                        ).body[0].value.slice
                    ).elts[1]
                ).rstrip()
                # 'Tuple[ArgumentParser, {typ}]'.format(typ=intermediate_repr['returns']['typ'])
            },
        ),
        emit_default_doc=emit_default_doc,
    )

def _interpolate_return(function_def, intermediate_repr):
    """
    Interpolate the return value into the IR.

    :param function_def: function definition
    :type function_def: ```FunctionDef```

    :param intermediate_repr: a dictionary of form {
        "name": Optional[str],
        "type": Optional[str],
        "doc": Optional[str],
        "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
        "returns": Optional[OrderedDict[Literal['return_type'],
                                        {'typ': str, 'doc': Optional[str], 'default': Any}]]
    }
    :type intermediate_repr: ```dict```

    :returns: a dictionary of form {
        "name": Optional[str],
        "type": Optional[str],
        "doc": Optional[str],
        "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
        "returns": Optional[OrderedDict[Literal['return_type'],
                                        {'typ': str, 'doc': Optional[str], 'default': Any}]]
    }
    :rtype: ```dict```
    """
    return_ast = next(
        filter(rpartial(isinstance, Return), function_def.body[::-1]), None
    )
    if return_ast is not None and return_ast.value is not None:
        if intermediate_repr.get("returns") is None:
            intermediate_repr["returns"] = OrderedDict((("return_type", {}),))

        if (
            "typ" in intermediate_repr["returns"]["return_type"]
            and "[" not in intermediate_repr["returns"]["return_type"]["typ"]
        ):
            del intermediate_repr["returns"]["return_type"]["typ"]
        intermediate_repr["returns"]["return_type"]["default"] = (
            lambda default: "({})".format(default)
            if isinstance(return_ast.value, Tuple)
            and (not default.startswith("(") or not default.endswith(")"))
            else (
                lambda default_: default_
                if isinstance(
                    default_,
                    (str, int, float, complex, ast.Num, ast.Str, ast.Constant),
                )
                else "```{}```".format(default)
            )(get_value(get_value(return_ast)))
        )(to_code(return_ast.value).rstrip("\n"))

    if hasattr(function_def, "returns") and function_def.returns is not None:
        intermediate_repr["returns"] = intermediate_repr.get("returns") or OrderedDict(
            (("return_type", {}),)
        )
        intermediate_repr["returns"]["return_type"]["typ"] = to_code(
            function_def.returns
        ).rstrip("\n")

    return intermediate_repr

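# Illustrative sketch (not part of the library; the sample function and IR below
# are hypothetical): given a function whose last statement is a `return`,
# `_interpolate_return` fills in the IR's "returns" entry.
#
#     import ast
#     fn = ast.parse("def f() -> int:\n    return 5").body[0]
#     ir = {"name": "f", "type": None, "doc": "", "params": OrderedDict(), "returns": None}
#     _interpolate_return(fn, ir)
#     # ir["returns"]["return_type"] now carries a "default" (taken from the
#     # return expression) and a "typ" of "int" (taken from the annotation)
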
def file(node, filename, mode="a", skip_black=False):
    """
    Convert AST to a file

    :param node: AST node
    :type node: ```Union[Module, ClassDef, FunctionDef]```

    :param filename: emit to this file
    :type filename: ```str```

    :param mode: Mode to open the file in, defaults to append
    :type mode: ```str```

    :param skip_black: Whether to skip formatting with black
    :type skip_black: ```bool```

    :returns: None
    :rtype: ```NoneType```
    """
    if not isinstance(node, Module):
        node = Module(body=[node], type_ignores=[], stmt=None)
    src = to_code(node)
    if not skip_black:
        src = black.format_str(
            src,
            mode=black.Mode(
                target_versions=set(),
                line_length=119,
                is_pyi=False,
                string_normalization=False,
            ),
        )
    with open(filename, mode) as f:
        f.write(src)

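# Illustrative sketch (not part of the module; the target path is hypothetical):
# emit a small module to disk, letting black normalise the formatting. Note the
# default mode is "a" (append), so pass "wt" to overwrite.
#
#     import ast
#     file(ast.parse("x  =  1"), "generated.py", mode="wt")
#     # generated.py now holds the black-formatted source, e.g. "x = 1"
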
def populate_files(tempdir, input_module_str=None):
    """
    Populate files in the tempdir

    :param tempdir: Temporary directory
    :type tempdir: ```str```

    :param input_module_str: Input string to write to the input_filename. If None, uses preset mock module.
    :type input_module_str: ```Optional[str]```

    :returns: input filename, input module AST, input class AST, expected class AST
    :rtype: ```Tuple[str, Module, ClassDef, ClassDef]```
    """
    input_filename = os.path.join(tempdir, "input{extsep}py".format(extsep=extsep))
    input_class_name = "Foo"
    input_class_ast = emit.class_(
        parse.function(deepcopy(method_adder_ast)),
        emit_call=False,
        class_name=input_class_name,
    )

    input_module_ast = Module(
        body=[
            input_class_ast,
            Assign(
                targets=[Name("input_map", Store())],
                value=Dict(
                    keys=[set_value(input_class_name)],
                    values=[Name(input_class_name, Load())],
                    expr=None,
                ),
                expr=None,
                lineno=None,
                **maybe_type_comment
            ),
            Assign(
                targets=[Name("__all__", Store())],
                value=List(
                    ctx=Load(),
                    elts=[set_value(input_class_name), set_value("input_map")],
                    expr=None,
                ),
                expr=None,
                lineno=None,
                **maybe_type_comment
            ),
        ],
        type_ignores=[],
        stmt=None,
    )

    input_module_str = input_module_str or to_code(input_module_ast)
    # expected_output_class_str = (
    #     "class FooConfig(object):\n"
    #     '    """\n'
    #     "    The amazing Foo\n\n"
    #     "    :cvar a: An a. Defaults to 5\n"
    #     '    :cvar b: A b. Defaults to 16"""\n'
    #     "    a = 5\n"
    #     "    b = 16\n\n"
    #     "    def __call__(self):\n"
    #     "        self.a = 5\n"
    #     "        self.b = 16\n"
    # )
    expected_class_ast = emit.class_(
        parse.function(deepcopy(method_adder_ast)),
        emit_call=True,
        class_name="{input_class_name}Config".format(input_class_name=input_class_name),
    )

    with open(input_filename, "wt") as f:
        f.write(input_module_str)
    return input_filename, input_module_ast, input_class_ast, expected_class_ast

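# Illustrative sketch (test-helper usage; directory handling is hypothetical):
#
#     from tempfile import TemporaryDirectory
#     with TemporaryDirectory() as tempdir:
#         input_filename, input_module_ast, input_class_ast, expected_class_ast = (
#             populate_files(tempdir)
#         )
#         # `input_filename` now exists on disk and contains `to_code(input_module_ast)`
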
name="input_map", asname=None, identifier=None, identifier_name=None, ), alias( name="Foo", asname=None, identifier=None, identifier_name=None, ), ], level=1, identifier=None, ) _import_star_from_input_str = to_code(_import_star_from_input_ast) _import_gen_test_module_ast = Import( names=[ alias( name="gen_test_module", asname=None, identifier=None, identifier_name=None, ) ], alias=None, ) _import_gen_test_module_str = "{}\n".format( to_code(_import_gen_test_module_ast).rstrip("\n"))
def class_(
    class_def,
    class_name=None,
    merge_inner_function=None,
    infer_type=False,
    parse_original_whitespace=False,
    word_wrap=True,
):
    """
    Converts an AST to our IR

    :param class_def: Class AST or Module AST with a ClassDef inside
    :type class_def: ```Union[Module, ClassDef]```

    :param class_name: Name of `class`. If None, gives first found.
    :type class_name: ```Optional[str]```

    :param merge_inner_function: Name of inner function to merge. If None, merge nothing.
    :type merge_inner_function: ```Optional[str]```

    :param infer_type: Whether to try inferring the typ (from the default)
    :type infer_type: ```bool```

    :param parse_original_whitespace: Whether to parse original whitespace or strip it out
    :type parse_original_whitespace: ```bool```

    :param word_wrap: Whether to word-wrap. Set `DOCTRANS_LINE_LENGTH` to configure length.
    :type word_wrap: ```bool```

    :returns: a dictionary of form {
        "name": Optional[str],
        "type": Optional[str],
        "doc": Optional[str],
        "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
        "returns": Optional[OrderedDict[Literal['return_type'],
                                        {'typ': str, 'doc': Optional[str], 'default': Any}]]
    }
    :rtype: ```dict```
    """
    assert not isinstance(class_def, FunctionDef)
    is_supported_ast_node = isinstance(class_def, (Module, ClassDef))
    if not is_supported_ast_node and isinstance(class_def, type):
        return _class_from_memory(
            class_def, class_name, infer_type, merge_inner_function, word_wrap
        )

    assert (
        is_supported_ast_node
    ), "Expected 'Union[Module, ClassDef]' got `{node_name!r}`".format(
        node_name=type(class_def).__name__
    )
    class_def = find_ast_type(class_def, class_name)
    doc_str = get_docstring(class_def)
    intermediate_repr = (
        {
            "name": class_name,
            "type": "static",
            "doc": "",
            "params": OrderedDict(),
            "returns": None,
        }
        if doc_str is None
        else docstring(
            doc_str,
            emit_default_doc=False,
            parse_original_whitespace=parse_original_whitespace,
        )
    )

    if "return_type" in intermediate_repr["params"]:
        intermediate_repr["returns"] = OrderedDict(
            (("return_type", intermediate_repr["params"].pop("return_type")),)
        )

    body = class_def.body if doc_str is None else class_def.body[1:]
    for e in body:
        if isinstance(e, AnnAssign):
            typ = to_code(e.annotation).rstrip("\n")
            val = (
                lambda v: {"default": NoneStr}
                if v is None
                else {
                    "default": v
                    if type(v).__name__ in simple_types
                    else (
                        lambda value: {
                            "{}": {} if isinstance(v, Dict) else set(),
                            "[]": [],
                            "()": (),
                        }.get(value, parse_to_scalar(value))
                    )(to_code(v).rstrip("\n"))
                }
            )(get_value(get_value(e)))
            # if 'str' in typ and val: val["default"] = val["default"].strip("'")  # Unquote?
            typ_default = dict(typ=typ, **val)

            for key in "params", "returns":
                if e.target.id in (intermediate_repr[key] or iter(())):
                    intermediate_repr[key][e.target.id].update(typ_default)
                    typ_default = False
                    break

            if typ_default:
                k = "returns" if e.target.id == "return_type" else "params"
                if intermediate_repr.get(k) is None:
                    intermediate_repr[k] = OrderedDict()
                intermediate_repr[k][e.target.id] = typ_default
        elif isinstance(e, Assign):
            val = get_value(e)

            if val is not None:
                val = get_value(val)
                deque(
                    map(
                        lambda target: setitem(
                            *(
                                (
                                    intermediate_repr["params"][target.id],
                                    "default",
                                    val,
                                )
                                if isinstance(target, Name)
                                and target.id in intermediate_repr["params"]
                                else (
                                    intermediate_repr["params"],
                                    target.id
                                    if isinstance(target, Name)
                                    else get_value(get_value(target)),
                                    {"default": val},
                                )
                            )
                        ),
                        e.targets,
                    ),
                    maxlen=0,
                )

    intermediate_repr.update(
        {
            "params": OrderedDict(
                map(
                    partial(
                        _set_name_and_type, infer_type=infer_type, word_wrap=word_wrap
                    ),
                    intermediate_repr["params"].items(),
                )
            ),
            "_internal": {
                "body": list(
                    filterfalse(rpartial(isinstance, (AnnAssign, Assign)), body)
                ),
                "from_name": class_def.name,
                "from_type": "cls",
            },
        }
    )

    if merge_inner_function is not None:
        assert isinstance(class_def, ClassDef)
        _merge_inner_function(
            class_def,
            infer_type=infer_type,
            intermediate_repr=intermediate_repr,
            merge_inner_function=merge_inner_function,
        )
    # intermediate_repr['_internal']["body"] = list(
    #     filterfalse(rpartial(isinstance, (AnnAssign, Assign)), class_def.body)
    # )

    return intermediate_repr

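# Illustrative sketch (not part of the parser; the class below is hypothetical):
# derive the IR from a tiny class with a ':cvar' docstring and an annotated field.
#
#     import ast
#     ir = class_(
#         ast.parse(
#             "class Foo(object):\n"
#             '    """The amazing Foo\n\n'
#             '    :cvar a: An a"""\n'
#             "    a: int = 5\n"
#         )
#     )
#     # ir["_internal"]["from_name"] == "Foo", and ir["params"]["a"] should
#     # include {'typ': 'int', 'default': 5} alongside the ':cvar a' doc text
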
def gen(
    name_tpl,
    input_mapping,
    parse_name,
    emit_name,
    output_filename,
    prepend=None,
    imports_from_file=None,
    emit_call=False,
    emit_default_doc=True,
    decorator_list=None,
):
    """
    Generate classes, functions, and/or argparse functions from the input mapping

    :param name_tpl: Template for the name, e.g., `{name}Config`.
    :type name_tpl: ```str```

    :param input_mapping: Import location of dictionary/mapping/2-tuple collection.
    :type input_mapping: ```str```

    :param parse_name: What type to parse.
    :type parse_name: ```Literal["argparse", "class", "function", "sqlalchemy", "sqlalchemy_table"]```

    :param emit_name: What type to generate.
    :type emit_name: ```Literal["argparse", "class", "function", "sqlalchemy", "sqlalchemy_table"]```

    :param output_filename: Output file to write to
    :type output_filename: ```str```

    :param prepend: Prepend file with this. Use '\n' for newlines.
    :type prepend: ```Optional[str]```

    :param imports_from_file: Extract imports from file and append to `output_file`.
        If module or other symbol path given, resolve file then use it.
    :type imports_from_file: ```Optional[str]```

    :param emit_call: Whether to emit a `__call__` method from the `_internal` IR subdict
    :type emit_call: ```bool```

    :param emit_default_doc: Whether help/docstring should include 'With default' text
    :type emit_default_doc: ```bool```

    :param decorator_list: List of decorators
    :type decorator_list: ```Optional[List[str]]```
    """
    extra_symbols = {}
    if imports_from_file is None:
        imports = ""
    else:
        if prepend:
            prepend_imports = get_at_root(
                ast.parse(prepend.strip()), (Import, ImportFrom)
            )

            # def rewrite_typings(node):
            #     """
            #     Python < 3.8 must use `typing_extensions` for `Literal`
            #
            #     :param node: import node
            #     :type node: ```Union[Import, ImportFrom]```
            #
            #     :returns: The import potentially rewritten or None
            #     :rtype: ```Optional[Union[Import, ImportFrom]]```
            #     """
            #     if isinstance(node, ImportFrom) and node.module == "typing":
            #         len_names = len(node.names)
            #         if len_names == 1 and node.names[0].name == "Literal":
            #             rewrite_typings.found_literal = True
            #             return None
            #         else:
            #             node.names = list(
            #                 filter(
            #                     None,
            #                     map(
            #                         lambda _alias: None
            #                         if _alias.name == "Literal"
            #                         else _alias,
            #                         node.names,
            #                     ),
            #                 )
            #             )
            #             if len(node.names) != len_names:
            #                 rewrite_typings.found_literal = True
            #     return node
            #
            # rewrite_typings.found_literal = False
            # prepend_imports = list(filter(None, map(rewrite_typings, prepend_imports)))
            # if rewrite_typings.found_literal:
            #     prepend_imports.append(
            #         ImportFrom(
            #             level=0,
            #             module="typing_extensions"
            #             if sys.version_info[:2] < (3, 8)
            #             else "typing",
            #             names=[alias(asname=None, name="Literal")],
            #             lineno=None,
            #             col_offset=None,
            #         )
            #     )

            eval(
                compile(
                    to_code(
                        ast.fix_missing_locations(
                            Module(body=prepend_imports, stmt=None, type_ignores=[])
                        )
                    ),
                    filename="<string>",
                    mode="exec",
                ),
                extra_symbols,
            )
            # This leaks to the global scope
            globals().update(extra_symbols)
        with open(
            imports_from_file
            if path.isfile(imports_from_file)
            else getfile(get_module(imports_from_file, extra_symbols=extra_symbols)),
            "rt",
        ) as f:
            imports = "".join(
                map(to_code, get_at_root(ast.parse(f.read()), (Import, ImportFrom)))
            )

    module_path, _, symbol_name = input_mapping.rpartition(".")
    input_mapping = getattr(
        get_module(module_path, extra_symbols=extra_symbols), symbol_name
    )
    input_mapping_it = (
        input_mapping.items() if hasattr(input_mapping, "items") else input_mapping
    )

    global__all__ = []
    emit_name = emit_name.replace("class", "class_").replace(
        "argparse", "argparse_function"
    )
    content = "{prepend}{imports}\n{functions_and_classes}\n{__all__}".format(
        prepend="" if prepend is None else prepend,
        imports=imports,  # TODO: Optimize imports programmatically (akin to `autoflake --remove-all-unused-imports`)
        functions_and_classes="\n\n".join(
            print("\nGenerating: {name!r}".format(name=name))
            or global__all__.append(name_tpl.format(name=name))
            or to_code(
                getattr(emit, emit_name)(
                    getattr(
                        parse,
                        infer(obj) if parse_name in (None, "infer") else parse_name,
                    )(obj),
                    emit_default_doc=emit_default_doc,
                    **(
                        lambda _name: {
                            "argparse_function": {"function_name": _name},
                            "class_": {
                                "class_name": _name,
                                "decorator_list": decorator_list,
                                "emit_call": emit_call,
                            },
                            "function": {"function_name": _name},
                            "sqlalchemy": {"table_name": _name},
                            "sqlalchemy_table": {"table_name": _name},
                        }[emit_name]
                    )(name_tpl.format(name=name)),
                )
            )
            for name, obj in input_mapping_it
        ),
        __all__=to_code(
            Assign(
                targets=[Name("__all__", Store())],
                value=ast.parse(  # `TypeError: Type List cannot be instantiated; use list() instead`
                    str(
                        list(
                            map(
                                lambda s: s.rstrip("\n").strip("'").strip('"'),
                                map(to_code, map(set_value, global__all__)),
                            )
                        )
                    )
                )
                .body[0]
                .value,
                expr=None,
                lineno=None,
                **maybe_type_comment
            )
        ),
    )

    parsed_ast = ast.parse(content)
    # TODO: Shebang line first, then docstring, then imports
    doc_str = ast.get_docstring(parsed_ast)
    whole = tuple(
        map(
            lambda node: (node, None)
            if isinstance(node, (Import, ImportFrom))
            else (None, node),
            parsed_ast.body,
        )
    )
    parsed_ast.body = list(
        filter(
            None,
            chain.from_iterable(
                (
                    parsed_ast.body[:1] if doc_str else iter(()),
                    sorted(
                        map(itemgetter(0), whole),
                        key=lambda import_from: getattr(import_from, "module", None)
                        == "__future__",
                        reverse=True,
                    ),
                    map(itemgetter(1), whole[1:] if doc_str else whole),
                )
            ),
        )
    )

    with open(output_filename, "a") as f:
        f.write(to_code(parsed_ast))

def sync_property(
    input_eval,
    input_param,
    input_ast,
    input_filename,
    output_param,
    output_param_wrap,
    output_ast,
):
    """
    Sync a single property

    :param input_eval: Whether to evaluate the `param`, or just leave it
    :type input_eval: ```bool```

    :param input_param: Location within file of property.
        Can be top level like `'a'` for `a=5` or with the `.` syntax as in `output_params`.
    :type input_param: ```str```

    :param input_ast: AST of the input file
    :type input_ast: ```AST```

    :param input_filename: Filename of the input (used in `eval`)
    :type input_filename: ```str```

    :param output_param: Parameters to update. E.g., `'A.F'` for `class A: F = None`, `'f.g'` for `def f(g): pass`
    :type output_param: ```str```

    :param output_param_wrap: Wrap all input_str params with this. E.g., `Optional[Union[{output_param}, str]]`
    :type output_param_wrap: ```Optional[str]```

    :param output_ast: AST of the output file
    :type output_ast: ```AST```

    :returns: New AST derived from `output_ast`
    :rtype: ```AST```
    """
    search = list(strip_split(output_param, "."))

    if input_eval:
        if input_param.count(".") != 0:
            raise NotImplementedError("Anything not on the top-level of the module")

        local = {}
        output = eval(compile(input_ast, filename=input_filename, mode="exec"), local)
        assert output is None

        replacement_node = ast.AnnAssign(
            annotation=it2literal(local[input_param]),
            simple=1,
            target=ast.Name(
                search[-1],  # input_param
                ast.Store(),
            ),
            value=None,
            expr=None,
            expr_annotation=None,
            expr_target=None,
        )
    else:
        assert isinstance(input_ast, ast.Module)
        annotate_ancestry(input_ast)
        replacement_node = find_in_ast(list(strip_split(input_param, ".")), input_ast)

    assert replacement_node is not None

    if output_param_wrap is not None:
        if hasattr(replacement_node, "annotation"):
            if replacement_node.annotation is not None:
                replacement_node.annotation = (
                    ast.parse(
                        output_param_wrap.format(
                            output_param=to_code(replacement_node.annotation)
                        )
                    )
                    .body[0]
                    .value
                )
        else:
            raise NotImplementedError(type(replacement_node).__name__)

    rewrite_at_query = RewriteAtQuery(
        search=search,
        replacement_node=replacement_node,
    )
    gen_ast = rewrite_at_query.visit(output_ast)
    assert (
        rewrite_at_query.replaced is True
    ), "Failed to update with {replacement_node_str!r}".format(
        replacement_node_str=to_code(replacement_node)
    )

    return gen_ast

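# Illustrative sketch (in the library this is normally driven by a higher-level
# `sync_properties` call that reads and prepares both files; the filenames and
# parameter paths below are hypothetical):
#
#     import ast
#     gen_ast = sync_property(
#         input_eval=False,
#         input_param="ConfigClass.dataset_name",
#         input_ast=ast.parse(open("input.py").read()),
#         input_filename="input.py",
#         output_param="train.dataset_name",
#         output_param_wrap="Optional[{output_param}]",
#         output_ast=ast.parse(open("output.py").read()),
#     )
#     # `gen_ast` is `output_ast` with the annotation of `train`'s `dataset_name`
#     # parameter replaced by the (wrapped) annotation found in input.py
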