def get_route_meta(mod): """ Get the (func_name, app_name, route_path, http_method)s :param mod: Parsed AST containing routes :type mod: ```Module``` :returns: Iterator of tuples of (func_name, app_name, route_path, http_method) :rtype: ```Iterator[Tuple[str, str, str, str]]``` """ return map( lambda func: ( func.name, *next( map( lambda call: ( call.func.value.id, get_value(call.args[0]), call.func.attr, ), filter( lambda call: call.args and call.func.attr in methods, filter(rpartial(isinstance, Call), func.decorator_list ), ), )), ), filter(rpartial(isinstance, FunctionDef), mod.body), )
def merge_modules(mod0, mod1, remove_imports_from_second=True):
    """
    Merge modules (removing module docstring)

    :param mod0: Module
    :type mod0: ```Module```

    :param mod1: Module
    :type mod1: ```Module```

    :param remove_imports_from_second: Whether to remove global imports from second module
    :type remove_imports_from_second: ```bool```

    :returns: Merged module (copy)
    :rtype: ```Module```
    """
    # Second module's body, minus a leading docstring node if present
    second_body = (
        mod1.body[1:]
        if mod1.body and isinstance(mod1.body[0], (Str, Constant))
        else mod1.body
    )

    merged = deepcopy(mod0)
    if remove_imports_from_second:
        # Strip top-level imports out of the appended body
        merged.body += [
            stmt for stmt in second_body if not isinstance(stmt, (ImportFrom, Import))
        ]
    else:
        merged.body += deepcopy(second_body)
    return merged
def test_gen(self) -> None:
    """Tests `gen`"""
    out_path = os.path.join(
        self.tempdir, "test_gen_output{extsep}py".format(extsep=extsep)
    )
    # Silence stdout/stderr while the generator runs
    with patch("sys.stdout", new_callable=StringIO), patch(
        "sys.stderr", new_callable=StringIO
    ):
        self.assertIsNone(
            gen(
                name_tpl="{name}Config",
                input_mapping="gen_test_module.input_map",
                emit_name="class",
                parse_name="infer",
                output_filename=out_path,
                prepend="PREPENDED\n",
                emit_call=True,
                emit_default_doc=False,
            )
        )

    with open(out_path, "rt") as f:
        generated_module = ast.parse(f.read())

    run_ast_test(
        self,
        gen_ast=next(
            node for node in generated_module.body if isinstance(node, ClassDef)
        ),
        gold=self.expected_class_ast,
    )
def get_ass_where_name(node, name):
    """
    Find all `Assign`/`AnnAssign` in node body where id matches name

    :param node: AST node with a '.body'
    :type node: ```Union[Module, ClassDef, FunctionDef, If, Try, While, With, AsyncFor,
        AsyncFunctionDef, AsyncWith, ExceptHandler, Expression, For, IfExp, Interactive, Lambda]```

    :param name: Name to match (matches against `id` field of `Name`)
    :type name: ```str```

    :returns: Generator of values (`.value`) of all matching assignments
    :rtype: ```Generator[Any]```
    """
    # Loop variable renamed from `node` so it no longer shadows the parameter.
    return (
        get_value(child)
        for child in node.body
        if isinstance(child, Assign)
        and name
        in frozenset(
            map(attrgetter("id"), filter(rpartial(isinstance, Name), child.targets))
        )
        # Fix: the original compared the `Name` node itself to the `str` `name`
        # (`child.target == name`), which is always False, so `AnnAssign`s never
        # matched. Compare the target's `.id` (guarding for non-`Name` targets).
        or isinstance(child, AnnAssign)
        and isinstance(child.target, Name)
        and child.target.id == name
    )
def del_ass_where_name(node, name):
    """
    Delete all `Assign`/`AnnAssign` in node body where id matches name

    :param node: AST node with a '.body'
    :type node: ```Union[Module, ClassDef, FunctionDef, If, Try, While, With, AsyncFor,
        AsyncFunctionDef, AsyncWith, ExceptHandler, Expression, For, IfExp, Interactive, Lambda]```

    :param name: Name to match (matches against `id` field of `Name`)
    :type name: ```str```
    """

    def _matches(stmt):
        """One-liner: True when `stmt` assigns (Assign/AnnAssign) to `name`."""
        if isinstance(stmt, Assign):
            return any(
                isinstance(target, Name) and target.id == name
                for target in stmt.targets
            )
        # Fix: the original compared the `Name` node itself to the `str` `name`
        # (`_node.target == name`), which is always False, so `AnnAssign`s were
        # never deleted. Compare the target's `.id` (guarding non-`Name` targets).
        return (
            isinstance(stmt, AnnAssign)
            and isinstance(stmt.target, Name)
            and stmt.target.id == name
        )

    node.body = [stmt for stmt in node.body if not _matches(stmt)]
def get_names(functions):
    """
    Derive a method_name -> FunctionDef dictionary

    :param functions: Routing functions
    :type functions: ```Iterator[FunctionDef]```

    :returns: Dict of `method_name` to `FunctionDef`
    :rtype: ```Dict[str, FunctionDef]```
    """

    def _route_method(func):
        """First HTTP-method name among `func`'s `@<app>.<method>(...)` decorators."""
        # Fix: the original used `all((isinstance(call.func, Attribute),
        # call.func.attr in methods))`, which builds the tuple eagerly — so
        # `.attr` was evaluated even for non-`Attribute` funcs (e.g. a plain
        # `@deco()` decorator), raising AttributeError. `and` short-circuits.
        return next(
            call.func.attr
            for call in func.decorator_list
            if isinstance(call, Call)
            and isinstance(call.func, Attribute)
            and call.func.attr in methods
        )

    return {_route_method(func): func for func in functions}
def test_from_class_with_body_in_method_to_method_with_body(self) -> None:
    """Tests if this can make the roundtrip from a full function to a full function"""
    annotate_ancestry(class_with_method_and_body_types_ast)

    # Gold: the first method of the fixture class, docstring re-indented
    gold_method = reindent_docstring(
        next(
            node
            for node in class_with_method_and_body_types_ast.body
            if isinstance(node, FunctionDef)
        )
    )

    ir = parse.function(
        find_in_ast(
            "C.function_name".split("."),
            class_with_method_and_body_types_ast,
        ),
    )
    gen_ast = emit.function(
        ir,
        emit_default_doc=False,
        function_name="function_name",
        function_type="self",
        indent_level=1,
        emit_separating_tab=True,
        emit_as_kwonlyargs=False,
    )

    run_ast_test(
        self,
        gen_ast=gen_ast,
        gold=gold_method,
    )
def test_to_function_with_type_annotations(self) -> None:
    """
    Tests that `function` can generate a function_def with inline types
    """
    # Deep-copy the fixture's first method so mutation doesn't leak between tests
    method = deepcopy(
        next(
            node
            for node in class_with_method_types_ast.body
            if isinstance(node, FunctionDef)
        )
    )
    method_name, method_type = method.name, get_function_type(method)
    reindent_docstring(method)

    gen_ast = emit.function(
        parse.function(
            method,
            function_name=method_name,
            function_type=method_type,
        ),
        function_name=method_name,
        function_type=method_type,
        emit_default_doc=False,
        type_annotations=True,
        emit_separating_tab=True,
        indent_level=1,
        emit_as_kwonlyargs=False,
    )

    run_ast_test(
        self,
        gen_ast=gen_ast,
        gold=method,
    )
def test_to_function_emit_as_kwonlyargs(self) -> None:
    """
    Tests whether `function` produces method with keyword only arguments
    """
    # Rewrite the fixture source so params after `self` become keyword-only,
    # then take (a deep copy of) the first method of the parsed class
    kwonly_src = class_with_method_types_str.replace("self", "self, *")
    class_body = ast.parse(kwonly_src).body[0].body
    gold_method = reindent_docstring(
        deepcopy(next(node for node in class_body if isinstance(node, FunctionDef)))
    )

    gen_ast = emit.function(
        parse.docstring(docstring_str),
        function_name=gold_method.name,
        function_type=get_function_type(gold_method),
        emit_default_doc=False,
        type_annotations=True,
        emit_separating_tab=True,
        indent_level=1,
        emit_as_kwonlyargs=True,
    )

    run_ast_test(
        self,
        gen_ast=gen_ast,
        gold=gold_method,
    )
def test_to_function(self) -> None:
    """
    Tests whether `function` produces method from `class_with_method_types_ast` given `docstring_str`
    """
    gold_method = reindent_docstring(
        deepcopy(
            next(
                node
                for node in class_with_method_types_ast.body
                if isinstance(node, FunctionDef)
            )
        )
    )

    gen_ast = emit.function(
        parse.docstring(docstring_str),
        function_name=gold_method.name,
        function_type=get_function_type(gold_method),
        emit_default_doc=False,
        type_annotations=True,
        emit_separating_tab=True,
        indent_level=1,
        emit_as_kwonlyargs=False,
    )

    run_ast_test(
        self,
        gen_ast=gen_ast,
        gold=gold_method,
    )
def test_to_function_with_docstring_types(self) -> None:
    """
    Tests that `function` can generate a function_def with types in docstring
    """
    # Sanity check: the fixture AST matches its string form
    run_ast_test(
        self,
        class_with_method_ast,
        gold=ast.parse(class_with_method_str).body[0],
    )

    gold_method = reindent_docstring(
        deepcopy(
            next(
                node
                for node in class_with_method_ast.body
                if isinstance(node, FunctionDef)
            )
        )
    )

    gen_ast = reindent_docstring(
        emit.function(
            parse.function(gold_method),
            function_name=gold_method.name,
            function_type=get_function_type(gold_method),
            emit_default_doc=False,
            type_annotations=False,
            indent_level=1,
            emit_separating_tab=True,
            emit_as_kwonlyargs=False,
            word_wrap=False,
        )
    )

    run_ast_test(self, gen_ast=gen_ast, gold=gold_method)
def test_diff(self) -> None:
    """Tests that `diff` gives correct results"""
    strip_left = partial(diff, op=str.lstrip)
    # (input, expected (chars_stripped, remainder)) — same cases, same order
    for arg, expect in (("A", (0, "A")), ("", (0, "")), (" A", (1, "A"))):
        self.assertTupleEqual(strip_left(arg), expect)
    self.assertTupleEqual(diff("AB", op=rpartial(str.lstrip, "A")), (1, "B"))
def test_parse_out_param(self) -> None:
    """Test that parse_out_param parses out the right dict"""
    # Last `Expr` in the argparse function body (hence the reversed scan)
    last_expr = next(
        node for node in argparse_func_ast.body[::-1] if isinstance(node, Expr)
    )
    self.assertDictEqual(
        parse_out_param(last_expr)[1],
        intermediate_repr["params"]["data_loader_kwargs"],
    )
def _merge_inner_function(
    class_def, infer_type, intermediate_repr, merge_inner_function
):
    """
    Merge the inner function if found within the class, with the class IR

    :param class_def: Class AST
    :type class_def: ```ClassDef```

    :param infer_type: Whether to try inferring the typ (from the default)
    :type infer_type: ```bool```

    :param intermediate_repr: a dictionary of form
        {  "name": Optional[str],
           "type": Optional[str],
           "doc": Optional[str],
           "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
           "returns": Optional[OrderedDict[Literal['return_type'],
                                           {'typ': str, 'doc': Optional[str], 'default': Any}),)]] }
    :type intermediate_repr: ```dict```

    :param merge_inner_function: Name of inner function to merge. If None, merge nothing.
    :type merge_inner_function: ```Optional[str]```

    :returns: a dictionary of form
        {  "name": Optional[str],
           "type": Optional[str],
           "doc": Optional[str],
           "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
           "returns": Optional[OrderedDict[Literal['return_type'],
                                           {'typ': str, 'doc': Optional[str], 'default': Any}),)]] }
    :rtype: ```dict```
    """
    # First FunctionDef anywhere inside the class whose name matches; None if absent
    function_def = next(
        filter(
            lambda func: func.name == merge_inner_function,
            filter(rpartial(isinstance, FunctionDef), ast.walk(class_def)),
        ),
        None,
    )

    if function_def is not None:
        # "static" when there are no positional args, otherwise the first arg's
        # name (e.g. "self"/"cls") doubles as the function type
        function_type = (
            "static" if not function_def.args.args else function_def.args.args[0].arg
        )
        inner_ir = function(
            function_def,
            function_name=merge_inner_function,
            function_type=function_type,
            infer_type=infer_type,
        )
        # Mutates `intermediate_repr` in place with the inner function's IR
        ir_merge(other=inner_ir, target=intermediate_repr)

    return intermediate_repr
def infer(*args, **kwargs):
    """
    Infer the `parse` type

    :param args: The arguments
    :type args: ```Tuple[args]```

    :param kwargs: Keyword arguments
    :type kwargs: ```dict```

    :returns: Name of inferred parser
    :rtype: ```str```
    """
    # Positional arg wins; otherwise try the known keyword spellings in order
    node = (
        args[0]
        if args
        else kwargs.get(
            "class_def", kwargs.get("function_def", kwargs.get("call_or_name"))
        )
    )
    is_supported_ast_node = isinstance(
        node, (Module, Assign, AnnAssign, Call, ClassDef, FunctionDef)
    )
    # Live Python objects (classes/functions) are round-tripped through their source
    if not is_supported_ast_node and (
        isinstance(node, (type, FunctionType)) or type(node).__name__ == "function"
    ):
        return infer(ast.parse(getsource(node)).body[0])

    if not is_supported_ast_node:
        if not isinstance(node, str):
            node = get_value(node)
        # A plain string that isn't itself source code is treated as a docstring
        if (
            isinstance(node, str)
            and not node.startswith("def ")
            and not node.startswith("class ")
        ):
            return "docstring"

    assert is_supported_ast_node
    if isinstance(node, FunctionDef):
        # A parameter named `argument_parser` marks an argparse-builder function
        if next(
            filter(
                partial(eq, "argument_parser"), map(attrgetter("arg"), node.args.args)
            ),
            False,
        ):
            return "argparse_ast"

        return "function"
    elif isinstance(node, ClassDef):
        # Inheriting from a base literally named `Base` marks SQLalchemy declarative
        if any(
            filter(
                partial(eq, "Base"),
                map(attrgetter("id"), filter(rpartial(hasattr, "id"), node.bases)),
            )
        ):
            return "sqlalchemy"
        return "class_"
    elif isinstance(node, (AnnAssign, Assign)):
        return infer(node.value)
    elif isinstance(node, Call):
        # NOTE(review): assumes `node.args[1]` is a `Name` (`.id` access) — a
        # non-Name second arg would raise AttributeError; confirm callers
        if len(node.args) > 2 and node.args[1].id == "metadata":
            return "sqlalchemy_table"
    # NOTE(review): a `Call` not matching the Table shape falls through and
    # implicitly returns None, despite the declared ```str``` rtype
def check_emission(self, tempdir, dry_run=False):
    """
    Confirm whether emission conforms to gen by verifying their IRs are equivalent

    :param tempdir: Temporary directory
    :type tempdir: ```str```

    :param dry_run: Show what would be created; don't actually write to the filesystem
    :type dry_run: ```bool```
    """
    new_module_name = path.basename(tempdir)

    for name, folder in self.module_hierarchy:
        gen_folder = path.join(tempdir, new_module_name, folder)
        gold_folder = path.join(self.gold_dir, self.module_name, folder)

        def _open(_folder):
            """
            :param _folder: Folder to join on
            :type _folder: ```str```

            :returns: Open IO
            :rtype: ```open```
            """
            # `name` is captured from the enclosing loop; called within the
            # same iteration, so the late binding is safe here
            return open(
                path.join(
                    _folder, "{name}{extsep}py".format(name=name, extsep=extsep)
                ),
                "rt",
            )

        self.assertTrue(path.isdir(gold_folder))

        gen_is_dir = path.isdir(gen_folder)
        if dry_run:
            self.assertFalse(gen_is_dir)
        else:
            self.assertTrue(gen_is_dir)

            # Compare the IRs of the first ClassDef in each file — equality of
            # IRs is the conformance criterion, not textual equality
            with _open(gen_folder) as gen, _open(gold_folder) as gold:
                gen_ir, gold_ir = map(
                    lambda node: parse.class_(
                        next(
                            filter(
                                rpartial(isinstance, ClassDef),
                                ast_parse(node.read()).body,
                            )
                        )
                    ),
                    (gen, gold),
                )
                self.assertDictEqual(gold_ir, gen_ir)
def _interpolate_return(function_def, intermediate_repr):
    """
    Interpolate the return value into the IR.

    :param function_def: function definition
    :type function_def: ```FunctionDef```

    :param intermediate_repr: a dictionary of form
        {  "name": Optional[str],
           "type": Optional[str],
           "doc": Optional[str],
           "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
           "returns": Optional[OrderedDict[Literal['return_type'],
                                           {'typ': str, 'doc': Optional[str], 'default': Any}),)]] }
    :type intermediate_repr: ```dict```

    :returns: a dictionary of form
        {  "name": Optional[str],
           "type": Optional[str],
           "doc": Optional[str],
           "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
           "returns": Optional[OrderedDict[Literal['return_type'],
                                           {'typ': str, 'doc': Optional[str], 'default': Any}),)]] }
    :rtype: ```dict```
    """
    # Last `Return` statement in the body (reversed scan), if any
    return_ast = next(
        filter(rpartial(isinstance, Return), function_def.body[::-1]), None
    )
    if return_ast is not None and return_ast.value is not None:
        if intermediate_repr.get("returns") is None:
            intermediate_repr["returns"] = OrderedDict((("return_type", {}),))

        # Drop a previously-recorded simple (non-generic, no '[') typ so the
        # value derived below takes precedence
        if (
            "typ" in intermediate_repr["returns"]["return_type"]
            and "[" not in intermediate_repr["returns"]["return_type"]["typ"]
        ):
            del intermediate_repr["returns"]["return_type"]["typ"]

        # Outer lambda receives the unparsed return expression; tuples get
        # paren-wrapped, scalar-ish values pass through, anything else is
        # fenced in triple backticks
        intermediate_repr["returns"]["return_type"]["default"] = (
            lambda default: "({})".format(default)
            if isinstance(return_ast.value, Tuple)
            and (not default.startswith("(") or not default.endswith(")"))
            else (
                lambda default_: default_
                if isinstance(
                    default_,
                    (str, int, float, complex, ast.Num, ast.Str, ast.Constant),
                )
                else "```{}```".format(default)
            )(get_value(get_value(return_ast)))
        )(to_code(return_ast.value).rstrip("\n"))

    # A PEP484 return annotation always wins for the `typ` field
    if hasattr(function_def, "returns") and function_def.returns is not None:
        intermediate_repr["returns"] = intermediate_repr.get("returns") or OrderedDict(
            (("return_type", {}),)
        )
        intermediate_repr["returns"]["return_type"]["typ"] = to_code(
            function_def.returns
        ).rstrip("\n")

    return intermediate_repr
def get_at_root(node, types):
    """
    Get the imports from a node

    :param node: AST node with .body, probably an `ast.Module`
    :type node: ```AST```

    :param types: The types to search for (used in an `isinstance` check)
    :type types: ```Tuple[type, ...]```

    :returns: List of matching root-scope nodes. Doesn't handle those within a
        try/except, condition, or not in root scope
    :rtype: ```list```
    """
    assert hasattr(node, "body") and isinstance(node.body, (list, tuple))
    return [child for child in node.body if isinstance(child, types)]
def parse_model(filename):
    """
    :param filename: The filename to open and parse AST out of
    :type filename: ```str```

    :returns: Iterable of tuples of the found kind
    :rtype: ```Iterable[Tuple[AST, ...], ...]```
    """
    with open(filename, "rb") as f:
        parsed_ast = ast.parse(f.read())

    # Walk the whole tree; keep Call/ClassDef nodes whose inferred parser
    # kind is an sqlalchemy variant
    return (
        node
        for node in ast.walk(parsed_ast)
        if isinstance(node, (Call, ClassDef))
        and (infer(node) or "").startswith("sqlalchemy")
    )
def parse_route(filename):
    """
    :param filename: The filename to open and parse AST out of
    :type filename: ```str```

    :returns: Iterable of tuples of the found kind
    :rtype: ```Iterable[Tuple[AST, ...], ...]```
    """
    with open(filename, "rb") as f:
        parsed_ast = ast.parse(f.read())

    # NOTE(review): `app_name` is a free variable here — presumably a
    # module-level global set before this runs; confirm it is defined.
    # Index [1] of get_route_meta's (func_name, app_name, route_path,
    # http_method) tuple is the app the route is attached to.
    return filter(
        lambda node: next(
            get_route_meta(Module(body=[node], type_ignores=[], stmt=None))
        )[1]
        == app_name,
        filter(rpartial(isinstance, FunctionDef), parsed_ast.body),
    )
def _parse(scan, partitioned=None):
    """
    Parse the scanned input (Google)

    :param scan: Scanned input
    :type scan: ```List[str]```

    :param partitioned: Prep-partitioned `scan`, if given doesn't partition on `scan`, just uses this
    :type partitioned: ```Optional[Tuple[str, str, str]]```

    :returns: dict of shape {'name': ..., 'typ': ..., 'doc': ..., 'default': ..., 'required': ... }
    :rtype: ```dict```
    """
    # Column of the first ':' — everything before it is "name (typ)", after is doc
    offset = next(idx for idx, ch in enumerate(scan[0]) if ch == ":")
    s = white_spacer(scan[0][:offset])
    name, delim, typ = partitioned or s.partition("(")
    name, typ = name.strip(), (delim + typ).rstrip()
    cur = {"name": name}
    if typ:
        assert typ.startswith("(") and typ.endswith(
            ")"
        ), "Expected third partition to be paren wrapped {s!r}".format(s=s)
        cur["typ"] = typ[1:-1]
        # Google style allows "int or str" — normalise to a Union
        if " or " in cur["typ"]:
            cur["typ"] = "Union[{types}]".format(
                types=", ".join(cur["typ"].split(" or "))
            )
        end = white_spacer(scan[0][offset + 1 :])
        if len(end) > 3 and end.startswith("{") and end.endswith("}"):
            # PyTorch invented their own syntax for this I guess?
            # `{'a', 'b'}` after the colon becomes a Literal type; the consumed
            # text is blanked out of scan[0] so it doesn't end up in the doc
            cur["typ"], scan[0] = (
                "Literal{literal_type_list}".format(
                    literal_type_list=list(
                        map(rpartial(str.strip, "'"), end[1:-1].split(", "))
                    )
                ),
                "",
            )
    # NOTE(review): `parse_original_whitespace` is a free variable here — not a
    # parameter of this function; presumably a module-level/enclosing-scope
    # flag, otherwise this raises NameError. Confirm where it is bound.
    cur["doc"] = (lambda s_: s_ if parse_original_whitespace else s_.strip())(
        "\n".join(
            chain.from_iterable(((white_spacer(scan[0][offset + 1 :]),), scan[1:]))
        )
    )
    return cur
def find_ast_type(node, node_name=None, of_type=ClassDef):
    """
    Finds first AST node of the given type and possibly name

    :param node: Any AST node
    :type node: ```AST```

    :param node_name: Name of AST node. If None, gives first found.
    :type node_name: ```Optional[str]```

    :param of_type: Of which type to find
    :type of_type: ```AST```

    :returns: Found AST node
    :rtype: ```AST```
    """
    if isinstance(node, Module):
        it = (child for child in node.body if isinstance(child, of_type))
        if node_name is not None:
            return next(
                filter(lambda e: hasattr(e, "name") and e.name == node_name, it)
            )
        matching_nodes = tuple(it)
        if len(matching_nodes) > 1:  # We could convert every one I guess?
            raise NotImplementedError()
        elif matching_nodes:
            return matching_nodes[0]
        else:
            # Fix: `type(of_type).__name__` is the metaclass name (always
            # "type"), not the searched-for class — use `of_type.__name__`
            # so the error names what was actually requested
            raise TypeError(
                "No {type_name!r} in AST".format(type_name=of_type.__name__)
            )
    elif isinstance(node, AST):
        # Already a concrete node: just sanity-check the name if one was given
        assert node_name is None or not hasattr(node, "name") or node.name == node_name
        return node
    else:
        raise NotImplementedError(type(node).__name__)
def upsert_routes(app, routes, routes_path, route, primary_key):
    """
    Upsert the `routes` to the `routes_path`, on merge use existing body and replace interface/prototype

    :param app: Variable name (Bottle App)
    :type app: ```str```

    :param routes: Iterator of functions representing relevant CRUD operations
    :type routes: ```Iterator[FunctionDef]```

    :param route: The path of the resource
    :type route: ```str```

    :param primary_key: The primary key or id to lookup on for the route
    :type primary_key: ```str```

    :param routes_path: The path/module-resolution whence the routes are / will be
    :type routes_path: ```str```
    """
    routes_path = filename_from_mod_or_filename(routes_path)

    if not path.isfile(routes_path):
        # Fresh file: write the prelude (with `app` substituted in) plus every route
        with open(routes_path, "wt") as f:
            f.write(
                "\n\n".join(
                    chain.from_iterable(
                        (
                            (
                                route_prelude.replace(
                                    "rest_api =", "{app} =".format(app=app)
                                ),
                            ),
                            map(to_code, routes),
                        )
                    )
                )
            )
        return

    with open(routes_path, "rt") as f:
        mod = ast.parse(f.read())

    def get_names(functions):
        """
        Derive a method_name -> FunctionDef dictionary

        :param functions: Routing functions
        :type functions: ```Iterator[FunctionDef]```

        :returns: Dict of `method_name` to `FunctionDef`
        :rtype: ```Dict[str, FunctionDef]```
        """
        # NOTE(review): `all((...))` builds its tuple eagerly, so `.attr` is
        # evaluated even for non-`Attribute` `call.func`s — a plain `@deco()`
        # decorator would raise AttributeError here; confirm inputs
        return dict(
            map(
                lambda func: (
                    next(
                        map(
                            lambda call: call.func.attr,
                            filter(
                                lambda call: all(
                                    (
                                        isinstance(call.func, Attribute),
                                        call.func.attr in methods,
                                    )
                                ),
                                filter(
                                    rpartial(isinstance, Call), func.decorator_list
                                ),
                            ),
                        )
                    ),
                    func,
                ),
                functions,
            )
        )

    routes_required = get_names(routes)
    # Existing routes: functions anywhere in the module decorated with
    # `@<app>.<method>("<route>[/:<primary_key>]", ...)` — `post` has no pk suffix
    routes_existing = get_names(
        filter(
            lambda node: any(
                filter(
                    lambda call: all(
                        (
                            isinstance(call.func, Attribute),
                            call.func.attr in methods,
                            get_value(call.args[0])
                            == "{route}{rest}".format(
                                route=route,
                                rest=""
                                if call.func.attr == "post"
                                else "/:{primary_key}".format(
                                    primary_key=primary_key
                                ),
                            ),
                            call.func.value.id == app,
                        )
                    ),
                    filter(rpartial(isinstance, Call), node.decorator_list),
                )
            ),
            filter(rpartial(isinstance, FunctionDef), ast.walk(mod)),
        )
    )
    # symmetric-difference trick: required methods not already present
    missing_routes = (
        routes_required.keys() & routes_existing.keys() ^ routes_required.keys()
    )
    if not missing_routes:
        return

    # Append only the missing routes, in canonical CRUD order
    with open(routes_path, "a") as f:
        f.write(
            "\n\n".join(
                map(
                    to_code,
                    map(
                        routes_required.__getitem__,
                        sorted(
                            missing_routes,
                            key={
                                "post": 0,
                                "get": 1,
                                "update": 2,
                                "delete": 3,
                            }.__getitem__,
                        ),
                    ),
                )
            )
        )
def _build_parser():
    """
    Parser builder

    :returns: instanceof ArgumentParser
    :rtype: ```ArgumentParser```
    """
    parser = ArgumentParser(
        prog="python -m cdd",
        description=__description__,
    )
    parser.add_argument(
        "--version",
        action="version",
        version="%(prog)s {__version__}".format(__version__=__version__),
    )

    # One subcommand is mandatory; its name lands in `args.command`
    subparsers = parser.add_subparsers()
    subparsers.required = True
    subparsers.dest = "command"

    # Shared `choices` for every --parse/--emit style option below
    parse_emit_types = "argparse", "class", "function", "sqlalchemy", "sqlalchemy_table"

    ############
    # Property #
    ############
    property_parser = subparsers.add_parser(
        "sync_properties",
        help=(
            "Synchronise one or more properties between input and input_str Python"
            " files"
        ),
    )
    property_parser.add_argument(
        "--input-filename",
        help="File to find `--input-param` from",
        required=True,
        type=str,
    )
    property_parser.add_argument(
        "--input-param",
        help=(
            "Location within file of property. Can be top level like `a` for `a=5` or"
            " with the `.` syntax as in `--output-param`."
        ),
        required=True,
        action="append",
        type=str,
        dest="input_params",
    )
    property_parser.add_argument(
        "--input-eval",
        help="Whether to evaluate the input-param, or just leave it",
        action="store_true",
    )
    property_parser.add_argument(
        "--output-filename",
        help=(
            "Edited in place, the property within this file (to update) is selected by"
            " --output-param"
        ),
        type=str,
        required=True,
    )
    property_parser.add_argument(
        "--output-param",
        help=(
            "Parameter to update. E.g., `A.F` for `class A: F`, `f.g` for `def f(g):"
            " pass`"
        ),
        required=True,
        action="append",
        type=str,
        dest="output_params",
    )
    property_parser.add_argument(
        "--output-param-wrap",
        type=str,
        help=(
            "Wrap all input_str params with this. E.g., `Optional[Union[{output_param},"
            " str]]`"
        ),
    )

    ########
    # Sync #
    ########
    sync_parser = subparsers.add_parser(
        "sync", help="Force argparse, classes, and/or methods to be equivalent"
    )
    sync_parser.add_argument(
        "--argparse-function",
        help="File where argparse function is `def`ined.",
        action="append",
        type=str,
        dest="argparse_functions",
    )
    sync_parser.add_argument(
        "--argparse-function-name",
        help="Name of argparse function.",
        action="append",
        type=str,
        dest="argparse_function_names",
    )
    sync_parser.add_argument(
        "--class",
        help="File where class `class` is declared.",
        action="append",
        type=str,
        dest="classes",
    )
    sync_parser.add_argument(
        "--class-name",
        help="Name of `class`",
        action="append",
        type=str,
        dest="class_names",
    )
    sync_parser.add_argument(
        "--function",
        help="File where function is `def`ined.",
        action="append",
        type=str,
        dest="functions",
    )
    sync_parser.add_argument(
        "--function-name",
        help=(
            "Name of Function. If method, use Python resolution syntax,"
            " i.e., ClassName.function_name"
        ),
        action="append",
        type=str,
        dest="function_names",
    )
    sync_parser.add_argument(
        "--truth",
        help=(
            "Single source of truth. Others will be generated from this. Will run with"
            " first found choice."
        ),
        choices=(
            "argparse_function",
            "class",
            "function",
            "sqlalchemy",
            "sqlalchemy_table",
        ),
        type=str,
        required=True,
    )

    #######
    # Gen #
    #######
    gen_parser = subparsers.add_parser(
        "gen",
        help=(
            "Generate classes, functions, argparse function, sqlalchemy tables and/or sqlalchemy classes"
            " from the input mapping"
        ),
    )
    gen_parser.add_argument(
        "--name-tpl", help="Template for the name, e.g., `{name}Config`.", required=True
    )
    gen_parser.add_argument(
        "--input-mapping",
        help="Import location of dictionary/mapping/2-tuple collection.",
        required=True,
    )
    gen_parser.add_argument(
        "--prepend",
        help="Prepend file with this. Use '\\n' for newlines.",
        # unescape so a literal "\n" on the CLI becomes a real newline
        type=lambda arg: decode(str(arg), "unicode_escape"),
    )
    gen_parser.add_argument(
        "--imports-from-file",
        help=(
            "Extract imports from file and append to `output_file`. "
            "If module or other symbol path given, resolve file then use it."
        ),
    )
    gen_parser.add_argument(
        "--parse",
        help="What type the input is.",
        choices=parse_emit_types,
        default="infer",
        dest="parse_name",
    )
    gen_parser.add_argument(
        "--emit",
        help="What type to generate.",
        choices=parse_emit_types,
        required=True,
        dest="emit_name",
    )
    gen_parser.add_argument(
        "--output-filename", "-o", help="Output file to write to.", required=True
    )
    gen_parser.add_argument(
        "--emit-call",
        action="store_true",
        help=(
            "Whether to place all the previous body into a new `__call__` internal"
            " function"
        ),
    )
    gen_parser.add_argument(
        "--decorator",
        help="List of decorators.",
        action="append",
        type=str,
        dest="decorator_list",
    )

    ##############
    # gen_routes #
    ##############
    routes_parser = subparsers.add_parser(
        "gen_routes", help="Generate per model route(s)"
    )
    routes_parser.add_argument(
        "--crud",
        help="What of (C)reate, (R)ead, (U)pdate, (D)elete to generate",
        choices=("CRUD", "CR", "C", "R", "U", "D", "CR", "CU", "CD", "CRD"),
        required=True,
    )
    routes_parser.add_argument(
        "--app-name",
        help="Name of app (e.g., `app_name = Bottle();\n@app_name.get('/api')\ndef slash(): pass`)",
        default="rest_api",
    )
    routes_parser.add_argument(
        "--model-path",
        help="Python module resolution (foo.models) or filepath (foo/models)",
        required=True,
    )
    routes_parser.add_argument(
        "--model-name", help="Name of model to generate from", required=True
    )
    routes_parser.add_argument(
        "--routes-path",
        help="Python module resolution 'foo.routes' or filepath 'foo/routes'",
        required=True,
    )
    routes_parser.add_argument(
        "--route",
        help="Name of the route, defaults to `/api/{model_name.lower()}`",
    )

    ###########
    # openapi #
    ###########
    openapi_parser = subparsers.add_parser(
        "openapi", help="Generate OpenAPI schema from specified project(s)"
    )
    openapi_parser.add_argument(
        "--app-name",
        help="Name of app (e.g., `app_name = Bottle();\n@app_name.get('/api')\ndef slash(): pass`)",
        default="rest_api",
    )
    openapi_parser.add_argument(
        "--model-paths",
        help="Python module resolution (foo.models) or filepath (foo/models)",
        required=True,
    )
    openapi_parser.add_argument(
        "--routes-paths",
        help="Python module resolution 'foo.routes' or filepath 'foo/routes'",
        nargs="*",
        required=True,
    )

    ############
    # doctrans #
    ############
    doctrans_parser = subparsers.add_parser(
        "doctrans",
        help=(
            "Convert docstring format of all classes and functions within target file"
        ),
    )
    doctrans_parser.add_argument(
        "--filename",
        help="Python file to convert docstrings within. Edited in place.",
        type=str,
        required=True,
    )
    doctrans_parser.add_argument(
        "--format",
        help="The docstring format to replace existing format with.",
        type=str,
        # every Style member except the "auto" sentinel
        choices=tuple(filterfalse(rpartial(eq, "auto"), Style.__members__.keys())),
        required=True,
    )
    # exactly one of --type-annotations / --no-type-annotations must be given
    doctrans_parser_group = doctrans_parser.add_mutually_exclusive_group(required=True)
    doctrans_parser_group.add_argument(
        "--type-annotations",
        help="Inline the type, i.e., annotate PEP484 (outside docstring. Requires 3.6+)",
        dest="type_annotations",
        action="store_true",
    )
    doctrans_parser_group.add_argument(
        "--no-type-annotations",
        help="Ensure all types are in docstring (rather than a PEP484 type annotation)",
        dest="type_annotations",
        action="store_false",
    )

    #########
    # exmod #
    #########
    exmod_parser = subparsers.add_parser(
        "exmod",
        help=(
            "Expose module hierarchy->{functions,classes,vars} for parameterisation "
            "via {REST API + database,CLI,SDK}"
        ),
    )
    exmod_parser.add_argument(
        "--module",
        "-m",
        help="The module or fully-qualified name (FQN) to expose.",
        required=True,
    )
    exmod_parser.add_argument(
        "--emit",
        help="What type to generate.",
        choices=parse_emit_types,
        required=True,
        action="append",
    )
    exmod_parser.add_argument(
        "--blacklist",
        help="Modules/FQN to omit. If unspecified will emit all (unless whitelist).",
        action="append",
    )
    exmod_parser.add_argument(
        "--whitelist",
        help="Modules/FQN to emit. If unspecified will emit all (minus blacklist).",
        action="append",
    )
    exmod_parser.add_argument(
        "--output-directory",
        "-o",
        help="Where to place the generated exposed interfaces to the given `--module`.",
        required=True,
    )
    exmod_parser.add_argument(
        "--dry-run",
        help="Show what would be created; don't actually write to the filesystem.",
        action="store_true",
    )

    return parser
def sqlalchemy(class_def):
    """
    Parse out a `class C(Base): __tablename__= 'tbl'; dataset_name = Column(String, doc="p", primary_key=True)`,
        as constructed on an SQLalchemy declarative `Base`.

    :param class_def: A class inheriting from declarative `Base`, where `Base = sqlalchemy.orm.declarative_base()`
    :type class_def: ```Union[ClassDef]```

    :returns: a dictionary of form
        {  "name": Optional[str],
           "type": Optional[str],
           "doc": Optional[str],
           "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
           "returns": Optional[OrderedDict[Literal['return_type'],
                                           {'typ': str, 'doc': Optional[str], 'default': Any}),)]] }
    :rtype: ```dict```
    """
    assert isinstance(class_def, ClassDef), "Expected `ClassDef` got `{}`".format(
        type(class_def).__name__
    )

    # Parse into the same format that `sqlalchemy_table` can read, then return with a call to it
    # Table name comes from the `__tablename__ = '...'` assignment
    name = get_value(
        next(
            filter(
                lambda assign: any(
                    filter(
                        partial(eq, "__tablename__"),
                        map(attrgetter("id"), assign.targets),
                    )
                ),
                filter(rpartial(isinstance, Assign), class_def.body),
            )
        ).value
    )
    doc_string = get_docstring(class_def)

    def _merge_name_to_column(assign):
        """
        Merge `a = Column()` into `Column("a")`

        :param assign: Of form `a = Column()`
        :type assign: ```Assign```

        :returns: Unwrapped Call with name prepended
        :rtype: ```Call```
        """
        assign.value.args.insert(0, set_value(assign.targets[0].id))
        return assign.value

    # Build a synthetic `Table("<name>", metadata, Column("a", ...), ...)` Call
    return sqlalchemy_table(
        Call(
            func=Name("Table", Load()),
            args=list(
                chain.from_iterable(
                    (
                        iter((set_value(name), Name("metadata", Load()))),
                        map(
                            _merge_name_to_column,
                            # keep every Assign except `__tablename__`.
                            # NOTE(review): the `target.func.rpartition(...)`
                            # clause looks unreachable — `Name` targets have no
                            # `.value` attr so `hasattr` short-circuits first;
                            # possibly meant `assign.value.func` — confirm intent
                            filterfalse(
                                lambda assign: any(
                                    map(
                                        lambda target: target.id == "__tablename__"
                                        or hasattr(target, "value")
                                        and isinstance(target.value, Call)
                                        and target.func.rpartition(".")[2] == "Column",
                                        assign.targets,
                                    ),
                                ),
                                filter(rpartial(isinstance, Assign), class_def.body),
                            ),
                        ),
                    )
                )
            ),
            # class docstring becomes the table's `comment=` keyword
            keywords=[]
            if doc_string is None
            else [keyword(arg="comment", value=set_value(doc_string), identifier=None)],
            expr=None,
            expr_func=None,
        )
    )
def class_(
    class_def,
    class_name=None,
    merge_inner_function=None,
    infer_type=False,
    parse_original_whitespace=False,
    word_wrap=True,
):
    """
    Converts an AST to our IR

    :param class_def: Class AST or Module AST with a ClassDef inside
    :type class_def: ```Union[Module, ClassDef]```

    :param class_name: Name of `class`. If None, gives first found.
    :type class_name: ```Optional[str]```

    :param merge_inner_function: Name of inner function to merge. If None, merge nothing.
    :type merge_inner_function: ```Optional[str]```

    :param infer_type: Whether to try inferring the typ (from the default)
    :type infer_type: ```bool```

    :param parse_original_whitespace: Whether to parse original whitespace or strip it out
    :type parse_original_whitespace: ```bool```

    :param word_wrap: Whether to word-wrap. Set `DOCTRANS_LINE_LENGTH` to configure length.
    :type word_wrap: ```bool```

    :returns: a dictionary of form
        {  "name": Optional[str],
           "type": Optional[str],
           "doc": Optional[str],
           "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
           "returns": Optional[OrderedDict[Literal['return_type'],
                                           {'typ': str, 'doc': Optional[str], 'default': Any}),)]]
        }
    :rtype: ```dict```
    """
    assert not isinstance(class_def, FunctionDef)
    is_supported_ast_node = isinstance(class_def, (Module, ClassDef))
    # A live `type` object (not an AST) is handled via `inspect` instead
    if not is_supported_ast_node and isinstance(class_def, type):
        return _class_from_memory(
            class_def, class_name, infer_type, merge_inner_function, word_wrap
        )
    assert (
        is_supported_ast_node
    ), "Expected 'Union[Module, ClassDef]' got `{node_name!r}`".format(
        node_name=type(class_def).__name__
    )
    class_def = find_ast_type(class_def, class_name)
    doc_str = get_docstring(class_def)
    # Seed the IR either empty or from the parsed class docstring
    intermediate_repr = (
        {
            "name": class_name,
            "type": "static",
            "doc": "",
            "params": OrderedDict(),
            "returns": None,
        }
        if doc_str is None
        else docstring(
            doc_str,
            emit_default_doc=False,
            parse_original_whitespace=parse_original_whitespace,
        )
    )
    # The docstring parser leaves `return_type` inside "params"; relocate it
    if "return_type" in intermediate_repr["params"]:
        intermediate_repr["returns"] = OrderedDict(
            (("return_type", intermediate_repr["params"].pop("return_type")),)
        )

    # Walk the class body (sans docstring), folding attribute assignments into the IR
    body = class_def.body if doc_str is None else class_def.body[1:]
    for e in body:
        if isinstance(e, AnnAssign):
            typ = to_code(e.annotation).rstrip("\n")
            # Derive the "default" entry from the RHS: None -> NoneStr sentinel,
            # simple literals kept as-is, empty containers special-cased by
            # their source text, anything else parsed to a scalar
            val = (
                lambda v: {"default": NoneStr}
                if v is None
                else {
                    "default": v
                    if type(v).__name__ in simple_types
                    else (
                        lambda value: {
                            "{}": {} if isinstance(v, Dict) else set(),
                            "[]": [],
                            "()": (),
                        }.get(value, parse_to_scalar(value))
                    )(to_code(v).rstrip("\n"))
                }
            )(get_value(get_value(e)))
            # if 'str' in typ and val: val["default"] = val["default"].strip("'")  # Unquote?
            typ_default = dict(typ=typ, **val)

            # Update the matching docstring-derived param/return in place;
            # `typ_default = False` marks it consumed
            for key in "params", "returns":
                if e.target.id in (intermediate_repr[key] or iter(())):
                    intermediate_repr[key][e.target.id].update(typ_default)
                    typ_default = False
                    break

            # Not documented in the docstring: insert it fresh
            if typ_default:
                k = "returns" if e.target.id == "return_type" else "params"
                if intermediate_repr.get(k) is None:
                    intermediate_repr[k] = OrderedDict()
                intermediate_repr[k][e.target.id] = typ_default
        elif isinstance(e, Assign):
            val = get_value(e)
            if val is not None:
                val = get_value(val)
                # `deque(..., maxlen=0)` exhausts the map purely for its
                # setitem side effect on intermediate_repr["params"]
                deque(
                    map(
                        lambda target: setitem(
                            *(
                                (intermediate_repr["params"][target.id], "default", val)
                                if isinstance(target, Name)
                                and target.id in intermediate_repr["params"]
                                else (
                                    intermediate_repr["params"],
                                    target.id
                                    if isinstance(target, Name)
                                    else get_value(get_value(target)),
                                    {"default": val},
                                )
                            )
                        ),
                        e.targets,
                    ),
                    maxlen=0,
                )

    intermediate_repr.update(
        {
            "params": OrderedDict(
                map(
                    partial(
                        _set_name_and_type, infer_type=infer_type, word_wrap=word_wrap
                    ),
                    intermediate_repr["params"].items(),
                )
            ),
            # Keep the non-assignment statements so emitters can reproduce the body
            "_internal": {
                "body": list(
                    filterfalse(rpartial(isinstance, (AnnAssign, Assign)), body)
                ),
                "from_name": class_def.name,
                "from_type": "cls",
            },
        }
    )

    if merge_inner_function is not None:
        assert isinstance(class_def, ClassDef)

        _merge_inner_function(
            class_def,
            infer_type=infer_type,
            intermediate_repr=intermediate_repr,
            merge_inner_function=merge_inner_function,
        )
    # intermediate_repr['_internal']["body"] = list(filterfalse(rpartial(isinstance, (AnnAssign, Assign)), class_def.body))
    return intermediate_repr
def function(
    function_def,
    infer_type=False,
    word_wrap=True,
    function_type=None,
    function_name=None,
):
    """
    Converts a method to our IR

    :param function_def: AST node for function definition
    :type function_def: ```Union[FunctionDef, FunctionType]```

    :param infer_type: Whether to try inferring the typ (from the default)
    :type infer_type: ```bool```

    :param word_wrap: Whether to word-wrap. Set `DOCTRANS_LINE_LENGTH` to configure length.
    :type word_wrap: ```bool```

    :param function_type: Type of function, static is static or global method, others just become first arg
    :type function_type: ```Literal['self', 'cls', 'static']```

    :param function_name: name of function_def
    :type function_name: ```str```

    :returns: a dictionary of form
        {  "name": Optional[str],
           "type": Optional[str],
           "doc": Optional[str],
           "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
           "returns": Optional[OrderedDict[Literal['return_type'],
                                           {'typ': str, 'doc': Optional[str], 'default': Any}),)]]
        }
    :rtype: ```dict```
    """
    if isinstance(function_def, FunctionType):
        # Dynamic function, i.e., this isn't source code; and is in your memory
        ir = _inspect(function_def, function_name, word_wrap)
        parsed_source = ast.parse(getsource(function_def).lstrip()).body[0]
        body = (
            parsed_source.body
            if ast.get_docstring(parsed_source) is None
            else parsed_source.body[1:]
        )
        ir["_internal"] = {
            "body": list(filterfalse(rpartial(isinstance, AnnAssign), body)),
            "from_name": parsed_source.name,
            "from_type": "cls",
        }
        return ir

    assert isinstance(
        function_def, FunctionDef
    ), "Expected 'FunctionDef' got `{node_name!r}`".format(
        node_name=type(function_def).__name__
    )
    assert (
        function_name is None or function_def.name == function_name
    ), "Expected {function_name!r} got {function_def_name!r}".format(
        function_name=function_name, function_def_name=function_def.name
    )
    found_type = get_function_type(function_def)

    # Read docstring
    doc_str = (
        get_docstring(function_def) if isinstance(function_def, FunctionDef) else None
    )

    # Deep-copy before mutating so the caller's AST is left intact
    function_def = deepcopy(function_def)
    # Drop the implicit first arg (`self`/`cls`) unless the function is static
    function_def.args.args = (
        function_def.args.args if found_type == "static" else function_def.args.args[1:]
    )

    if doc_str is None:
        intermediate_repr = {
            "name": function_name or function_def.name,
            "params": OrderedDict(),
            "returns": None,
        }
    else:
        intermediate_repr = docstring(
            doc_str.replace(":cvar", ":param"), infer_type=infer_type
        )

    intermediate_repr.update(
        {
            "name": function_name or function_def.name,
            "type": function_type or found_type,
        }
    )

    # Retain the body (sans docstring) so emitters can reproduce it
    function_def.body = function_def.body if doc_str is None else function_def.body[1:]
    if function_def.body:
        intermediate_repr["_internal"] = {
            "body": function_def.body,
            "from_name": function_def.name,
            "from_type": found_type,
        }

    # `**kwargs` documented in the docstring is set aside and re-appended last,
    # so it ends up after the positional/keyword params
    params_to_append = OrderedDict()
    if (
        hasattr(function_def.args, "kwarg")
        and function_def.args.kwarg
        and function_def.args.kwarg.arg in intermediate_repr["params"]
    ):
        _param = intermediate_repr["params"].pop(function_def.args.kwarg.arg)
        assert "typ" in _param
        _param["default"] = NoneStr
        params_to_append[function_def.args.kwarg.arg] = _param
        del _param

    # Set defaults

    # Fill with `None`s when no default is given to make the `zip` below it work cleanly
    for args, defaults in ("args", "defaults"), ("kwonlyargs", "kw_defaults"):
        diff = len(getattr(function_def.args, args)) - len(
            getattr(function_def.args, defaults)
        )
        if diff:
            # BUG FIX: pad with exactly `diff` leading Nones (was a hard-coded 10),
            # so `args[idx]` lines up with `defaults[idx]` in the merge below;
            # padding by 10 misaligned every default (and could IndexError)
            setattr(
                function_def.args,
                defaults,
                list(islice(cycle((None,)), diff))
                + getattr(function_def.args, defaults),
            )
    ir_merge(
        intermediate_repr,
        {
            "params": OrderedDict(
                (
                    func_arg2param(
                        getattr(function_def.args, args)[idx],
                        default=getattr(function_def.args, defaults)[idx],
                    )
                    for args, defaults in (
                        ("args", "defaults"),
                        ("kwonlyargs", "kw_defaults"),
                    )
                    for idx in range(len(getattr(function_def.args, args)))
                )
            ),
            "returns": None,
        },
    )

    intermediate_repr["params"].update(params_to_append)
    intermediate_repr["params"] = OrderedDict(
        map(
            partial(_set_name_and_type, infer_type=infer_type, word_wrap=word_wrap),
            intermediate_repr["params"].items(),
        )
    )

    # Convention - the final top-level `return` is the default
    intermediate_repr = _interpolate_return(function_def, intermediate_repr)
    if "return_type" in (intermediate_repr.get("returns") or iter(())):
        intermediate_repr["returns"] = OrderedDict(
            map(
                partial(_set_name_and_type, infer_type=infer_type, word_wrap=word_wrap),
                intermediate_repr["returns"].items(),
            )
        )

    return intermediate_repr
def _class_from_memory(
    class_def, class_name, infer_type, merge_inner_function, word_wrap
):
    """
    Merge the inner function if found within the class, with the class IR.
    Internal func just for internal memory. Uses `inspect`.

    :param class_def: Class AST
    :type class_def: ```ClassDef```

    :param class_name: Class name
    :type class_name: ```str```

    :param infer_type: Whether to try inferring the typ (from the default)
    :type infer_type: ```bool```

    :param merge_inner_function: Name of inner function to merge. If None, merge nothing.
    :type merge_inner_function: ```Optional[str]```

    :param word_wrap: Whether to word-wrap. Set `DOCTRANS_LINE_LENGTH` to configure length.
    :type word_wrap: ```bool```

    :returns: a dictionary of form
        {  "name": Optional[str],
           "type": Optional[str],
           "doc": Optional[str],
           "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
           "returns": Optional[OrderedDict[Literal['return_type'],
                                           {'typ': str, 'doc': Optional[str], 'default': Any}),)]]
        }
    :rtype: ```dict```
    """
    ir = _inspect(class_def, class_name, word_wrap)
    src = get_source(class_def)
    # No source available (e.g., built in a REPL): the inspect-derived IR is all we have
    if src is None:
        return ir
    parsed_body = ast.parse(src.lstrip()).body[0]

    # Strip the docstring statement so only real body statements remain
    parsed_body.body = (
        parsed_body.body if get_docstring(parsed_body) is None else parsed_body.body[1:]
    )

    if merge_inner_function is not None:
        _merge_inner_function(
            parsed_body,
            infer_type=infer_type,
            intermediate_repr=ir,
            merge_inner_function=merge_inner_function,
        )
        # Early exit: when merging an inner function, the steps below are skipped
        return ir

    ir["_internal"] = {
        "body": list(
            filterfalse(
                rpartial(isinstance, AnnAssign),
                parsed_body.body,
            )
        ),
        "from_name": class_name,
        "from_type": "cls",
    }
    # Re-parse the recovered source through the AST path and fold it into the IR
    body_ir = class_(
        class_def=parsed_body,
        class_name=class_name,
        merge_inner_function=merge_inner_function,
    )

    ir_merge(ir, body_ir)
    return ir
def test_exmod_dry_run(self) -> None:
    """Tests `exmod` dry_run"""
    try:
        with TemporaryDirectory(
            prefix="gold", suffix="gold"
        ) as existent_module_dir, TemporaryDirectory(
            prefix="gen", suffix="gen"
        ) as new_module_dir:
            self.create_fs(existent_module_dir)
            self._pip(["install", "."], existent_module_dir)

            # Capture stdout: in dry-run mode `exmod` prints its planned
            # filesystem actions instead of performing them
            with patch("sys.stdout", new_callable=StringIO) as f:
                exmod(
                    module=self.module_name,
                    emit_name="class",
                    blacklist=tuple(),
                    whitelist=tuple(),
                    output_directory=new_module_dir,
                    dry_run=True,
                )
                r = f.getvalue()

            # Parse the dry-run log: lines look like "<action>\t<target>";
            # group by action, strip quoting, and make targets relative to
            # the output directory for stable comparison
            result = dict(
                map(
                    lambda k_v: (
                        k_v[0],
                        tuple(
                            sorted(
                                set(
                                    map(
                                        partial(
                                            relative_filename,
                                            remove_hints=(
                                                (
                                                    # Windows paths need the repr-quoted
                                                    # form plus trailing separator
                                                    lambda directory: unquote(
                                                        repr(directory)
                                                    )
                                                    + path.sep
                                                    if platform == "win32"
                                                    else directory
                                                )(
                                                    path.join(
                                                        new_module_dir,
                                                        path.basename(
                                                            new_module_dir
                                                        ),
                                                    ),
                                                ),
                                            ),
                                        ),
                                        map(unquote, map(itemgetter(1), k_v[1])),
                                    )
                                )
                            )
                        ),
                    ),
                    groupby(
                        # NOTE: `groupby` requires its input sorted by the same
                        # key (the action), hence the `sorted` here
                        map(rpartial(str.split, "\t", 2), sorted(r.splitlines())),
                        key=itemgetter(0),
                    ),
                )
            )

            # 7 writes only occur when the full test suite ran (gold install
            # present); a partial run yields 4 — expectations branch on this
            all_tests_running = len(result["write"]) == 7

            key_counts = (
                (("mkdir", 7), ("touch", 4), ("write", 7))
                if all_tests_running
                else (("mkdir", 7), ("touch", 4), ("write", 4))
            )

            for key, count in key_counts:
                self.assertEqual(len(result[key]), count, key)

            # First path component of any "gold"-prefixed write target,
            # or "" when none exist
            gold_module_name = next(
                map(
                    lambda p: p.partition(path.sep)[0],
                    filter(rpartial(str.startswith, "gold"), result["write"]),
                ),
                "",
            )

            # Expected action->targets mapping, quoted the same way as `result`
            expect = {
                k: tuple(map(unquote, map(repr, v)))
                for k, v in {
                    "mkdir": (
                        new_module_dir,
                        self.module_hierarchy[0][1],
                        self.module_hierarchy[1][1],
                        path.join(
                            self.module_hierarchy[1][1],
                            self.module_hierarchy[1][0],
                        ),
                        self.module_hierarchy[2][1],
                        path.join(
                            self.module_hierarchy[2][1],
                            self.module_hierarchy[2][0],
                        ),
                        path.join(
                            self.module_hierarchy[0][1],
                            self.module_hierarchy[0][0],
                        ),
                    ),
                    "touch": (
                        "__init__{extsep}py".format(extsep=extsep),
                        path.join(
                            self.module_hierarchy[0][1],
                            "__init__{extsep}py".format(extsep=extsep),
                        ),
                        path.join(
                            self.module_hierarchy[1][1],
                            "__init__{extsep}py".format(extsep=extsep),
                        ),
                        path.join(
                            self.module_hierarchy[2][1],
                            "__init__{extsep}py".format(extsep=extsep),
                        ),
                    ),
                    # When everything ran, writes appear both bare and under
                    # the gold module prefix (except the root __init__)
                    "write": (
                        lambda write_block: tuple(
                            sorted(
                                chain.from_iterable(
                                    (
                                        map(
                                            partial(path.join, gold_module_name),
                                            write_block[1:],
                                        ),
                                        write_block,
                                    )
                                )
                            )
                        )
                        if all_tests_running
                        else write_block
                    )(
                        (
                            "__init__{extsep}py".format(extsep=extsep),
                            path.join(
                                self.module_hierarchy[1][1],
                                "{name}{extsep}py".format(
                                    name=self.module_hierarchy[1][0],
                                    extsep=extsep,
                                ),
                            ),
                            path.join(
                                self.module_hierarchy[2][1],
                                "{name}{extsep}py".format(
                                    name=self.module_hierarchy[2][0],
                                    extsep=extsep,
                                ),
                            ),
                            path.join(
                                self.module_hierarchy[0][1],
                                "{name}{extsep}py".format(
                                    name=self.module_hierarchy[0][0],
                                    extsep=extsep,
                                ),
                            ),
                        )
                    ),
                }.items()
            }

            self.assertDictEqual(result, expect)

            # Dry run must not have actually emitted anything
            self.check_emission(new_module_dir, dry_run=True)
    finally:
        # Clean up the pip install from create_fs even if assertions failed
        self._pip(["uninstall", "-y", self.module_name])
def create_fs(self, tempdir):
    """
    Populate filesystem from `tempdir` root with module hierarchy &etc. for later exposure (exmod)

    Builds, on disk: a `setup.py`, a `README.md`, and a pip-installable package
    named after the tempdir, containing parent/child/grandchild subpackages each
    exporting one `*Class`. Also sets `self.module_name`, `self.gold_dir`,
    `self.module_hierarchy` and related attributes for use by the tests.

    :param tempdir: Temporary directory
    :type tempdir: ```str```

    :returns: tempdir
    :rtype: ```str```
    """
    # The package is named after the tempdir's basename so each run is unique
    self.module_name, self.gold_dir = path.basename(tempdir), tempdir
    self.parent_name, self.parent_dir = "parent", "parent_dir"
    self.child_name, self.child_dir = "child", path.join(
        self.parent_dir, "child_dir"
    )
    self.grandchild_name, self.grandchild_dir = "grandchild", path.join(
        self.child_dir, "grandchild_dir"
    )
    self.module_hierarchy = (
        (self.parent_name, self.parent_dir),
        (self.child_name, self.child_dir),
        (self.grandchild_name, self.grandchild_dir),
    )

    with open(
        path.join(tempdir, "setup{extsep}py".format(extsep=extsep)), "wt"
    ) as f:
        f.write(
            setup_py_mock.format(encoding=ENCODING, package_name=self.module_name)
        )

    open(path.join(tempdir, "README{extsep}md".format(extsep=extsep)), "a").close()
    mkdir(path.join(tempdir, self.module_name))
    # Top-level __init__.py: imports every submodule and declares __all__
    with open(
        path.join(
            tempdir, self.module_name, "__init__{extsep}py".format(extsep=extsep)
        ),
        "wt",
    ) as f:
        f.write(
            "{encoding}\n\n"
            "{imports}\n"
            "__author__ = {author!r}\n"
            "__version__ = {version!r}\n\n"
            "{all__}\n".format(
                encoding=ENCODING,
                imports="\n".join(
                    (
                        # One `import <pkg>.<subdir>` line per hierarchy level,
                        # with path separators converted to module dots
                        "import {module_name}.{other_imports}\n".format(
                            module_name=self.module_name,
                            other_imports="\nimport {module_name}.".format(
                                module_name=self.module_name
                            ).join(
                                map(
                                    rpartial(str.replace, path.sep, "."),
                                    map(itemgetter(1), self.module_hierarchy),
                                )
                            ),
                        ),
                    )
                ),
                # module_name=self.module_name,
                # parent_name=self.parent_name,
                # cls_name="{name}Class".format(name=self.parent_name.title()),
                author=environ.get("CDD_AUTHOR", "Samuel Marks"),
                version=environ.get("CDD_VERSION", "0.0.0"),
                all__="__all__ = {__all__!r}".format(
                    __all__=list(
                        map(
                            rpartial(add, "_dir"),
                            (
                                self.parent_name,
                                self.child_name,
                                self.grandchild_name,
                            ),
                        )
                    )
                ),
            )
        )

    # Each hierarchy level gets its own package dir with an __init__.py
    # re-exporting a `<Name>Class` defined in <name>.py
    for name, _folder in self.module_hierarchy:
        folder = path.join(tempdir, self.module_name, _folder)
        mkdir(folder)
        cls_name = "{name}Class".format(name=name.title())
        with open(
            path.join(folder, "__init__{extsep}py".format(extsep=extsep)), "wt"
        ) as f:
            f.write(
                "{encoding}\n\n"
                "from .{name} import {cls_name}\n\n"
                "__all__ = [{cls_name!r}]\n".format(
                    encoding=ENCODING,
                    name=name,
                    cls_name=cls_name,
                )
            )
        with open(
            path.join(folder, "{name}{extsep}py".format(name=name, extsep=extsep)),
            "wt",
        ) as f:
            # `class_str` is a shared mock class body; retarget its name
            f.write(
                "{encoding}\n\n"
                "{imports_header}\n"
                "{class_str}\n\n"
                "__all__ = [{cls_name!r}]\n".format(
                    encoding=ENCODING,
                    imports_header=imports_header,
                    class_str=class_str.replace("ConfigClass", cls_name),
                    cls_name=cls_name,
                )
            )

    return tempdir