def test_annotate_ancestry(self) -> None:
    """Tests that `annotate_ancestry` properly decorates"""
    ann_assign = AnnAssign(
        annotation=Name("str", Load()),
        simple=1,
        target=Name("dataset_name", Store()),
        value=set_value("~/tensorflow_datasets"),
        expr=None,
        expr_target=None,
        expr_annotation=None,
    )
    assign = Assign(
        annotation=None,
        simple=1,
        targets=[Name("epochs", Store())],
        value=set_value("333"),
        expr=None,
        expr_target=None,
        expr_annotation=None,
        **maybe_type_comment
    )
    node = Module(body=[ann_assign, assign], stmt=None)
    # No `_location` before decoration …
    for child in node.body:
        self.assertFalse(hasattr(child, "_location"))
    annotate_ancestry(node)
    # … and the ancestry trail afterwards
    self.assertEqual(node.body[0]._location, ["dataset_name"])
    self.assertEqual(node.body[1]._location, ["epochs"])
def test_param2argparse_param_default_simple_type(self) -> None:
    """
    Tests that param2argparse_param works to change the type based on the default
    """
    # An int default overrides the declared "str" typ
    gold = Expr(
        Call(
            args=[set_value("--byo")],
            func=Attribute(Name("argument_parser", Load()), "add_argument", Load()),
            keywords=[
                keyword(arg="type", value=Name("int", Load()), identifier=None),
                keyword(arg="required", value=set_value(True), identifier=None),
                keyword(arg="default", value=set_value(5), identifier=None),
            ],
            expr=None,
            expr_func=None,
        )
    )
    run_ast_test(
        gen_ast=param2argparse_param(("byo", {"default": 5, "typ": "str"})),
        gold=gold,
        test_case_instance=self,
    )
def test_parse_out_param_fails(self) -> None:
    """Test that parse_out_param throws NotImplementedError when unsupported type given"""
    # A `Subscript` as the `type=` value is not handled by `parse_out_param`
    unsupported = Expr(
        Call(
            args=[set_value("--num")],
            func=Attribute(Name("argument_parser", Load()), "add_argument", Load()),
            keywords=[
                keyword(
                    arg="type",
                    value=Subscript(
                        expr_context_ctx=None,
                        expr_slice=None,
                        expr_value=None,
                    ),
                    identifier=None,
                ),
                keyword(arg="required", value=set_value(True), identifier=None),
            ],
            expr=None,
            expr_func=None,
        )
    )
    self.assertRaises(NotImplementedError, parse_out_param, unsupported)
def test_parse_to_scalar(self) -> None:
    """Test various inputs and outputs for `parse_to_scalar`"""
    # Scalars — and AST-wrapped scalars — come back as plain values
    for fst, snd in (
        (5, 5),
        ("5", "5"),
        (set_value(5), 5),
        (ast.Expr(None), NoneStr),
    ):
        self.assertEqual(parse_to_scalar(fst), snd)
    # A parsed expression statement keeps its AST (List) form …
    self.assertEqual(
        get_value(parse_to_scalar(ast.parse("[5]").body[0]).elts[0]), 5
    )
    self.assertTrue(
        cmp_ast(
            parse_to_scalar(ast.parse("[5]").body[0]),
            List([set_value(5)], Load()),
        )
    )
    # … whereas a whole `Module` round-trips back to source text
    self.assertEqual(parse_to_scalar(ast.parse("[5]")), "[5]")
    # Unsupported input types raise
    # (removed: a stray unchecked `parse_to_scalar(...)` call and an exact
    # duplicate of this `assertRaises` line added no coverage)
    self.assertRaises(NotImplementedError, parse_to_scalar, memoryview(b""))
def test_param2argparse_param_default_function(self) -> None:
    """
    Tests that param2argparse_param works to change the type based on the default
    whence said default is an in-memory function
    """
    function_str = "\n".join(
        (
            "from operator import add",
            "def adder(a, b):",
            "{tab}return add(a, b)".format(tab=tab),
        )
    )
    adder = getattr(inspectable_compile(function_str), "adder")
    pickled_adder = pickle.dumps(adder)  # eww
    # In-memory callables are smuggled through argparse as pickled bytes
    gold = Expr(
        Call(
            args=[set_value("--byo")],
            func=Attribute(Name("argument_parser", Load()), "add_argument", Load()),
            keywords=[
                keyword(
                    arg="type",
                    value=Name("pickle.loads", Load()),
                    identifier=None,
                ),
                keyword(
                    arg="default",
                    value=set_value(pickled_adder),
                    identifier=None,
                ),
            ],
            expr=None,
            expr_func=None,
        )
    )
    run_ast_test(
        gen_ast=param2argparse_param(("byo", {"default": adder, "typ": "str"})),
        gold=gold,
        test_case_instance=self,
    )
def test_param2argparse_param_default_torch(self) -> None:
    """
    Tests that param2argparse_param works to change the type based on the default
    whence said default is a proxy for an internal PyTorch type
    """

    class FakeTorch(object):
        """Not a real torch"""

        def __str__(self):
            """But a real str

            :returns: An actual str
            :rtype: ```Literal['<required parameter>']```
            """
            return "<required parameter>"

    # type("FakeTorch", tuple(), {"__str__": lambda _: "<required parameter>"})

    # The sentinel str makes the param required, typed by the class name
    gold = Expr(
        Call(
            args=[set_value("--byo")],
            func=Attribute(Name("argument_parser", Load()), "add_argument", Load()),
            keywords=[
                keyword(
                    arg="type",
                    value=Name(FakeTorch.__name__, Load()),
                    identifier=None,
                ),
                keyword(arg="required", value=set_value(True), identifier=None),
            ],
            expr=None,
            expr_func=None,
        )
    )
    run_ast_test(
        gen_ast=param2argparse_param(("byo", {"default": FakeTorch()})),
        gold=gold,
        test_case_instance=self,
    )
def test_emit_ann_assign(self) -> None:
    """Tests that AnnAssign is emitted from `emit_ann_assign`"""
    self.assertIsInstance(class_ast.body[1], AnnAssign)
    # Invoked twice on the same node — presumably a guard that a prior emit
    # does not mutate its input such that a repeat emit fails (TODO confirm)
    self.assertIsInstance(emit_ann_assign(class_ast.body[1]), AnnAssign)
    self.assertIsInstance(emit_ann_assign(class_ast.body[1]), AnnAssign)
    gold = AnnAssign(
        annotation=Name("str", Load()),
        simple=1,
        target=Name("dataset_name", Store()),
        value=set_value("~/tensorflow_datasets"),
        expr=None,
        expr_target=None,
        expr_annotation=None,
    )
    gen_ast = emit_ann_assign(
        find_in_ast(
            "C.function_name.dataset_name".split("."),
            class_with_method_and_body_types_ast,
        )
    )
    self.assertIsInstance(gen_ast, AnnAssign)
    run_ast_test(self, gen_ast, gold)
def test_replace_in_ast_with_val_on_non_function(self) -> None:
    """
    Tests that `RewriteAtQuery` can actually replace a node at given location
    """
    parsed_ast = ast_parse(class_str)
    rewrite_at_query = RewriteAtQuery(
        search="ConfigClass.dataset_name".split("."),
        replacement_node=AnnAssign(
            annotation=Name("int", Load()),
            simple=1,
            target=Name("dataset_name", Store()),
            value=set_value(15),
            expr=None,
            expr_target=None,
            expr_annotation=None,
        ),
    )
    gen_ast = rewrite_at_query.visit(parsed_ast)
    # Was `assertTrue(..., True)`, which passed `True` as the *msg* argument
    # and so would never fail on `replaced` being falsy
    self.assertTrue(rewrite_at_query.replaced)
    run_ast_test(
        self,
        gen_ast,
        ast.parse(
            class_str.replace(
                'dataset_name: str = "mnist"', "dataset_name: int = 15"
            )
        ),
    )
def test__set_name_and_type(self) -> None:
    """
    Tests that `_set_name_and_type` parsed AST code into a code str.
    Not working since I explicitly deleted the typ from
    ``` quoted defaults. Changed mock to match.
    """
    # Table of (extra param keys, expected output dict); a fresh BinOp is
    # built per case so one call cannot affect the next
    for extra, expected in (
        ({}, {"default": "```(5 * 5)```"}),
        ({"doc": ["5", "b"]}, {"default": "```(5 * 5)```", "doc": "5b"}),
    ):
        param = {"default": BinOp(set_value(5), Mult(), set_value(5))}
        param.update(extra)
        self.assertTupleEqual(
            _set_name_and_type(("adder", param), infer_type=True, word_wrap=True),
            ("adder", expected),
        )
def test_param2ast_with_wrapped_default(self) -> None:
    """Check that `param2ast` behaves correctly with a wrapped default"""
    # A wrapped `NoneStr` default yields `zion: object = None`
    gold = AnnAssign(
        annotation=Name("object", Load()),
        simple=1,
        target=Name("zion", Store()),
        value=set_value(None),
        expr=None,
        expr_target=None,
        expr_annotation=None,
    )
    gen_ast = param2ast(("zion", {"typ": None, "default": set_value(NoneStr)}))
    run_ast_test(self, gen_ast, gold=gold)
def test_param2argparse_param_default_ast_binop(self) -> None:
    """
    Tests that param2argparse_param works to change the type based on the default
    whence said default is a non specially handled ast.AST
    """
    # An arbitrary AST default is code-quoted into the `default=` keyword
    gold = Expr(
        Call(
            args=[set_value("--byo")],
            func=Attribute(Name("argument_parser", Load()), "add_argument", Load()),
            keywords=[
                keyword(arg="required", value=set_value(True), identifier=None),
                keyword(
                    arg="default",
                    value=set_value("```(5 * 5)```"),
                    identifier=None,
                ),
            ],
            expr=None,
            expr_func=None,
        )
    )
    default = BinOp(set_value(5), Mult(), set_value(5))
    run_ast_test(
        gen_ast=param2argparse_param(("byo", {"default": default, "typ": "str"})),
        gold=gold,
        test_case_instance=self,
    )
def _merge_name_to_column(assign):
    """
    Merge `a = Column()` into `Column("a")`

    :param assign: Of form `a = Column()`
    :type assign: ```Assign```

    :returns: Unwrapped Call with name prepended
    :rtype: ```Call```
    """
    call = assign.value
    # Prepend the assignment target's name as the Call's first argument
    call.args.insert(0, set_value(assign.targets[0].id))
    return call
def test_param2argparse_param_default_ast_tuple(self) -> None:
    """
    Tests that param2argparse_param works to change the type based on the default
    whence said default is an ast.Tuple
    """
    # An empty-tuple default serializes to "()" with a `loads` type
    gold = Expr(
        Call(
            args=[set_value("--byo")],
            func=Attribute(Name("argument_parser", Load()), "add_argument", Load()),
            keywords=[
                keyword(arg="type", value=Name("loads", Load()), identifier=None),
                keyword(arg="required", value=set_value(True), identifier=None),
                keyword(arg="default", value=set_value("()"), identifier=None),
            ],
            expr=None,
            expr_func=None,
        )
    )
    default = Tuple(elts=[], ctx=Load(), expr=None)
    run_ast_test(
        gen_ast=param2argparse_param(("byo", {"default": default, "typ": "str"})),
        gold=gold,
        test_case_instance=self,
    )
def test_gen_with_imports_from_file_and_prepended_import(self) -> None:
    """Tests `gen` with `imports_from_file` and `prepend`"""
    output_filename = os.path.join(
        self.tempdir,
        "test_gen_with_imports_from_file_and_prepended_import_output{extsep}py"
        .format(extsep=extsep),
    )
    # Silence anything `gen` prints to stdout/stderr during the run
    with patch("sys.stdout", new_callable=StringIO), patch(
        "sys.stderr", new_callable=StringIO
    ):
        self.assertIsNone(
            gen(
                name_tpl="{name}Config",
                input_mapping="gen_test_module.input_map",
                imports_from_file="gen_test_module",
                emit_name="class",
                parse_name="infer",
                prepend=_import_gen_test_module_str,
                output_filename=output_filename,
                emit_call=True,
                emit_default_doc=False,
            )
        )
    # Re-parse the generated file and compare it against the expected module
    with open(output_filename, "rt") as f:
        gen_ast = ast.parse(f.read())
    gold = Module(
        body=[
            _import_gen_test_module_ast,  # the `prepend`ed import
            _import_star_from_input_ast,  # from `imports_from_file`
            self.expected_class_ast,
            # self.input_module_ast.body[1],
            Assign(
                targets=[Name("__all__", Store())],
                value=List(
                    ctx=Load(),
                    elts=[set_value("FooConfig")],
                    expr=None,
                ),
                expr=None,
                lineno=None,
                **maybe_type_comment
            ),
        ],
        type_ignores=[],
        stmt=None,
    )
    run_ast_test(
        self,
        gen_ast=gen_ast,
        gold=gold,
    )
def test_module_docstring(self) -> None:
    """Tests that module gets the right new docstring"""
    module_node = Module(
        body=[Expr(set_value("\nModule\n"))],
        stmt=None,
        type_ignores=[],
    )
    gold = deepcopy(module_node)
    # Visiting should leave an already-correct module docstring untouched
    DocTrans(
        docstring_format="rest",
        type_annotations=True,
        existing_type_annotations=True,
        whole_ast=module_node,
    ).visit_Module(module_node)
    run_ast_test(self, gen_ast=module_node, gold=gold)
def test_param2ast_with_assign(self) -> None:
    """Check that `param2ast` behaves correctly with a non annotated (typeless) input"""
    # A typeless param falls back to `object` annotated with a `None` value
    gold = AnnAssign(
        annotation=Name("object", Load()),
        simple=1,
        target=Name("zion", Store()),
        value=set_value(None),
        expr=None,
        expr_target=None,
        expr_annotation=None,
    )
    run_ast_test(self, param2ast(("zion", {"typ": None})), gold=gold)
def test_find_in_ast(self) -> None:
    """Tests that `find_in_ast` successfully finds nodes in AST"""
    gold = AnnAssign(
        annotation=Name("str", Load()),
        simple=1,
        target=Name("dataset_name", Store()),
        value=set_value("mnist"),
        expr=None,
        expr_target=None,
        expr_annotation=None,
    )
    found = find_in_ast("ConfigClass.dataset_name".split("."), class_ast)
    run_ast_test(self, found, gold)
def test_emit_arg(self) -> None:
    """Tests that `arg` is emitted from `emit_arg`"""
    # An existing `arg` passes straight through
    method_arg = class_with_method_and_body_types_ast.body[1].args.args[1]
    self.assertIsInstance(method_arg, arg)
    self.assertIsInstance(emit_arg(method_arg), arg)
    # An `Assign` is converted into an `arg` named after its target
    assign = Assign(
        targets=[Name("yup", Store())],
        value=set_value("nup"),
        expr=None,
        **maybe_type_comment
    )
    gen_ast = emit_arg(assign)
    self.assertIsInstance(gen_ast, arg)
    run_ast_test(self, gen_ast=gen_ast, gold=set_arg("yup"))
def test_param2ast_with_bad_default(self) -> None:
    """Check that `param2ast` behaves correctly with a bad default"""
    # An unparseable default gets code-quoted rather than evaluated
    gold = AnnAssign(
        annotation=Name("NoneType", Load()),
        simple=1,
        target=Name("stateful_metrics", Store()),
        value=set_value("```the `Model`'s metrics```"),
        expr=None,
        expr_annotation=None,
        expr_target=None,
    )
    gen_ast = param2ast(
        (
            "stateful_metrics",
            {"typ": "NoneType", "default": "the `Model`'s metrics"},
        )
    )
    run_ast_test(self, gen_ast, gold=gold)
def test_param2argparse_param_none_default(self) -> None:
    """
    Tests that param2argparse_param works to reparse the default
    """
    # A `NoneStr` default produces a bare `add_argument("--yup")`
    gold = Expr(
        Call(
            args=[set_value("--yup")],
            func=Attribute(Name("argument_parser", Load()), "add_argument", Load()),
            keywords=[],
            expr=None,
            expr_func=None,
        )
    )
    run_ast_test(
        gen_ast=param2argparse_param(("yup", {"default": NoneStr})),
        gold=gold,
        test_case_instance=self,
    )
def reindent_docstring(node, indent_level=1):
    """
    Reindent the docstring

    :param node: AST node
    :type node: ```ast.AST```

    :param indent_level: docstring indentation level whence: 0=no_tabs, 1=one tab; 2=two tabs
    :type indent_level: ```int```

    :returns: Node with reindent docstring
    :rtype: ```ast.AST```
    """
    doc_str = ast.get_docstring(node)
    if doc_str is not None:
        # Replace the docstring Expr wholesale: wrap with newline + indent on
        # both ends, and re-indent each already-tab-indented body line to two
        # tabs — unless its first word (sans trailing ':'/'s') is a known reST
        # token (e.g. `:param`), in which case the line is left as-is.
        node.body[0] = ast.Expr(
            set_value(
                "\n{tab}{s}\n{tab}".format(
                    tab=tab * abs(indent_level),
                    s="\n".join(
                        map(
                            lambda line: "{sep}{line}".format(
                                sep=tab * 2, line=line.lstrip()
                            )
                            if line.startswith(tab)
                            and len(line) > len(tab)
                            # NOTE(review): the slice bounds mix indexes from
                            # the stripped and unstripped line — presumably
                            # intended to grab the first word; confirm on
                            # lines with unusual leading whitespace
                            and line[
                                len(tab) : line.lstrip().find(" ") + len(tab)
                            ].rstrip(":s")
                            not in frozenset((False,) + TOKENS.rest)
                            else line,
                            reindent(doc_str).splitlines(),
                        )
                    ),
                )
            )
        )
    return node
def exmod(
    module,
    emit_name,
    blacklist,
    whitelist,
    output_directory,
    dry_run,
    filesystem_layout="as_input",
):
    """
    Expose module as `emit` types into `output_directory`

    :param module: Module name or path
    :type module: ```str```

    :param emit_name: What type(s) to generate.
    :type emit_name: ```List[Literal["argparse", "class", "function", "sqlalchemy", "sqlalchemy_table"]]```

    :param blacklist: Modules/FQN to omit. If unspecified will emit all (unless whitelist).
    :type blacklist: ```List[str]```

    :param whitelist: Modules/FQN to emit. If unspecified will emit all (minus blacklist).
    :type whitelist: ```List[str]```

    :param output_directory: Where to place the generated exposed interfaces to the given `--module`.
    :type output_directory: ```str```

    :param dry_run: Show what would be created; don't actually write to the filesystem
    :type dry_run: ```bool```

    :param filesystem_layout: Hierarchy of folder and file names generated. "java" is file per package per name.
    :type filesystem_layout: ```Literal["java", "as_input"]```
    """
    # Dry runs only print the actions they would take
    if dry_run:
        print("mkdir\t{output_directory!r}".format(
            output_directory=output_directory))
    elif not path.isdir(output_directory):
        makedirs(output_directory)

    # Filtering is not implemented yet — fail loudly rather than silently skip
    if blacklist:
        raise NotImplementedError("blacklist")
    elif whitelist:
        raise NotImplementedError("whitelist")

    module_name, new_module_name = map(path.basename, (module, output_directory))
    # `module` may be a directory path (load from file) or an importable name
    module = (partial(module_from_file, module_name=module_name)
              if path.isdir(module) else import_module)(module)

    module_root_dir = path.dirname(module.__file__) + path.sep

    _mkdir_and_emit_file = partial(
        mkdir_and_emit_file,
        emit_name=emit_name,
        module_name=module_name,
        new_module_name=new_module_name,
        filesystem_layout=filesystem_layout,
        output_directory=output_directory,
        dry_run=dry_run,
    )

    # Might need some `groupby` in case multiple files are in the one project; same for `get_module_contents`
    imports = list(
        map(
            _mkdir_and_emit_file,
            map(
                lambda name_source: (
                    name_source[0],
                    # Relativize the source filename against the module root
                    (lambda filename:
                     filename[len(module_name) + 1:]
                     if filename.startswith(module_name)
                     else filename)
                    (relative_filename(getfile(name_source[1]))),
                    # Dry runs skip parsing entirely
                    {
                        "params": OrderedDict(),
                        "returns": OrderedDict()
                    } if dry_run else parse.class_(name_source[1]),
                ),
                # sorted(
                map(
                    lambda name_source: (
                        name_source[0][len(module_name) + 1:],
                        name_source[1],
                    ),
                    get_module_contents(
                        module, module_root_dir=module_root_dir).items(),
                ),
                # key=itemgetter(0),
                # ),
            ),
        ),
    )
    assert len(imports), "Module contents are empty"

    # Group emitted files by origin module so each gets one `ImportFrom`
    # NOTE(review): `groupby` assumes `imports` arrive sorted by key — confirm
    modules_names = tuple(
        map(
            lambda name_module: (
                name_module[0],
                tuple(map(itemgetter(1), name_module[1])),
            ),
            groupby(
                map(
                    lambda node_mod: (
                        node_mod[0],
                        node_mod[2].module,
                    ),
                    imports,
                ),
                itemgetter(0),
            ),
        ))

    init_filepath = path.join(output_directory, new_module_name,
                              "__init__{extsep}py".format(extsep=extsep))
    if dry_run:
        print("write\t{init_filepath!r}".format(init_filepath=init_filepath))
    else:
        # Emit an `__init__.py` that re-exports everything and sets `__all__`
        emit.file(
            Module(
                body=list(
                    chain.from_iterable((
                        (Expr(set_value("\nExport internal imports\n")), ),
                        map(
                            lambda module_names: ImportFrom(
                                module=module_names[0],
                                names=list(
                                    map(
                                        lambda names: alias(
                                            names,
                                            None,
                                            identifier=None,
                                            identifier_name=None,
                                        ),
                                        module_names[1],
                                    )),
                                level=1,
                                identifier=None,
                            ),
                            modules_names,
                        ),
                        (Assign(targets=[Name("__all__", Store())],
                                value=List(
                                    ctx=Load(),
                                    elts=list(
                                        map(
                                            set_value,
                                            sorted(
                                                frozenset(
                                                    chain.from_iterable(
                                                        map(
                                                            itemgetter(1),
                                                            modules_names,
                                                        )),
                                                )),
                                        )),
                                    expr=None,
                                ),
                                expr=None,
                                lineno=None,
                                **maybe_type_comment), ),
                    ))),
                stmt=None,
                type_ignores=[],
            ),
            init_filepath,
            mode="wt",
        )
Name, Return, Store, arguments, ) from copy import deepcopy from cdd.ast_utils import set_arg, set_value from cdd.pure_utils import tab _class_doc_str_expr = Expr( set_value( "\n" "Class mock" "\n\n" ":cvar a: One swell num" "\n\n" ":cvar b: Unlucky num" "\n" ) ) assign_with_type_comment = Assign( targets=[Name("res", Store())], value=BinOp( left=Name("a", Load()), op=Add(), right=Name("b", Load()), ), type_comment=Name("int", Load()), lineno=None,
def populate_files(tempdir, input_module_str=None):
    """
    Populate files in the tempdir

    :param tempdir: Temporary directory
    :type tempdir: ```str```

    :param input_module_str: Input string to write to the input_filename. If None, uses preset mock module.
    :type input_module_str: ```Optional[str]```

    :returns: input filename, input module AST, input class AST, expected output class AST
    :rtype: ```Tuple[str, Module, AST, AST]```
    """
    input_filename = os.path.join(tempdir, "input{extsep}py".format(extsep=extsep))
    input_class_name = "Foo"
    # Build the mock `Foo` class from the shared `method_adder_ast` fixture
    input_class_ast = emit.class_(
        parse.function(deepcopy(method_adder_ast)),
        emit_call=False,
        class_name=input_class_name,
    )
    # Module containing the class plus `input_map` and `__all__` definitions
    input_module_ast = Module(
        body=[
            input_class_ast,
            Assign(targets=[Name("input_map", Store())],
                   value=Dict(
                       keys=[set_value(input_class_name)],
                       values=[Name(input_class_name, Load())],
                       expr=None,
                   ),
                   expr=None,
                   lineno=None,
                   **maybe_type_comment),
            Assign(
                targets=[Name("__all__", Store())],
                value=List(
                    ctx=Load(),
                    elts=[set_value(input_class_name), set_value("input_map")],
                    expr=None,
                ),
                expr=None,
                lineno=None,
                **maybe_type_comment),
        ],
        type_ignores=[],
        stmt=None,
    )
    input_module_str = input_module_str or to_code(input_module_ast)
    # expected_output_class_str = (
    #     "class FooConfig(object):\n"
    #     '    """\n'
    #     "    The amazing Foo\n\n"
    #     "    :cvar a: An a. Defaults to 5\n"
    #     '    :cvar b: A b. Defaults to 16"""\n'
    #     "    a = 5\n"
    #     "    b = 16\n\n"
    #     "    def __call__(self):\n"
    #     "        self.a = 5\n"
    #     "        self.b = 16\n"
    # )
    # Same fixture, emitted with `__call__` and the `Config` suffix
    expected_class_ast = emit.class_(
        parse.function(deepcopy(method_adder_ast)),
        emit_call=True,
        class_name="{input_class_name}Config".format(
            input_class_name=input_class_name),
    )
    with open(input_filename, "wt") as f:
        f.write(input_module_str)
    return input_filename, input_module_ast, input_class_ast, expected_class_ast
def test_infer_docstring(self) -> None:
    """
    Test `infer` can figure out the right parser name when its expected to be `docstring`
    """
    # Both the raw empty string and its AST-wrapped form infer as "docstring"
    for empty in ("", set_value("")):
        self.assertEqual(infer(empty), "docstring")
def _make_call_meth(body, return_type, param_names, docstring_format, word_wrap):
    """
    Construct a `__call__` method from the provided `body`

    :param body: The body, probably from a FunctionDef.body
    :type body: ```List[AST]```

    :param return_type: The return type of the parent symbol (probably class). Used to fill in `__call__` return.
    :type return_type: ```Optional[str]```

    :param param_names: Container of AST `id`s to match for rename
    :type param_names: ```Optional[Iterator[str]]```

    :param docstring_format: Format of docstring
    :type docstring_format: ```Literal['rest', 'numpydoc', 'google']```

    :param word_wrap: Whether to word-wrap. Set `DOCTRANS_LINE_LENGTH` to configure length.
    :type word_wrap: ```bool```

    :returns: Internal function for `__call__`
    :rtype: ```FunctionDef```
    """
    body_len = len(body)
    if body_len:
        if isinstance(body, dict):
            # Build the method body from a param dict: an optional docstring
            # Expr (dropped via filter(None, …) when doc is empty) followed by
            # a Return of the default
            body = list(
                filter(
                    None,
                    (
                        None if body.get("doc") in none_types else Expr(
                            set_value(
                                emit_param_str(
                                    (
                                        "return_type",
                                        {
                                            "doc": multiline(
                                                indent_all_but_first(
                                                    body["doc"]))
                                        },
                                    ),
                                    style=docstring_format,
                                    word_wrap=word_wrap,
                                ))),
                        # A code-quoted default is parsed back into an AST
                        # expression (with params renamed); anything else is
                        # returned as a literal value
                        RewriteName(param_names).visit(
                            Return(
                                get_value(
                                    ast.parse(return_type.strip("`")).body[0]),
                                expr=None,
                            )) if code_quoted(body["default"]) else Return(
                                set_value(body["default"]), expr=None),
                    ),
                ))
    # Wrap the body in `def __call__(self): …`; None when there is no body
    return (ast.fix_missing_locations(
        FunctionDef(args=arguments(
            args=[set_arg("self")],
            defaults=[],
            kw_defaults=[],
            kwarg=None,
            kwonlyargs=[],
            posonlyargs=[],
            vararg=None,
            arg=None,
        ),
                    body=body,
                    decorator_list=[],
                    name="__call__",
                    returns=None,
                    arguments_args=None,
                    identifier_name=None,
                    stmt=None,
                    lineno=None,
                    **maybe_type_comment)) if body else None)
from os.path import extsep from shutil import rmtree from tempfile import mkdtemp from unittest import TestCase from unittest.mock import patch from cdd import emit, parse from cdd.ast_utils import maybe_type_comment, set_value from cdd.gen import gen from cdd.pure_utils import rpartial from cdd.source_transformer import to_code from cdd.tests.mocks.methods import function_adder_ast from cdd.tests.utils_for_tests import run_ast_test method_adder_ast = deepcopy(function_adder_ast) method_adder_ast.body[0] = Expr(set_value(" C class (mocked!) ")) method_adder_ast.decorator_list = [Name("staticmethod", Load())] del function_adder_ast def populate_files(tempdir, input_module_str=None): """ Populate files in the tempdir :param tempdir: Temporary directory :type tempdir: ```str``` :param input_module_str: Input string to write to the input_filename. If None, uses preset mock module. :type input_module_str: ```Optional[str]``` :returns: input filename, input str, expected_output
def parse_out_param(expr, require_default=False, emit_default_doc=True):
    """
    Turns the class_def repr of '--dataset_name', type=str, help='name of dataset.', required=True, default='mnist'
      into
        Tuple[Literal['dataset_name'], {"typ": Literal["str"], "doc": Literal["name of dataset."], "default": Literal["mnist"]}]

    :param expr: Expr
    :type expr: ```Expr```

    :param require_default: Whether a default is required, if not found in doc, infer the proper default from type
    :type require_default: ```bool```

    :param emit_default_doc: Whether help/docstring should include 'With default' text
    :type emit_default_doc: ```bool```

    :returns: Name, dict with keys: 'typ', 'doc', 'default'
    :rtype: ```Tuple[str, dict]```
    """
    # `required=` keyword; treated as False when absent
    required = get_value(
        get_value(
            next(
                (keyword for keyword in expr.value.keywords
                 if keyword.arg == "required"),
                set_value(False),
            )))

    # `type=` keyword; falls back to "str" when absent
    typ = next(
        (_handle_value(get_value(key_word))
         for key_word in expr.value.keywords if key_word.arg == "type"),
        "str",
    )
    # Strip the leading "--" off the flag to recover the parameter name
    name = get_value(expr.value.args[0])[len("--"):]
    default = next(
        (get_value(key_word.value)
         for key_word in expr.value.keywords if key_word.arg == "default"),
        None,
    )
    # `help=` text, appending " Defaults to <default>" unless the default is
    # absent/empty, suppression was requested, or the text already says so
    doc = (lambda help_: help_ if help_ is None else
           (help_ if default is None or emit_default_doc is False or
            (hasattr(default, "__len__") and len(default) == 0
             ) or "defaults to" in help_ or "Defaults to" in help_ else
            "{help} Defaults to {default}".format(
                help=help_ if help_.endswith(".") else "{}.".format(help_),
                default=default,
            )))(next(
                (get_value(key_word.value)
                 for key_word in expr.value.keywords
                 if key_word.arg == "help" and key_word.value),
                None,
            ))
    if default is None:
        # Try to recover a default embedded in the doc text itself
        doc, default = extract_default(doc, emit_default_doc=emit_default_doc)
    if default is None:
        # Still no default: infer one from the type (required) or use the
        # None sentinel (explicitly requested / Optional type)
        if required:
            # if name.endswith("kwargs"):
            #    default = NoneStr
            # else:
            default = simple_types[typ] if typ in simple_types else NoneStr

        elif require_default or typ.startswith("Optional"):
            default = NoneStr
    # `action=` and `choices=` can further refine the type below
    action = next(
        (get_value(key_word.value)
         for key_word in expr.value.keywords if key_word.arg == "action"),
        None,
    )

    typ = next(
        (_handle_keyword(keyword, typ)
         for keyword in expr.value.keywords if keyword.arg == "choices"),
        typ,
    )
    if action == "append":
        typ = "List[{typ}]".format(typ=typ)

    if not required and "Optional" not in typ:
        typ = "Optional[{typ}]".format(typ=typ)

    return name, dict(doc=doc,
                      typ=typ,
                      **({} if default is None else {
                          "default": default
                      }))
def generate_repr_method(params, cls_name, docstring_format):
    """
    Generate a `__repr__` method with all params, using `str.format` syntax

    :param params: an `OrderedDict` of form
        OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
    :type params: ```OrderedDict```

    :param cls_name: Name of class
    :type cls_name: ```str```

    :param docstring_format: Format of docstring
    :type docstring_format: ```Literal['rest', 'numpydoc', 'google']```

    :returns: `__repr__` method
    :rtype: ```FunctionDef```
    """
    keys = tuple(params.keys())
    return FunctionDef(
        name="__repr__",
        args=arguments(
            posonlyargs=[],
            arg=None,
            args=[
                arg(arg="self",
                    annotation=None,
                    expr=None,
                    identifier_arg=None,
                    **maybe_type_comment)
            ],
            kwonlyargs=[],
            kw_defaults=[],
            defaults=[],
            vararg=None,
            kwarg=None,
        ),
        body=[
            # Docstring, chosen by the requested docstring format
            Expr(
                set_value("""\n{sep}{_repr_docstring}""".format(
                    sep=tab * 2,
                    _repr_docstring=(docstring_repr_str
                                     if docstring_format == "rest" else
                                     docstring_repr_google_str).lstrip(),
                ))),
            # `return "Cls(key={key!r}, …)".format(key=self.key, …)`
            Return(
                value=Call(
                    func=Attribute(
                        set_value("{cls_name}({format_args})".format(
                            cls_name=cls_name,
                            format_args=", ".join(
                                map("{0}={{{0}!r}}".format, keys)),
                        )),
                        "format",
                        Load(),
                    ),
                    args=[],
                    keywords=list(
                        map(
                            lambda key: ast.keyword(
                                arg=key,
                                value=Attribute(Name("self", Load()), key,
                                                Load()),
                                identifier=None,
                            ),
                            keys,
                        )),
                    expr=None,
                    expr_func=None,
                ),
                expr=None,
            ),
        ],
        decorator_list=[],
        arguments_args=None,
        identifier_name=None,
        stmt=None,
        lineno=None,
        returns=None,
        **maybe_type_comment)
def param_to_sqlalchemy_column_call(param, include_name):
    """
    Turn a param into a `Column(…)`

    :param param: Name, dict with keys: 'typ', 'doc', 'default'
    :type param: ```Tuple[str, dict]```

    :param include_name: Whether to include the name (exclude in declarative base)
    :type include_name: ```bool```

    :returns: Form of: `Column(…)`
    :rtype: ```Call```
    """
    # (removed: a leftover macOS-only debug `print` of `include_name`)
    name, _param = param
    del param

    args, keywords, nullable = [], [], None

    # `Optional[…]` unwraps to its inner type with `nullable=True`
    if _param["typ"].startswith("Optional["):
        _param["typ"] = _param["typ"][len("Optional["):-1]
        nullable = True

    if include_name:
        args.append(set_value(name))

    if "Literal[" in _param["typ"]:
        # `Literal[…]` maps to `Enum(<members>, name=<column name>)`
        parsed_typ = get_value(ast.parse(_param["typ"]).body[0])
        assert (parsed_typ.value.id == "Literal"
                ), "Only basic Literal support is implemented, not {}".format(
                    parsed_typ.value.id)
        args.append(
            Call(
                func=Name("Enum", Load()),
                args=get_value(parsed_typ.slice).elts,
                keywords=[
                    ast.keyword(arg="name", value=set_value(name), identifier=None)
                ],
                expr=None,
                expr_func=None,
            ))
    else:
        args.append(Name(typ2column_type[_param["typ"]], Load()))

    # `ast` is used as a sentinel: absent key ≠ explicit None default
    has_default = _param.get("default", ast) is not ast
    pk = _param.get("doc", "").startswith("[PK]")
    if pk:
        _param["doc"] = _param["doc"][4:].lstrip()
    elif has_default and _param["default"] not in none_types:
        nullable = False

    keywords.append(
        ast.keyword(arg="doc",
                    value=set_value(_param["doc"].rstrip(".")),
                    identifier=None))

    if has_default:
        if _param["default"] == NoneStr:
            _param["default"] = None
        keywords.append(
            ast.keyword(
                arg="default",
                value=set_value(_param["default"]),
                identifier=None,
            ))

    # Sorting :\
    if pk:
        keywords.append(
            ast.keyword(arg="primary_key", value=set_value(True),
                        identifier=None), )
    if isinstance(nullable, bool):
        keywords.append(
            ast.keyword(arg="nullable",
                        value=set_value(nullable),
                        identifier=None))

    return Call(
        func=Name("Column", Load()),
        args=args,
        keywords=keywords,
        expr=None,
        expr_func=None,
    )