def test_to_docstring(self) -> None:
    """
    Confirm that `emit.docstring` renders `class_ast` as `docstring_str`
    when defaults are included in the emitted doc
    """
    generated = emit.docstring(parse.class_(class_ast), emit_default_doc=True)
    self.assertEqual(generated, docstring_str)
def test_from_class(self) -> None:
    """
    Confirm that `parse.class_` derives `intermediate_repr_no_default_doc`
    from `class_ast`
    """
    intermediate_repr = parse.class_(class_ast)
    intermediate_repr.pop("_internal")  # Not needed for this test
    self.assertDictEqual(intermediate_repr, intermediate_repr_no_default_doc)
def test_to_docstring_emit_default_doc_false(self) -> None:
    """
    Tests whether `docstring` produces `docstring_no_default_str`
    given `class_ast` when `emit_default_doc=False`
    """
    ir = parse.class_(class_ast)
    self.assertEqual(
        emit.docstring(ir, emit_default_doc=False),
        docstring_no_default_str,
    )
def test_from_class_and_function_in_memory(self) -> None:
    """
    Verify the parser merges the outer class docstring + structure of
    `RewriteAtQuery` with its inner `__init__` parameter defaults
    """
    ir = parse.class_(
        RewriteAtQuery,
        merge_inner_function="__init__",
        infer_type=True,
    )
    ir.pop("_internal")  # Not needed for this test

    expected_params = OrderedDict(
        (
            (
                "search",
                {
                    "doc": "Search query, e.g., ['node_name', 'function_name', 'arg_name']",
                    "typ": "List[str]",
                },
            ),
            (
                "replacement_node",
                {"doc": "Node to replace this search", "typ": "AST"},
            ),
            (
                "replaced",
                {
                    "doc": "Whether a node has been replaced (only replaces first occurrence)"
                },
            ),
        )
    )
    self.assertDictEqual(
        ir,
        {
            "doc": "Replace the node at query with given node",
            "name": "RewriteAtQuery",
            "params": expected_params,
            "returns": None,
        },
    )
def check_emission(self, tempdir, dry_run=False):
    """
    Confirm whether emission conforms to gen by verifying their IRs are equivalent

    :param tempdir: Temporary directory
    :type tempdir: ```str```

    :param dry_run: Show what would be created; don't actually write to the filesystem
    :type dry_run: ```bool```
    """
    new_module_name = path.basename(tempdir)
    # Walk each (file name, folder) pair of the expected module layout,
    # comparing the generated tree under `tempdir` against the gold tree
    for name, folder in self.module_hierarchy:
        gen_folder = path.join(tempdir, new_module_name, folder)
        gold_folder = path.join(self.gold_dir, self.module_name, folder)

        def _open(_folder):
            """
            :param _folder: Folder to join on
            :type _folder: ```str```

            :returns: Open IO
            :rtype: ```open```
            """
            # `name`/`extsep` are captured from the enclosing loop scope
            return open(
                path.join(
                    _folder, "{name}{extsep}py".format(name=name, extsep=extsep)
                ),
                "rt",
            )

        # The gold folder must always exist; the gen folder only exists
        # when emission actually wrote to the filesystem (not a dry run)
        self.assertTrue(path.isdir(gold_folder))
        gen_is_dir = path.isdir(gen_folder)
        if dry_run:
            self.assertFalse(gen_is_dir)
        else:
            self.assertTrue(gen_is_dir)
            with _open(gen_folder) as gen, _open(gold_folder) as gold:
                # Parse the first ClassDef out of each file into an IR,
                # then compare the two IRs for semantic equivalence
                gen_ir, gold_ir = map(
                    lambda node: parse.class_(
                        next(
                            filter(
                                rpartial(isinstance, ClassDef),
                                ast_parse(node.read()).body,
                            )
                        )
                    ),
                    (gen, gold),
                )
            self.assertDictEqual(gold_ir, gen_ir)
def test_to_argparse(self) -> None:
    """
    Check that `emit.argparse_function` built from `class_ast`
    matches `argparse_func_ast`
    """
    generated = emit.argparse_function(
        parse.class_(class_ast),
        emit_default_doc=False,
    )
    run_ast_test(
        self,
        reindent_docstring(generated),
        gold=reindent_docstring(argparse_func_ast),
    )
def test_to_argparse_func_nargs(self) -> None:
    """
    Check that the generated argparse function sets `action="append"` properly
    """
    generated = emit.argparse_function(
        parse.class_(class_nargs_ast),
        emit_default_doc=False,
        function_name="set_cli_action_append",
    )
    run_ast_test(
        self,
        gen_ast=generated,
        gold=argparse_func_action_append_ast,
    )
def test_from_adadelta_class_in_memory(self) -> None:
    """
    Tests that `parse.class_` produces the expected IR from an `Adadelta`
    class compiled into the current interpreter's memory
    """
    # Compile the Google-style TF Adadelta fixture source, then pull out
    # the resulting `Adadelta` class object
    Adadelta = getattr(
        inspectable_compile(docstring_google_tf_adadelta_function_str),
        "Adadelta",
    )
    ir = parse.class_(Adadelta)
    del ir["_internal"]  # Not needed for this test
    self.assertDictEqual(
        ir,
        docstring_google_tf_adadelta_function_ir,
    )
def test_to_argparse_google_tf_tensorboard(self) -> None:
    """
    Check `emit.argparse_function` output for `class_google_tf_tensorboard_ast`
    against `argparse_function_google_tf_tensorboard_ast`
    """
    ir = parse.class_(
        class_google_tf_tensorboard_ast, merge_inner_function="__init__"
    )
    run_ast_test(
        self,
        gen_ast=emit.argparse_function(
            ir, emit_default_doc=False, word_wrap=False
        ),
        gold=argparse_function_google_tf_tensorboard_ast,
    )
def test_from_class_in_memory(self) -> None:
    """
    Tests that `parse.class_` works on a class defined in the
    memory of the current interpreter
    """

    class A(object):
        """A is one boring class"""

    ir = parse.class_(A)
    ir.pop("_internal")  # Not needed for this test
    expected = {
        "doc": "A is one boring class",
        "name": "TestParsers.test_from_class_in_memory.<locals>.A",
        "params": OrderedDict(),
        "returns": None,
    }
    self.assertDictEqual(ir, expected)
def test_from_class_torch_nn_one_cycle_lr(self) -> None:
    """
    Tests parsing of a PyTorch LR-scheduler class: the outer class
    docstring + structure is merged with the inner `__init__` defaults
    """
    # Sanity check: the AST fixture matches its source-string fixture
    run_ast_test(
        self,
        class_torch_nn_one_cycle_lr_ast,
        gold=ast.parse(class_torch_nn_one_cycle_lr_str).body[0],
    )

    ir = parse.class_(
        class_torch_nn_one_cycle_lr_ast,
        merge_inner_function="__init__",
        infer_type=True,
    )
    ir.pop("_internal")  # Not needed for this test
    self.assertDictEqual(ir, class_torch_nn_one_cycle_lr_ir)
def test_from_class_and_function(self) -> None:
    """
    Tests that the parser merges the outer class docstring + structure of
    the TensorBoard fixture with its inner `__init__` parameter defaults
    """
    # Sanity check: the AST fixture matches its source-string fixture
    run_ast_test(
        self,
        class_google_tf_tensorboard_ast,
        gold=ast.parse(class_google_tf_tensorboard_str).body[0],
    )

    ir = parse.class_(
        class_google_tf_tensorboard_ast,
        merge_inner_function="__init__",
        infer_type=True,
    )
    ir.pop("_internal")  # Not needed for this test
    self.assertDictEqual(ir, class_google_tf_tensorboard_ir)
def exmod(
    module,
    emit_name,
    blacklist,
    whitelist,
    output_directory,
    dry_run,
    filesystem_layout="as_input",
):
    """
    Expose module as `emit` types into `output_directory`

    :param module: Module name or path
    :type module: ```str```

    :param emit_name: What type(s) to generate.
    :type emit_name: ```List[Literal["argparse", "class", "function", "sqlalchemy", "sqlalchemy_table"]]```

    :param blacklist: Modules/FQN to omit. If unspecified will emit all (unless whitelist).
    :type blacklist: ```List[str]```

    :param whitelist: Modules/FQN to emit. If unspecified will emit all (minus blacklist).
    :type whitelist: ```List[str]```

    :param output_directory: Where to place the generated exposed interfaces to the given `--module`.
    :type output_directory: ```str```

    :param dry_run: Show what would be created; don't actually write to the filesystem
    :type dry_run: ```bool```

    :param filesystem_layout: Hierarchy of folder and file names generated. "java" is file per package per name.
    :type filesystem_layout: ```Literal["java", "as_input"]```
    """
    # In dry-run mode, only announce the mkdir; otherwise create the output dir
    if dry_run:
        print(
            "mkdir\t{output_directory!r}".format(output_directory=output_directory)
        )
    elif not path.isdir(output_directory):
        makedirs(output_directory)

    # Filtering is not implemented yet; fail loudly rather than silently ignoring
    if blacklist:
        raise NotImplementedError("blacklist")
    elif whitelist:
        raise NotImplementedError("whitelist")

    module_name, new_module_name = map(path.basename, (module, output_directory))
    # Resolve `module`: load from a directory path, or import by name
    module = (
        partial(module_from_file, module_name=module_name)
        if path.isdir(module)
        else import_module
    )(module)
    module_root_dir = path.dirname(module.__file__) + path.sep

    # Pre-bind all invariant keyword arguments of the per-member emitter
    _mkdir_and_emit_file = partial(
        mkdir_and_emit_file,
        emit_name=emit_name,
        module_name=module_name,
        new_module_name=new_module_name,
        filesystem_layout=filesystem_layout,
        output_directory=output_directory,
        dry_run=dry_run,
    )
    # Might need some `groupby` in case multiple files are in the one project; same for `get_module_contents`
    # For each (name, source) member of the module, build a
    # (name, relative filename, IR) triple and emit it; in dry-run mode an
    # empty IR stands in so no parsing is needed
    imports = list(
        map(
            _mkdir_and_emit_file,
            map(
                lambda name_source: (
                    name_source[0],
                    # Strip the leading "<module_name>." / "<module_name>/"
                    # prefix from the relative filename, when present
                    (
                        lambda filename: filename[len(module_name) + 1 :]
                        if filename.startswith(module_name)
                        else filename
                    )(relative_filename(getfile(name_source[1]))),
                    {"params": OrderedDict(), "returns": OrderedDict()}
                    if dry_run
                    else parse.class_(name_source[1]),
                ),
                # sorted(
                map(
                    lambda name_source: (
                        name_source[0][len(module_name) + 1 :],
                        name_source[1],
                    ),
                    get_module_contents(
                        module, module_root_dir=module_root_dir
                    ).items(),
                ),
                # key=itemgetter(0),
                # ),
            ),
        ),
    )
    assert len(imports), "Module contents are empty"

    # Group the emitted imports by their first element into
    # (key, tuple-of-modules) pairs.
    # NOTE(review): `groupby` only merges *adjacent* equal keys, and the
    # `sorted(...)` wrapper above is commented out — confirm the input is
    # already ordered by key, else equal keys may yield multiple groups.
    # NOTE(review): assumes each element of `imports` is a 3-tuple whose
    # third item has a `.module` attribute (from `mkdir_and_emit_file`) —
    # verify against that helper's return value.
    modules_names = tuple(
        map(
            lambda name_module: (
                name_module[0],
                tuple(map(itemgetter(1), name_module[1])),
            ),
            groupby(
                map(
                    lambda node_mod: (
                        node_mod[0],
                        node_mod[2].module,
                    ),
                    imports,
                ),
                itemgetter(0),
            ),
        )
    )

    init_filepath = path.join(
        output_directory,
        new_module_name,
        "__init__{extsep}py".format(extsep=extsep),
    )
    if dry_run:
        print("write\t{init_filepath!r}".format(init_filepath=init_filepath))
    else:
        # Synthesise the package `__init__.py` AST: a banner expression,
        # one relative `from . import ...` per grouped module, and an
        # `__all__` assignment listing every exported name (sorted, deduped)
        emit.file(
            Module(
                body=list(
                    chain.from_iterable(
                        (
                            (Expr(set_value("\nExport internal imports\n")),),
                            map(
                                lambda module_names: ImportFrom(
                                    module=module_names[0],
                                    names=list(
                                        map(
                                            lambda names: alias(
                                                names,
                                                None,
                                                identifier=None,
                                                identifier_name=None,
                                            ),
                                            module_names[1],
                                        )
                                    ),
                                    level=1,
                                    identifier=None,
                                ),
                                modules_names,
                            ),
                            (
                                Assign(
                                    targets=[Name("__all__", Store())],
                                    value=List(
                                        ctx=Load(),
                                        elts=list(
                                            map(
                                                set_value,
                                                sorted(
                                                    frozenset(
                                                        chain.from_iterable(
                                                            map(
                                                                itemgetter(1),
                                                                modules_names,
                                                            )
                                                        ),
                                                    )
                                                ),
                                            )
                                        ),
                                        expr=None,
                                    ),
                                    expr=None,
                                    lineno=None,
                                    **maybe_type_comment
                                ),
                            ),
                        )
                    )
                ),
                stmt=None,
                type_ignores=[],
            ),
            init_filepath,
            mode="wt",
        )