def main(unused_argv):
  # TODO(andylou): write simple unit test
  s2t_out = pathlib.Path(tempfile.mkdtemp())
  generate_lib.DocGenerator(
      root_title="Struct2Tensor",
      py_modules=[("s2t", s2t)],
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      # explicit_package_contents_filter ensures that only modules imported
      # directly from s2t/__init__.py are documented in the location that
      # defines them, instead of every location that imports them.
      callbacks=[
          public_api.explicit_package_contents_filter, _filter_module_attributes
      ]).build(s2t_out)

  expr_impl_out = pathlib.Path(tempfile.mkdtemp())
  generate_lib.DocGenerator(
      root_title="Struct2Tensor-expression_impl",
      py_modules=[("expression_impl", expression_impl)],
      code_url_prefix=FLAGS.code_url_prefix + "/expression_impl",
      search_hints=FLAGS.search_hints,
      # explicit_package_contents_filter ensures that only modules imported
      # directly from s2t/expression_impl/__init__.py are documented in the
      # location that defines them, instead of every location that imports them.
      callbacks=[
          public_api.explicit_package_contents_filter, _filter_module_attributes
      ]).build(expr_impl_out)

  output_dir = pathlib.Path(FLAGS.output_dir)

  def splice(pkg, src_dir):
    """Moves one generated package tree from its temp dir into output_dir."""
    shutil.rmtree(output_dir / pkg, ignore_errors=True)
    shutil.copytree(src_dir / pkg, output_dir / pkg)
    shutil.copy(src_dir / f"{pkg}.md", output_dir / f"{pkg}.md")
    try:
      shutil.copy(src_dir / "_redirects.yaml",
                  output_dir / pkg / "_redirects.yaml")
    except FileNotFoundError:
      # Redirects are optional; older generator versions don't emit them.
      pass
    shutil.copy(src_dir / "_toc.yaml", output_dir / pkg / "_toc.yaml")

  splice("s2t", s2t_out)
  splice("expression_impl", expr_impl_out)

  # Stitch the two per-package TOCs into a single top-level TOC.
  toc_path = output_dir / "_toc.yaml"
  toc_path.write_text(
      yaml.dump({
          "toc": [{
              "include": "/api_docs/python/s2t/_toc.yaml"
          }, {
              "break": True
          }, {
              "include": "/api_docs/python/expression_impl/_toc.yaml"
          }]
      }))
def main(args):
  """Generates TF-Transform and TFT-Beam docs and merges them into one site."""
  if args[1:]:
    raise ValueError('Unrecognized Command line args', args[1:])

  tft_out = pathlib.Path(tempfile.mkdtemp())
  generate_lib.DocGenerator(
      root_title='TF-Transform',
      py_modules=[('tft', transform)],
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      callbacks=[public_api.explicit_package_contents_filter]).build(tft_out)

  # Hide the implementation module before generating the beam docs.
  doc_controls.do_not_generate_docs(tft_beam.analyzer_impls)

  tft_beam_out = pathlib.Path(tempfile.mkdtemp())
  generate_lib.DocGenerator(
      root_title='TFT-Beam',
      py_modules=[('tft_beam', tft_beam)],
      code_url_prefix=FLAGS.code_url_prefix + '/beam',
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      callbacks=[
          public_api.explicit_package_contents_filter,
          public_api.local_definitions_filter
      ]).build(tft_beam_out)

  output_dir = pathlib.Path(FLAGS.output_dir)

  def splice(pkg, src_dir):
    """Moves one generated package tree from its temp dir into output_dir."""
    shutil.rmtree(output_dir / pkg, ignore_errors=True)
    shutil.copytree(src_dir / pkg, output_dir / pkg)
    shutil.copy(src_dir / f'{pkg}.md', output_dir / f'{pkg}.md')
    try:
      shutil.copy(src_dir / '_redirects.yaml',
                  output_dir / pkg / '_redirects.yaml')
    except FileNotFoundError:
      # Redirects are optional output; skip if the generator didn't emit them.
      pass
    shutil.copy(src_dir / '_toc.yaml', output_dir / pkg / '_toc.yaml')

  splice('tft', tft_out)
  splice('tft_beam', tft_beam_out)

  # Stitch the two per-package TOCs into a single top-level TOC.
  toc_path = output_dir / '_toc.yaml'
  toc_path.write_text(
      yaml.dump({
          'toc': [{
              'include': f'{FLAGS.site_path}/tft/_toc.yaml'
          }, {
              'break': True
          }, {
              'include': f'{FLAGS.site_path}/tft_beam/_toc.yaml'
          }]
      }))
def test_docs_for_module(self):
  """Module pages list their functions/classes and pick up the brief doc."""
  module = types.ModuleType('m')
  module.__file__ = __file__
  module.test_function = test_function
  module.test_function_with_args_kwargs = test_function_with_args_kwargs
  module.TestClass = TestClass

  parser_config = generate_lib.DocGenerator(
      root_title='test',
      py_modules=[('m', module)],
      code_url_prefix='https://tensorflow.org').run_extraction()

  api_node = doc_generator_visitor.ApiTreeNode(
      path=('m',), py_object=test_module)
  page_info = docs_for_object.docs_for_object(
      api_node=api_node, parser_config=parser_config)

  # The page brief should be the first docstring line of the module.
  self.assertEqual(
      inspect.getdoc(test_module).split('\n')[0], page_info.doc.brief)

  # Both functions and the class should appear as members.
  self.assertEqual({test_function, test_function_with_args_kwargs},
                   {f.py_object for f in page_info.functions})
  self.assertEqual({TestClass}, {c.py_object for c in page_info.classes})
def test_docs_for_class_should_skip(self):
  """do_not_doc_inheritable hides a method even when a child overrides it."""

  class Parent(object):

    @doc_controls.do_not_doc_inheritable
    def a_method(self, arg='default'):
      pass

  class Child(Parent):

    def a_method(self, arg='default'):
      pass

  module = types.ModuleType('m')
  module.__file__ = __file__
  module.Child = Child

  parser_config = generate_lib.DocGenerator(
      root_title='test',
      py_modules=[('m', module)],
      code_url_prefix='https://tensorflow.org').run_extraction()

  api_node = doc_generator_visitor.ApiTreeNode(
      path=('m', 'Child'), py_object=Child)
  page_info = docs_for_object.docs_for_object(
      api_node=api_node, parser_config=parser_config)

  # `a_method` inherited the do-not-doc marker, so no methods are listed.
  self.assertEmpty(page_info.methods)
def main(argv):
  """Builds the API docs and writes them to FLAGS.output_dir."""
  if argv[1:]:
    raise ValueError("Unrecognized arguments: {}".format(argv[1:]))

  # Resolve the "Defined in" link prefix: an explicit flag wins, then a
  # specific git branch, then fall back to master.
  if FLAGS.code_url_prefix:
    code_url_prefix = FLAGS.code_url_prefix
  elif FLAGS.git_branch:
    code_url_prefix = CODE_PREFIX_TEMPLATE.format(
        git_branch=FLAGS.git_branch)
  else:
    code_url_prefix = CODE_PREFIX_TEMPLATE.format(git_branch="master")

  doc_generator = generate_lib.DocGenerator(
      root_title=PROJECT_FULL_NAME,
      py_modules=[(PROJECT_SHORT_NAME, tfra)],
      code_url_prefix=code_url_prefix,
      # NOTE(review): the key "tfa" does not match the `tfra` module passed
      # in py_modules above — confirm whether this should be "tfra"; as
      # written the map likely never matches anything.
      private_map={"tfa": ["__version__", "utils", "version"]},
      # This callback usually cleans up a lot of aliases caused by internal imports.
      callbacks=[public_api.local_definitions_filter],
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
  )

  doc_generator.build(FLAGS.output_dir)
  print("Output docs to: ", FLAGS.output_dir)
def main(argv):
  """Builds the tfio API docs into FLAGS.output_dir."""
  if argv[1:]:
    raise ValueError('Unrecognized arguments: {}'.format(argv[1:]))

  # Link prefix resolution: a git branch wins, then an explicit prefix,
  # otherwise default to the master branch.
  if FLAGS.git_branch:
    code_url_prefix = CODE_PREFIX_TEMPLATE.format(git_branch=FLAGS.git_branch)
  elif FLAGS.code_url_prefix:
    code_url_prefix = FLAGS.code_url_prefix
  else:
    code_url_prefix = CODE_PREFIX_TEMPLATE.format(git_branch='master')

  generate_lib.DocGenerator(
      root_title=PROJECT_FULL_NAME,
      # Replace `tensorflow_docs` with your module, here.
      py_modules=[(PROJECT_SHORT_NAME, tfio)],
      base_dir=pathlib.Path(tfio.__file__).parents[2],
      code_url_prefix=code_url_prefix,
      # This callback cleans up a lot of aliases caused by internal imports.
      callbacks=[public_api.explicit_package_contents_filter],
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path).build(FLAGS.output_dir)

  print('Output docs to: ', FLAGS.output_dir)
def build_docs(name_pair, output_dir, code_url_prefix, search_hints,
               gen_report):
  """Build api docs for w&b.

  Args:
    name_pair: Name of the pymodule
    output_dir: A string path, where to put the files.
    code_url_prefix: prefix for "Defined in" links.
    search_hints: Bool. Include meta-data search hints at the top of each file.
    gen_report: Bool. Generates an API report containing the health of the
      docstrings of the public API.
  """
  # This is to help not document the parent class methods
  parent_classes = (
      wandb.data_types.WBValue,
      wandb.data_types.Media,
      wandb.data_types.BatchableMedia,
      wandb.apis.public.Paginator,
  )
  for cls in parent_classes:
    doc_controls.decorate_all_class_attributes(
        decorator=doc_controls.do_not_doc_in_subclasses,
        cls=cls,
        skip=["__init__"])

  generate_lib.DocGenerator(
      root_title="W&B",
      py_modules=[name_pair],
      base_dir=path.dirname(wandb.__file__),
      search_hints=search_hints,
      code_url_prefix=code_url_prefix,
      site_path="",
      gen_report=gen_report,
      yaml_toc=False).build(output_dir)
def generate_cirq_aqt():
  """Builds Cirq-aqt docs if the cirq_aqt package is importable."""
  # This try-catch can go after v0.12 is released
  try:
    # should be present in the nightly (pre-release) build
    import cirq_aqt
  except ImportError:
    # as cirq.aqt is currently not being generated anyway
    # we won't handle this case (the stable build)
    return

  doc_generator = generate_lib.DocGenerator(
      root_title="Cirq-aqt",
      py_modules=[("cirq_aqt", cirq_aqt)],
      base_dir=os.path.dirname(cirq_aqt.__file__),
      code_url_prefix=FLAGS.code_url_prefix + "/cirq-aqt/cirq_aqt",
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      callbacks=[
          public_api.local_definitions_filter,
          filter_unwanted_inherited_methods
      ],
      extra_docs=_doc.RECORDED_CONST_DOCS,
  )

  # Keep networkx.DiGraph members out of docs for subclasses.
  doc_controls.decorate_all_class_attributes(
      doc_controls.do_not_doc_inheritable, networkx.DiGraph, skip=[])

  doc_generator.build(output_dir=FLAGS.output_dir)
def generate_api_docs(output_dir):
  """Generates markdown API docs for TFF.

  Args:
    output_dir: Base directory path to write generated files to.
  """

  def _ignore_symbols(mod):
    """Returns list of symbols to ignore for documentation."""
    # Everything public that is not in the module's `_allowed_symbols`
    # whitelist is hidden.
    all_symbols = [x for x in dir(mod) if not x.startswith('_')]
    allowed_symbols = mod._allowed_symbols  # pylint: disable=protected-access
    return set(all_symbols) - set(allowed_symbols)

  def _get_ignored_symbols(module):
    """Returns a Python `set` of symbols to ignore for a given `module`."""
    # Used for modules without an `_allowed_symbols` list: hide private
    # names, nested modules, and the forward-declared `_FUTURE_SYMBOLS`.
    symbols = dir(module)
    private_symbols = [x for x in symbols if x.startswith('_')]
    module_symbols = [
        x for x in symbols if inspect.ismodule(getattr(module, x))
    ]
    return set(private_symbols + module_symbols + _FUTURE_SYMBOLS)

  doc_generator = generate_lib.DocGenerator(
      root_title='TensorFlow Federated',
      py_modules=[('tff', tff)],
      base_dir=os.path.dirname(tff.__file__),
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      # Per-module hide lists; the dataset/model leaf modules use the
      # heuristic `_get_ignored_symbols`, the rest use the whitelist-based
      # `_ignore_symbols`.
      private_map={
          'tff': _ignore_symbols(tff),
          'tff.backends': _ignore_symbols(tff.backends),
          'tff.backends.mapreduce': _ignore_symbols(tff.backends.mapreduce),
          'tff.framework': _ignore_symbols(tff.framework),
          'tff.learning': _ignore_symbols(tff.learning),
          'tff.learning.framework': _ignore_symbols(tff.learning.framework),
          'tff.simulation': _ignore_symbols(tff.simulation),
          'tff.simulation.datasets': _ignore_symbols(tff.simulation.datasets),
          'tff.simulation.datasets.emnist':
              _get_ignored_symbols(tff.simulation.datasets.emnist),
          'tff.simulation.datasets.shakespeare':
              _get_ignored_symbols(tff.simulation.datasets.shakespeare),
          'tff.simulation.datasets.stackoverflow':
              _get_ignored_symbols(tff.simulation.datasets.stackoverflow),
          'tff.simulation.models': _ignore_symbols(tff.simulation.models),
          'tff.simulation.models.mnist':
              _get_ignored_symbols(tff.simulation.models.mnist),
          'tff.utils': _ignore_symbols(tff.utils),
      })

  doc_generator.build(output_dir)
def main(args):
  """Builds the TensorFlow Data Validation API docs.

  Args:
    args: Command line arguments; no positional arguments are accepted.

  Returns:
    The result of `DocGenerator.build`.

  Raises:
    ValueError: If unrecognized positional arguments are passed.
  """
  if args[1:]:
    raise ValueError("Unrecognized Command line args", args[1:])

  for obj in supress_docs_for:
    doc_controls.do_not_generate_docs(obj)

  # Hide every submodule reachable from the tfdv namespace; only the
  # symbols re-exported at the top level should be documented.
  # (`_` replaces the previously unused member name.)
  for _, value in inspect.getmembers(tfdv):
    if inspect.ismodule(value):
      doc_controls.do_not_generate_docs(value)

  for name, value in inspect.getmembers(beam.PTransform):
    # This ensures that the methods of PTransform are not documented in any
    # derived classes.
    if name == "__init__":
      continue
    try:
      doc_controls.do_not_doc_inheritable(value)
    except (TypeError, AttributeError):
      # Some attributes (e.g. slot wrappers) cannot carry the marker.
      pass

  doc_generator = generate_lib.DocGenerator(
      root_title="TensorFlow Data Validation",
      py_modules=[("tfdv", tfdv)],
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      # local_definitions_filter ensures that shared modules are only
      # documented in the location that defines them, instead of every location
      # that imports them.
      callbacks=[
          public_api.local_definitions_filter, _filter_class_attributes
      ])

  return doc_generator.build(output_dir=FLAGS.output_dir)
def main(unused_argv):
  """Builds the TF Model Optimization API docs."""
  # TODO(tfmot): remove this once the next release after 0.3.0 happens.
  # This is needed in the interim because the API docs reflect
  # the latest release and the current release still wildcard imports
  # all of the classes below.
  hidden_symbols = {
      "tfmot.sparsity.keras": [
          # List of internal classes which get exposed when imported.
          "InputLayer",
          "custom_object_scope",
          "pruning_sched",
          "pruning_wrapper",
          "absolute_import",
          "division",
          "print_function",
          "compat",
      ]
  }

  doc_generator = generate_lib.DocGenerator(
      root_title="TensorFlow Model Optimization",
      py_modules=[("tfmot", tfmot)],
      base_dir=os.path.dirname(tfmot.__file__),
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      private_map=hidden_symbols,
  )

  doc_generator.build(output_dir=FLAGS.output_dir)
def main(argv):
  """Builds the tfio API docs into FLAGS.output_dir."""
  if argv[1:]:
    raise ValueError('Unrecognized arguments: {}'.format(argv[1:]))

  # Link prefix resolution: a git branch wins, then an explicit prefix,
  # otherwise default to the master branch.
  if FLAGS.git_branch:
    code_url_prefix = CODE_PREFIX_TEMPLATE.format(git_branch=FLAGS.git_branch)
  elif FLAGS.code_url_prefix:
    code_url_prefix = FLAGS.code_url_prefix
  else:
    code_url_prefix = CODE_PREFIX_TEMPLATE.format(git_branch='master')

  generate_lib.DocGenerator(
      root_title=PROJECT_FULL_NAME,
      # Replace `tensorflow_docs` with your module, here.
      py_modules=[(PROJECT_SHORT_NAME, tfio)],
      code_url_prefix=code_url_prefix,
      private_map={'tfio': ['__version__', 'utils', 'version', 'core']},
      # This callback cleans up a lot of aliases caused by internal imports.
      callbacks=[],
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path).build(FLAGS.output_dir)

  print('Output docs to: ', FLAGS.output_dir)
def test_docs_for_function_with_kwargs(self):
  """A *args/**kwargs function keeps its brief doc and full signature."""
  module = types.ModuleType('m')
  module.__file__ = __file__
  module.test_function_with_args_kwargs = test_function_with_args_kwargs

  parser_config = generate_lib.DocGenerator(
      root_title='test',
      py_modules=[('m', module)],
      code_url_prefix='https://tensorflow.org').run_extraction()

  api_node = doc_generator_visitor.ApiTreeNode(
      path=('test_function_with_args_kwargs',),
      py_object=test_function_with_args_kwargs)
  page_info = docs_for_object.docs_for_object(
      api_node=api_node, parser_config=parser_config)

  # The page brief should be the first docstring line of the function.
  self.assertEqual(
      inspect.getdoc(test_function_with_args_kwargs).split('\n')[0],
      page_info.doc.brief)

  # The extracted signature must include the starred parameters.
  self.assertEqual('(\n    unused_arg, *unused_args, **unused_kwargs\n)',
                   str(page_info.signature))
def test_generate_index(self):
  """The global index lists modules/classes/functions but not members."""
  module = types.ModuleType('m')
  module.__file__ = __file__
  module.TestClass = TestClass
  module.test_function = test_function
  module.submodule = types.ModuleType('submodule')
  module.submodule.test_function = test_function

  parser_config = generate_lib.DocGenerator(
      root_title='test',
      py_modules=[('m', module)],
      code_url_prefix='https://tensorflow.org').run_extraction()

  docs = parser.generate_global_index(
      'TestLibrary',
      index=parser_config.index,
      reference_resolver=parser_config.reference_resolver)

  # Make sure duplicates and non-top-level symbols are in the index, but
  # methods and properties are not.
  self.assertNotIn('a_method', docs)
  self.assertNotIn('a_property', docs)
  self.assertIn('m.TestClass', docs)
  self.assertIn('m.TestClass.ChildClass', docs)
  self.assertIn('m.submodule.test_function', docs)
  self.assertIn('<code>m.submodule.test_function', docs)
def main(unused_argv):
  """Builds the Cirq API docs into FLAGS.output_dir."""
  doc_generator = generate_lib.DocGenerator(
      root_title="Cirq",
      py_modules=[("cirq", cirq)],
      base_dir=os.path.dirname(cirq.__file__),
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      callbacks=[
          public_api.local_definitions_filter,
          filter_unwanted_inherited_methods
      ],
      private_map={
          # Opt to not build docs for these paths for now since they error.
          "cirq.google.engine.client.quantum.QuantumEngineServiceClient":
              ["enums"],
          "cirq.google.engine.client.quantum_v1alpha1.QuantumEngineServiceClient":
              ["enums"],
          "cirq.google.api": ["v1"],
      },
      extra_docs=_doc.RECORDED_CONST_DOCS,
  )

  # Keep networkx.DiGraph members out of docs for subclasses.
  doc_controls.decorate_all_class_attributes(
      doc_controls.do_not_doc_inheritable, networkx.DiGraph, skip=[])

  doc_generator.build(output_dir=FLAGS.output_dir)
def main(args):
  """Builds the ML Metadata API docs."""
  if args[1:]:
    raise ValueError('Unrecognized command line args', args[1:])

  # Hide internal submodules that happen to be importable from `mlmd`.
  suppress_docs_for = [
      submodule
      for submodule in (getattr(mlmd, name, None)
                        for name in ['version', 'goo' + 'gle',
                                     'metadata_store', 'pywrap'])
      if submodule is not None
  ]
  for obj in suppress_docs_for:
    doc_controls.do_not_generate_docs(obj)

  generate_lib.DocGenerator(
      root_title='ML Metadata',
      py_modules=[('mlmd', mlmd)],
      base_dir=os.path.dirname(mlmd.__file__),
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      private_map={},
      callbacks=[
          # This filters out objects not defined in the current module or its
          # sub-modules.
          public_api.local_definitions_filter,
          ignore_proto_method,
          ignore_attrs_method
      ]).build(output_dir=FLAGS.output_dir)
def main(_):
  """Builds the TFX API docs."""
  # Hide helper submodules that should not appear in the public docs.
  for name in ["utils", "dependencies", "version", "examples"]:
    submodule = getattr(tfx, name, None)
    if submodule is not None:
      doc_controls.do_not_generate_docs(submodule)

  doc_generator = generate_lib.DocGenerator(
      root_title="TFX",
      py_modules=[("tfx", tfx)],
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      gen_report=FLAGS.gen_report,
      private_map={},
      # local_definitions_filter ensures that shared modules are only
      # documented in the location that defines them, instead of every location
      # that imports them.
      callbacks=[
          api_generator.public_api.explicit_package_contents_filter,
          ignore_test_objects,
          ignore_proto_method
      ])

  doc_generator.build(output_dir=FLAGS.output_dir)
def main(_):
  """Builds the TFX API docs."""
  # These make up for the empty __init__.py files.
  for pkg in (tfx.orchestration, tfx.components, tfx.extensions):
    api_generator.utils.recursive_import(pkg)

  # Hide helper submodules that should not appear in the public docs.
  for name in ["utils", "proto", "dependencies", "version"]:
    submodule = getattr(tfx, name, None)
    if submodule is not None:
      doc_controls.do_not_generate_docs(submodule)

  doc_generator = generate_lib.DocGenerator(
      root_title="TFX",
      py_modules=[("tfx", tfx)],
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      private_map={},
      # local_definitions_filter ensures that shared modules are only
      # documented in the location that defines them, instead of every location
      # that imports them.
      callbacks=[
          api_generator.public_api.local_definitions_filter,
          ignore_test_objects
      ])

  doc_generator.build(output_dir=FLAGS.output_dir)
def main(unused_argv):
  """Builds the TensorFlow Quantum API docs into FLAGS.output_dir."""
  doc_generator = generate_lib.DocGenerator(
      root_title="TensorFlow Quantum",
      py_modules=[("tfq", tfq)],
      base_dir=os.path.dirname(tfq.__file__),
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      callbacks=[public_api.local_definitions_filter],
      # Hide implementation submodules; only the symbols re-exported at the
      # package level should be documented.
      private_map={
          "tfq": ["python", "core"],
          "tfq.layers": [
              "circuit_construction",
              "circuit_executors",
              "high_level",
          ],
          "tfq.differentiators": [
              "linear_combination", "differentiator", "parameter_shift",
              "stochastic_differentiator", "parameter_shift_util",
              "stochastic_differentiator_util", "adjoint"
          ],
          "tfq.datasets": ["cluster_state"],
          "tfq.util": [
              "from_tensor", "convert_to_tensor", "exp_identity",
              "check_commutability", "kwargs_cartesian_product",
              "random_circuit_resolver_batch", "random_pauli_sums",
              "random_symbol_circuit", "random_symbol_circuit_resolver_batch"
          ]
      })
  doc_generator.build(output_dir=FLAGS.output_dir)
def gen_api_docs():
  """Generates api docs for the tensorflow docs package."""
  # The `del`s below keep api_gen_test from documenting these submodules.
  # Please remove these lines from your build_docs.py files when you create
  # them.
  del tensorflow_docs.google
  del tensorflow_docs.api_generator.tf_inspect

  generate_lib.DocGenerator(
      root_title=PROJECT_FULL_NAME,
      # Replace `tensorflow_docs` with your module, here.
      py_modules=[(PROJECT_SHORT_NAME, tensorflow_docs)],
      # Replace `tensorflow_docs` with your module, here.
      base_dir=os.path.dirname(tensorflow_docs.__file__),
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      private_map={},
      # This callback cleans up a lot of aliases caused by internal imports.
      callbacks=[public_api.local_definitions_filter]).build(FLAGS.output_dir)

  print('Output docs to: ', FLAGS.output_dir)
def gen_api_docs():
  """Generates api docs for the tensorflow docs package."""
  # The `del`s below keep api_gen_test from documenting these submodules.
  # Please remove these lines from your build_docs.py files when you create
  # them.
  del tensorflow_docs.google
  del tensorflow_docs.api_generator.report.schema

  generate_lib.DocGenerator(
      root_title=PROJECT_FULL_NAME,
      # Replace `tensorflow_docs` with your module, here.
      py_modules=[(PROJECT_SHORT_NAME, tensorflow_docs)],
      # Replace `tensorflow_docs` with your module, here.
      base_dir=os.path.dirname(tensorflow_docs.__file__),
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      # This callback ensures that docs are only generated for objects that
      # are explicitly imported in your __init__.py files. There are other
      # options but this is a good starting point.
      callbacks=[public_api.explicit_package_contents_filter],
  ).build(FLAGS.output_dir)

  print('Output docs to: ', FLAGS.output_dir)
def build_docs(output_dir, code_url_prefix, search_hints=True): """Build api docs for tensorflow v2. Args: output_dir: A string path, where to put the files. code_url_prefix: prefix for "Defined in" links. search_hints: Bool. Include meta-data search hints at the top of each file. """ # The custom page will be used for raw_ops.md not the one generated above. doc_controls.set_custom_page_content(tf.raw_ops, generate_raw_ops_doc()) _hide_layer_and_module_methods() try: doc_controls.do_not_generate_docs(tf.tools) except AttributeError: pass try: doc_controls.do_not_generate_docs(tf.compat.v1.pywrap_tensorflow) except AttributeError: pass try: doc_controls.do_not_generate_docs(tf.pywrap_tensorflow) except AttributeError: pass try: doc_controls.do_not_generate_docs(tf.flags) except AttributeError: pass base_dir = path.normpath(path.join(tf.__file__, "../..")) base_dirs = ( path.join(base_dir, "tensorflow_core"), # External packages base directories path.dirname(tensorboard.__file__), path.dirname(tensorflow_estimator.__file__), ) code_url_prefixes = ( code_url_prefix, # External packages source repositories, "https://github.com/tensorflow/tensorboard/tree/master/tensorboard", "https://github.com/tensorflow/estimator/tree/master/tensorflow_estimator", ) doc_generator = generate_lib.DocGenerator( root_title="TensorFlow 2", py_modules=[("tf", tf)], base_dir=base_dirs, search_hints=search_hints, code_url_prefix=code_url_prefixes, site_path=FLAGS.site_path, visitor_cls=TfExportAwareVisitor, private_map=_PRIVATE_MAP) doc_generator.build(output_dir)
def gen_api_docs(code_url_prefix, site_path, output_dir, project_short_name,
                 project_full_name, search_hints):
  """Generates api docs for the tensorflow docs package."""
  build_api_docs_lib.hide_module_model_and_layer_methods()

  # Derive the branch name and the matching URL for the `official` package.
  branch = code_url_prefix.strip('/').split('/')[-2]
  official_url_prefix = (
      f'https://github.com/tensorflow/models/blob/{branch}/official/')

  vision_base_dir = pathlib.Path(tfm.vision.__file__).parent

  # The `layers` submodule (and others) are actually defined in the `official`
  # package. Find the path to `official`.
  official_base_dir = next(
      p for p in pathlib.Path(tfm.vision.layers.__file__).parents
      if p.name == 'official')

  generate_lib.DocGenerator(
      root_title=project_full_name,
      py_modules=[(project_short_name, tfm.vision)],
      base_dir=[
          vision_base_dir,
          official_base_dir,
      ],
      code_url_prefix=[code_url_prefix, official_url_prefix],
      search_hints=search_hints,
      site_path=site_path,
      callbacks=[custom_filter],
  ).build(output_dir)

  logging.info('Output docs to: %s', output_dir)
def test_get_other_member_doc_object_doc_attr(self):
  """An instance's own __doc__ is shown after the `Instance of` line."""

  class A():
    """Class docs."""
    pass

  instance = A()
  instance.__doc__ = 'Object doc'

  module = types.ModuleType('m')
  module.__file__ = __file__
  module.a = instance

  parser_config = generate_lib.DocGenerator(
      root_title='test',
      py_modules=[('m', module)],
      code_url_prefix='https://tensorflow.org').run_extraction()

  result = parser._get_other_member_doc(instance, parser_config, {})

  expected = textwrap.dedent("""\
    Instance of `__main__.A`

    Object doc""")

  self.assertEqual(expected, result)
def test_strips_default_arg_memory_address(self):
  """Validates that parser strips memory addresses out of default argspecs.

  argspec.defaults can contain object memory addresses, which can change
  between invocations. It's desirable to strip these out to reduce churn.

  See: `help(collections.MutableMapping.pop)`
  """
  module = types.ModuleType('m')
  module.__file__ = __file__
  module.fun = lambda x=object(): x

  parser_config = generate_lib.DocGenerator(
      root_title='test',
      py_modules=[('m', module)],
      code_url_prefix='https://tensorflow.org').run_extraction()

  api_node = doc_generator_visitor.ApiTreeNode(
      path=('m', 'fun'), py_object=module.fun)
  page_info = docs_for_object.docs_for_object(
      api_node=api_node, parser_config=parser_config)

  signature = str(page_info.signature)
  # The varying hex address must be replaced with a stable placeholder.
  self.assertNotIn('object at 0x', signature)
  self.assertIn('<object object>', signature)
def test_getsource_indexerror_resilience(self):
  """Validates that parser gracefully handles IndexErrors.

  getsource() can raise an IndexError in some cases. It's unclear why this
  happens, but it consistently repros on the `get` method of
  collections.MutableMapping subclasses.
  """
  module = types.ModuleType('m')
  module.__file__ = __file__
  module.ConcreteMutableMapping = ConcreteMutableMapping

  parser_config = generate_lib.DocGenerator(
      root_title='test',
      py_modules=[('m', module)],
      code_url_prefix='https://tensorflow.org').run_extraction()

  api_node = doc_generator_visitor.ApiTreeNode(
      path=('m', 'ConcreteMutableMapping'), py_object=ConcreteMutableMapping)
  page_info = docs_for_object.docs_for_object(
      api_node=api_node, parser_config=parser_config)

  # Despite the IndexError, `get` must still be documented as a method.
  method_objects = [info.py_object for info in page_info.methods]
  self.assertIn(ConcreteMutableMapping.get, method_objects)
def build_docs(output_dir, code_url_prefix, search_hints=True):
  """Build api docs for tensorflow v2.

  Args:
    output_dir: A string path, where to put the files.
    code_url_prefix: prefix for "Defined in" links.
    search_hints: Bool. Include meta-data search hints at the top of each file.
  """
  _hide_layer_and_module_methods()

  # These attributes only exist in some TF builds; best-effort hiding.
  try:
    doc_controls.do_not_generate_docs(tf.tools)
  except AttributeError:
    pass

  try:
    doc_controls.do_not_generate_docs(tf.compat.v1.pywrap_tensorflow)
  except AttributeError:
    pass

  try:
    doc_controls.do_not_generate_docs(tf.pywrap_tensorflow)
  except AttributeError:
    pass

  try:
    doc_controls.do_not_generate_docs(tf.flags)
  except AttributeError:
    pass

  base_dir = path.dirname(tf.__file__)

  # Source roots and matching URL prefixes for "Defined in" links — the
  # n-th base_dir pairs with the n-th code_url_prefix.
  base_dirs = (
      base_dir,
      # External packages base directories,
      path.dirname(tensorboard.__file__),
      path.dirname(tensorflow_estimator.__file__),
  )

  code_url_prefixes = (
      code_url_prefix,
      # External packages source repositories,
      "https://github.com/tensorflow/tensorboard/tree/master/tensorboard",
      "https://github.com/tensorflow/estimator/tree/master/tensorflow_estimator",
  )

  doc_generator = generate_lib.DocGenerator(
      root_title="TensorFlow 2.0 Preview",
      py_modules=[("tf", tf)],
      base_dir=base_dirs,
      search_hints=search_hints,
      code_url_prefix=code_url_prefixes,
      site_path=FLAGS.site_path,
      visitor_cls=TfExportAwareDocGeneratorVisitor,
      private_map=PRIVATE_MAP,
      do_not_descend_map=DO_NOT_DESCEND_MAP)

  doc_generator.build(output_dir)
def main(_):
  """Builds the tensorflow/compression docs and exits with build status."""
  doc_generator = generate_lib.DocGenerator(
      root_title="TensorFlow/compression",
      py_modules=[("tfc", tfc)],
      base_dir=os.path.dirname(tfc.__file__),
      code_url_prefix="https://github.com/tensorflow/compression/tree/master",
      api_cache=False,
  )
  # Propagate the generator's status code to the shell.
  status = doc_generator.build(FLAGS.output_dir)
  sys.exit(status)
def main(_):
  """Builds the TensorFlow Cloud API docs."""
  generate_lib.DocGenerator(
      root_title='TensorFlow Cloud',
      py_modules=[('tfc', tensorflow_cloud)],
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      callbacks=[public_api.explicit_package_contents_filter],
  ).build(output_dir=FLAGS.output_dir)
def main(_):
  """Builds the TensorFlow Graphics API docs."""
  generate_lib.DocGenerator(
      root_title="Tensorflow Graphics",
      py_modules=[("tfg", tfg)],
      base_dir=os.path.dirname(tfg.__file__),
      search_hints=FLAGS.search_hints,
      code_url_prefix=FLAGS.code_url_prefix,
      site_path=FLAGS.site_path,
  ).build(output_dir=FLAGS.output_dir)