Example #1
def main(_):
    # These make up for the empty __init__.py files.
    api_generator.utils.recursive_import(tfx.orchestration)
    api_generator.utils.recursive_import(tfx.components)
    api_generator.utils.recursive_import(tfx.extensions)

    do_not_generate_docs_for = []
    for name in ["utils", "proto", "dependencies", "version"]:
        submodule = getattr(tfx, name, None)
        if submodule is not None:
            do_not_generate_docs_for.append(submodule)

    for obj in do_not_generate_docs_for:
        doc_controls.do_not_generate_docs(obj)

    doc_generator = generate_lib.DocGenerator(
        root_title="TFX",
        py_modules=[("tfx", tfx)],
        code_url_prefix=FLAGS.code_url_prefix,
        search_hints=FLAGS.search_hints,
        site_path=FLAGS.site_path,
        private_map={},
        # local_definitions_filter ensures that shared modules are only
        # documented in the location that defines them, instead of every location
        # that imports them.
        callbacks=[
            api_generator.public_api.local_definitions_filter,
            ignore_test_objects
        ])
    doc_generator.build(output_dir=FLAGS.output_dir)
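Examples #1 and #2 also pass an `ignore_test_objects` callback that is defined elsewhere in the script. A minimal sketch, assuming the standard tensorflow_docs callback signature (path, parent, children) -> children and purely illustrative name-matching rules, might look like:

def ignore_test_objects(path, parent, children):
    # Hypothetical filter: drop *_test modules and test utilities from the docs.
    del path, parent  # Unused; only the candidate children matter here.
    return [
        (name, child) for name, child in children
        if not (name.endswith("_test") or name.startswith("test_"))
    ]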
Example #2
def main(_):

    do_not_generate_docs_for = []
    for name in ["utils", "dependencies", "version", "examples"]:
        submodule = getattr(tfx, name, None)
        if submodule is not None:
            do_not_generate_docs_for.append(submodule)

    for obj in do_not_generate_docs_for:
        doc_controls.do_not_generate_docs(obj)

    doc_generator = generate_lib.DocGenerator(
        root_title="TFX",
        py_modules=[("tfx", tfx)],
        code_url_prefix=FLAGS.code_url_prefix,
        search_hints=FLAGS.search_hints,
        site_path=FLAGS.site_path,
        gen_report=FLAGS.gen_report,
        private_map={},
        # explicit_package_contents_filter ensures that only objects defined in
        # this package, or explicitly imported into its __init__.py files, are
        # documented, rather than everything reachable through imports.
        callbacks=[
            api_generator.public_api.explicit_package_contents_filter,
            ignore_test_objects, ignore_proto_method
        ])
    doc_generator.build(output_dir=FLAGS.output_dir)
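`ignore_proto_method`, used in Examples #2 and #4 but not shown, presumably strips the boilerplate members that protoc generates on message classes. A sketch under the same callback convention, with an illustrative (not exhaustive) name list:

_PROTO_METHODS = frozenset({
    "ByteSize", "Clear", "ClearField", "CopyFrom", "FromString",
    "HasField", "IsInitialized", "ListFields", "MergeFrom",
    "MergeFromString", "ParseFromString", "SerializePartialToString",
    "SerializeToString", "WhichOneof",
})

def ignore_proto_method(path, parent, children):
    # Hypothetical filter: hide protobuf-generated methods on message classes.
    del path, parent  # Unused.
    return [(name, child) for name, child in children
            if name not in _PROTO_METHODS]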
Example #3
def main(args):
    if args[1:]:
        raise ValueError("Unrecognized Command line args", args[1:])

    for obj in suppress_docs_for:
        doc_controls.do_not_generate_docs(obj)

    for name, value in inspect.getmembers(tfdv):
        if inspect.ismodule(value):
            doc_controls.do_not_generate_docs(value)

    for name, value in inspect.getmembers(beam.PTransform):
        # This ensures that the methods of PTransform are not documented in any
        # derived classes.
        if name == "__init__":
            continue
        try:
            doc_controls.do_not_doc_inheritable(value)
        except (TypeError, AttributeError):
            pass

    doc_generator = generate_lib.DocGenerator(
        root_title="TensorFlow Data Validation",
        py_modules=[("tfdv", tfdv)],
        code_url_prefix=FLAGS.code_url_prefix,
        search_hints=FLAGS.search_hints,
        site_path=FLAGS.site_path,
        # local_definitions_filter ensures that shared modules are only
        # documented in the location that defines them, instead of every location
        # that imports them.
        callbacks=[
            public_api.local_definitions_filter, _filter_class_attributes
        ])

    return doc_generator.build(output_dir=FLAGS.output_dir)
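These `main` entry points all assume that absl flags such as `output_dir`, `code_url_prefix`, `search_hints`, and `site_path` are defined at module level and that the script is run through `absl.app`. A typical preamble (the default values here are only illustrative) looks like:

from absl import app
from absl import flags

FLAGS = flags.FLAGS

flags.DEFINE_string("output_dir", "/tmp/generated_docs",
                    "Where to write the generated markdown files.")
flags.DEFINE_string("code_url_prefix",
                    "https://github.com/org/project/blob/master/pkg",
                    "Prefix for the 'Defined in' source links.")
flags.DEFINE_bool("search_hints", True,
                  "Include metadata search hints in each generated file.")
flags.DEFINE_string("site_path", "project/api_docs/python",
                    "Path prefix under which the docs are published.")

if __name__ == "__main__":
    app.run(main)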
Example #4
def main(args):
    if args[1:]:
        raise ValueError('Unrecognized command line args', args[1:])

    suppress_docs_for = []
    for name in ['version', 'goo' + 'gle', 'metadata_store', 'pywrap']:
        submodule = getattr(mlmd, name, None)
        if submodule is not None:
            suppress_docs_for.append(submodule)

    for obj in suppress_docs_for:
        doc_controls.do_not_generate_docs(obj)

    doc_generator = generate_lib.DocGenerator(
        root_title='ML Metadata',
        py_modules=[('mlmd', mlmd)],
        base_dir=os.path.dirname(mlmd.__file__),
        code_url_prefix=FLAGS.code_url_prefix,
        search_hints=FLAGS.search_hints,
        site_path=FLAGS.site_path,
        private_map={},
        callbacks=[
            # This filters out objects not defined in the current module or its
            # sub-modules.
            public_api.local_definitions_filter,
            ignore_proto_method,
            ignore_attrs_method
        ])

    doc_generator.build(output_dir=FLAGS.output_dir)
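`ignore_attrs_method` is also defined outside this excerpt; assuming its job is to hide the machinery that `attr.s` adds to decorated classes, a plausible sketch is:

def ignore_attrs_method(path, parent, children):
    # Hypothetical filter: hide attrs-generated members on decorated classes.
    del path  # Unused.
    if not hasattr(parent, "__attrs_attrs__"):
        return children  # Not an attrs class; leave it untouched.
    return [(name, child) for name, child in children
            if name not in ("__attrs_attrs__", "__attrs_post_init__")]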
Example #5
def main(args):
    if args[1:]:
        raise ValueError('Unrecognized Command line args', args[1:])

    tft_out = pathlib.Path(tempfile.mkdtemp())
    doc_generator = generate_lib.DocGenerator(
        root_title='TF-Transform',
        py_modules=[('tft', transform)],
        code_url_prefix=FLAGS.code_url_prefix,
        search_hints=FLAGS.search_hints,
        site_path=FLAGS.site_path,
        callbacks=[public_api.explicit_package_contents_filter])

    doc_generator.build(tft_out)

    doc_controls.do_not_generate_docs(tft_beam.analyzer_impls)

    tft_beam_out = pathlib.Path(tempfile.mkdtemp())
    doc_generator = generate_lib.DocGenerator(
        root_title='TFT-Beam',
        py_modules=[('tft_beam', tft_beam)],
        code_url_prefix=FLAGS.code_url_prefix + '/beam',
        search_hints=FLAGS.search_hints,
        site_path=FLAGS.site_path,
        callbacks=[
            public_api.explicit_package_contents_filter,
            public_api.local_definitions_filter
        ])

    doc_generator.build(tft_beam_out)

    output_dir = pathlib.Path(FLAGS.output_dir)

    def splice(name, tmp_dir):
        shutil.rmtree(output_dir / name, ignore_errors=True)
        shutil.copytree(tmp_dir / name, output_dir / name)
        shutil.copy(tmp_dir / f'{name}.md', output_dir / f'{name}.md')
        try:
            shutil.copy(tmp_dir / '_redirects.yaml',
                        output_dir / name / '_redirects.yaml')
        except FileNotFoundError:
            pass
        shutil.copy(tmp_dir / '_toc.yaml', output_dir / name / '_toc.yaml')

    splice('tft', tft_out)
    splice('tft_beam', tft_beam_out)

    toc_path = output_dir / '_toc.yaml'
    toc_text = yaml.dump({
        'toc': [{
            'include': f'{FLAGS.site_path}/tft/_toc.yaml'
        }, {
            'break': True
        }, {
            'include': f'{FLAGS.site_path}/tft_beam/_toc.yaml'
        }]
    })
    toc_path.write_text(toc_text)
Example #6
def build_docs(output_dir, code_url_prefix, search_hints=True):
    """Build api docs for tensorflow v2.

  Args:
    output_dir: A string path, where to put the files.
    code_url_prefix: prefix for "Defined in" links.
    search_hints: Bool. Include meta-data search hints at the top of each file.
  """
    # The custom page will be used for raw_ops.md not the one generated above.
    doc_controls.set_custom_page_content(tf.raw_ops, generate_raw_ops_doc())

    _hide_layer_and_module_methods()

    try:
        doc_controls.do_not_generate_docs(tf.tools)
    except AttributeError:
        pass

    try:
        doc_controls.do_not_generate_docs(tf.compat.v1.pywrap_tensorflow)
    except AttributeError:
        pass

    try:
        doc_controls.do_not_generate_docs(tf.pywrap_tensorflow)
    except AttributeError:
        pass

    try:
        doc_controls.do_not_generate_docs(tf.flags)
    except AttributeError:
        pass

    base_dir = path.normpath(path.join(tf.__file__, "../.."))

    base_dirs = (
        path.join(base_dir, "tensorflow_core"),
        # External packages base directories
        path.dirname(tensorboard.__file__),
        path.dirname(tensorflow_estimator.__file__),
    )

    code_url_prefixes = (
        code_url_prefix,
        # External packages source repositories,
        "https://github.com/tensorflow/tensorboard/tree/master/tensorboard",
        "https://github.com/tensorflow/estimator/tree/master/tensorflow_estimator",
    )

    doc_generator = generate_lib.DocGenerator(
        root_title="TensorFlow 2",
        py_modules=[("tf", tf)],
        base_dir=base_dirs,
        search_hints=search_hints,
        code_url_prefix=code_url_prefixes,
        site_path=FLAGS.site_path,
        visitor_cls=TfExportAwareVisitor,
        private_map=_PRIVATE_MAP)

    doc_generator.build(output_dir)
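`_hide_layer_and_module_methods()` is not included in these excerpts. Judging from Examples #15 and #18, which decorate base classes so their members are not re-documented on every subclass, a sketch along the same lines (the class list is illustrative) could be:

def _hide_layer_and_module_methods():
    # Hypothetical helper: assumes `tf` and `doc_controls` are imported as above.
    base_classes = [tf.Module, tf.keras.layers.Layer]
    for cls in base_classes:
        for name, member in cls.__dict__.items():
            if name == "__init__":
                continue  # Keep constructors documented on subclasses.
            try:
                doc_controls.do_not_doc_in_subclasses(member)
            except (TypeError, AttributeError):
                pass  # Not every attribute accepts the decoration.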
Example #7
def main(unused_argv):
  for obj in DO_NOT_GENERATE_DOCS_FOR:
    doc_controls.do_not_generate_docs(obj)

  doc_generator = generate_lib.DocGenerator(
      root_title="TensorFlow Probability",
      py_modules=[("tfp", tfp)],
      base_dir=os.path.dirname(tfp.__file__),
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      private_map={"tfp": ["google", "staging", "python"]},
      callbacks=[internal_filter])

  doc_generator.build(output_dir=FLAGS.output_dir)
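The `internal_filter` callback in Example #7 is not shown either; assuming it exists to keep TFP's internal modules out of the public docs, a rough sketch could be:

def internal_filter(path, parent, children):
    # Hypothetical filter: skip children that live in an 'internal' module.
    del path, parent  # Unused.
    filtered = []
    for name, child in children:
        module = getattr(child, "__module__", "") or ""
        if name == "internal" or ".internal" in module:
            continue
        filtered.append((name, child))
    return filtered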
Example #8
def build_docs(output_dir, code_url_prefix, search_hints=True):
    """Build api docs for tensorflow v2.

  Args:
    output_dir: A string path, where to put the files.
    code_url_prefix: prefix for "Defined in" links.
    search_hints: Bool. Include meta-data search hints at the top of each file.
  """
    _hide_layer_and_module_methods()

    try:
        doc_controls.do_not_generate_docs(tf.tools)
    except AttributeError:
        pass

    try:
        doc_controls.do_not_generate_docs(tf.compat.v1.pywrap_tensorflow)
    except AttributeError:
        pass

    try:
        doc_controls.do_not_generate_docs(tf.pywrap_tensorflow)
    except AttributeError:
        pass

    try:
        doc_controls.do_not_generate_docs(tf.flags)
    except AttributeError:
        pass

    base_dir = path.dirname(tf.__file__)

    base_dirs = (
        base_dir,
        # External packages base directories,
        path.dirname(tensorboard.__file__),
        path.dirname(tensorflow_estimator.__file__),
    )

    code_url_prefixes = (
        code_url_prefix,
        # External packages source repositories,
        "https://github.com/tensorflow/tensorboard/tree/master/tensorboard",
        "https://github.com/tensorflow/estimator/tree/master/tensorflow_estimator",
    )

    doc_generator = generate_lib.DocGenerator(
        root_title="TensorFlow 2.0 Preview",
        py_modules=[("tf", tf)],
        base_dir=base_dirs,
        search_hints=search_hints,
        code_url_prefix=code_url_prefixes,
        site_path=FLAGS.site_path,
        visitor_cls=TfExportAwareDocGeneratorVisitor,
        private_map=PRIVATE_MAP,
        do_not_descend_map=DO_NOT_DESCEND_MAP)

    doc_generator.build(output_dir)
Example #9
def main(_):
    do_not_generate_docs_for = []

    for blocked_doc in do_not_generate_docs_for:
        doc_controls.do_not_generate_docs(blocked_doc)

    doc_generator = generate_lib.DocGenerator(
        root_title="Neural Structured Learning",
        py_modules=[("nsl", nsl)],
        code_url_prefix=FLAGS.code_url_prefix,
        search_hints=FLAGS.search_hints,
        site_path=FLAGS.site_path,
        # local_definitions_filter ensures that shared modules are only
        # documented in the location that defines them, instead of every location
        # that imports them.
        callbacks=[public_api.local_definitions_filter])
    doc_generator.build(output_dir=FLAGS.output_dir)
Example #10
def main(args):
    if args[1:]:
        raise ValueError('Unrecognized Command line args', args[1:])

    doc_controls.do_not_generate_docs(tft_beam.analyzer_impls)

    doc_generator = generate_lib.DocGenerator(
        root_title='TFT-Beam',
        py_modules=[('tft_beam', tft_beam)],
        code_url_prefix=FLAGS.code_url_prefix + '/beam',
        search_hints=FLAGS.search_hints,
        site_path=FLAGS.site_path,
        callbacks=[
            public_api.explicit_package_contents_filter,
            public_api.local_definitions_filter
        ])

    doc_generator.build(FLAGS.output_dir)
Example #11
def main(args):
  if args[1:]:
    raise ValueError('Unrecognized command line args', args[1:])

  for obj in suppress_docs_for:
    doc_controls.do_not_generate_docs(obj)

  doc_generator = generate_lib.DocGenerator(
      root_title='TensorFlow Model Analysis',
      py_modules=[('tfma', tfma)],
      base_dir=os.path.dirname(tfma.__file__),
      code_url_prefix=FLAGS.code_url_prefix,
      search_hints=FLAGS.search_hints,
      site_path=FLAGS.site_path,
      callbacks=[
          public_api.local_definitions_filter, depth_filter, suppress_docs
      ])

  return doc_generator.build(output_dir=FLAGS.output_dir)
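`depth_filter` and `suppress_docs` in Example #11 are defined elsewhere. A hypothetical `depth_filter`, assuming its purpose is to stop the generator from descending into deeply nested sub-modules, might be:

import inspect

def depth_filter(path, parent, children):
    # Hypothetical filter: do not document modules nested more than two levels deep.
    del parent  # Unused.
    if len(path) < 2:
        return children
    return [(name, child) for name, child in children
            if not inspect.ismodule(child)]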
Example #12
def main(args):
    if args[1:]:
        raise ValueError('Unrecognized command line args', args[1:])

    for obj in suppress_docs_for:
        doc_controls.do_not_generate_docs(obj)

    doc_generator = generate_lib.DocGenerator(
        root_title='TensorFlow Hub',
        py_modules=[('hub', hub)],
        base_dir=os.path.dirname(hub.__file__),
        code_url_prefix=FLAGS.code_url_prefix,
        search_hints=FLAGS.search_hints,
        site_path=FLAGS.site_path,
        private_map={},
        callbacks=[
            # This filters out objects not defined in the current module or its
            # sub-modules.
            public_api.local_definitions_filter
        ])

    doc_generator.build(output_dir=FLAGS.output_dir)
Example #13
def build_docs(output_dir, code_url_prefix, search_hints=True):
    """Build api docs for tensorflow v2.

  Args:
    output_dir: A string path, where to put the files.
    code_url_prefix: prefix for "Defined in" links.
    search_hints: Bool. Include meta-data search hints at the top of each file.
  """
    try:
        doc_controls.do_not_generate_docs(tf.tools)
    except AttributeError:
        pass

    base_dir = path.dirname(tf.__file__)
    base_dirs = (
        base_dir,
        path.normpath(path.join(base_dir, "../../tensorflow")),
        path.dirname(tensorboard.__file__),
        path.dirname(tensorflow_estimator.__file__),
    )

    code_url_prefixes = (
        code_url_prefix,
        # External packages source repositories
        "https://github.com/tensorflow/tensorboard/tree/master/tensorboard",
        "https://github.com/tensorflow/estimator/tree/master/tensorflow_estimator",
    )

    doc_generator = generate_lib.DocGenerator(
        root_title="TensorFlow 2.0 Preview",
        py_modules=[("tf", tf)],
        base_dir=base_dirs,
        search_hints=search_hints,
        code_url_prefix=code_url_prefixes,
        site_path=FLAGS.site_path,
        visitor_cls=TfExportAwareDocGeneratorVisitor)

    doc_generator.build(output_dir)
Example #14
def build_docs(output_dir, code_url_prefix, search_hints=True):
  """Build api docs for tensorflow v2.

  Args:
    output_dir: A string path, where to put the files.
    code_url_prefix: prefix for "Defined in" links.
    search_hints: Bool. Include meta-data search hints at the top of each file.
  """
  try:
    doc_controls.do_not_generate_docs(tf.tools)
  except AttributeError:
    pass

  base_dir = path.dirname(tf.__file__)
  base_dirs = (
      base_dir,
      path.normpath(path.join(base_dir, "../../tensorflow")),
      path.dirname(tensorboard.__file__),
      path.dirname(tensorflow_estimator.__file__),
  )

  code_url_prefixes = (
      code_url_prefix,
      # External packages source repositories
      "https://github.com/tensorflow/tensorboard/tree/master/tensorboard",
      "https://github.com/tensorflow/estimator/tree/master/tensorflow_estimator",
  )

  doc_generator = generate_lib.DocGenerator(
      root_title="TensorFlow 2.0 Preview",
      py_modules=[("tf", tf)],
      base_dir=base_dirs,
      search_hints=search_hints,
      code_url_prefix=code_url_prefixes,
      site_path=FLAGS.site_path,
      visitor_cls=TfExportAwareDocGeneratorVisitor)

  doc_generator.build(output_dir)
Example #15
def build_docs(output_dir, code_url_prefix, search_hints):
    """Build api docs for tensorflow v2.

  Args:
    output_dir: A string path, where to put the files.
    code_url_prefix: prefix for "Defined in" links.
    search_hints: Bool. Include meta-data search hints at the top of each file.
  """
    if distutils.version.LooseVersion(tf.__version__) >= "2.9":
        doc_controls.set_deprecated(tf.keras.preprocessing)

    # The custom page will be used for raw_ops.md not the one generated above.
    doc_controls.set_custom_page_builder_cls(tf.raw_ops, RawOpsPageInfo)

    # Hide raw_ops from search.
    for name, obj in tf_inspect.getmembers(tf.raw_ops):
        if not name.startswith("_"):
            doc_controls.hide_from_search(obj)

    for cls in [
            tf.Module, tf.keras.layers.Layer, tf.keras.optimizers.Optimizer
    ]:
        doc_controls.decorate_all_class_attributes(
            decorator=doc_controls.do_not_doc_in_subclasses,
            cls=cls,
            skip=["__init__"])

    do_not_document = [
        "tf.__internal__", "tf.keras.__internal__", "tf.__operators__",
        "tf.tools", "tf.compat.v1.pywrap_tensorflow", "tf.pywrap_tensorflow",
        "tf.flags", "tf.batch_mat_mul_v3", "tf.sparse_segment_sum_grad"
    ]
    for path in do_not_document:
        item = tf
        for part in path.split(".")[1:]:
            item = getattr(item, part, None)
        if item is None:
            continue
        doc_controls.do_not_generate_docs(item)

    base_dirs, code_url_prefixes = base_dir.get_base_dirs_and_prefixes(
        code_url_prefix)
    doc_generator = generate_lib.DocGenerator(
        root_title="TensorFlow 2",
        py_modules=[("tf", tf)],
        base_dir=base_dirs,
        search_hints=search_hints,
        code_url_prefix=code_url_prefixes,
        site_path=FLAGS.site_path,
        visitor_cls=TfExportAwareVisitor,
        private_map=_PRIVATE_MAP,
        extra_docs=_EXTRA_DOCS)

    doc_generator.build(output_dir)

    out_path = pathlib.Path(output_dir)

    expected_path_contents = {
        "tf/summary/audio.md": "tensorboard/plugins/audio/summary_v2.py",
        "tf/estimator/DNNClassifier.md":
        "tensorflow_estimator/python/estimator/canned/dnn.py",
        "tf/nn/sigmoid_cross_entropy_with_logits.md": "python/ops/nn_impl.py",
        "tf/keras/Model.md": "keras/engine/training.py",
    }

    all_passed = True
    error_msg_parts = [
        'Some "view source" links seem to be broken, please check:'
    ]

    for (rel_path, contents) in expected_path_contents.items():
        path = out_path / rel_path
        if contents not in path.read_text():
            all_passed = False
            error_msg_parts.append("  " + str(path))

    if not all_passed:
        raise ValueError("\n".join(error_msg_parts))

    rejected_path_contents = {
        "tf/keras/optimizers.md": "keras/optimizers/__init__.py",
    }

    all_passed = True
    error_msg_parts = [
        'Bad "view source" links in generated files, please check:'
    ]
    for rel_path, content in rejected_path_contents.items():
        path = out_path / rel_path
        if content in path.read_text():
            all_passed = False
            error_msg_parts.append("  " + str(path))

    if not all_passed:
        raise ValueError("\n".join(error_msg_parts))

    num_files = len(list(out_path.rglob("*")))
    if num_files < MIN_NUM_FILES_EXPECTED:
        raise ValueError(
            f"The TensorFlow api should be more than {MIN_NUM_FILES_EXPECTED} "
            f"files (found {num_files}).")
Example #16
def build_docs(output_dir, code_url_prefix, search_hints=True):
  """Build api docs for tensorflow v2.

  Args:
    output_dir: A string path, where to put the files.
    code_url_prefix: prefix for "Defined in" links.
    search_hints: Bool. Include meta-data search hints at the top of each file.
  """
  # The custom page will be used for raw_ops.md not the one generated above.
  doc_controls.set_custom_page_content(tf.raw_ops, generate_raw_ops_doc())

  # Hide raw_ops from search.
  for name, obj in tf_inspect.getmembers(tf.raw_ops):
    if not name.startswith("_"):
      doc_controls.hide_from_search(obj)

  _hide_layer_and_module_methods()

  try:
    doc_controls.do_not_generate_docs(tf.__operators__)
  except AttributeError:
    pass

  try:
    doc_controls.do_not_generate_docs(tf.tools)
  except AttributeError:
    pass

  try:
    doc_controls.do_not_generate_docs(tf.compat.v1.pywrap_tensorflow)
  except AttributeError:
    pass

  try:
    doc_controls.do_not_generate_docs(tf.pywrap_tensorflow)
  except AttributeError:
    pass

  try:
    doc_controls.do_not_generate_docs(tf.flags)
  except AttributeError:
    pass

  base_dirs, code_url_prefixes = base_dir.get_base_dirs_and_prefixes(
      code_url_prefix)
  doc_generator = generate_lib.DocGenerator(
      root_title="TensorFlow 2",
      py_modules=[("tf", tf)],
      base_dir=base_dirs,
      search_hints=search_hints,
      code_url_prefix=code_url_prefixes,
      site_path=FLAGS.site_path,
      visitor_cls=TfExportAwareVisitor,
      private_map=_PRIVATE_MAP)

  doc_generator.build(output_dir)

  out_path = pathlib.Path(output_dir)
  num_files = len(list(out_path.rglob("*")))
  if num_files < 2000:
    raise ValueError("The TensorFlow api should be more than 2000 files "
                     "(found {}).".format(num_files))
  expected_path_contents = {
      "tf/summary/audio.md":
          "tensorboard/plugins/audio/summary_v2.py",
      "tf/estimator/DNNClassifier.md":
          "tensorflow_estimator/python/estimator/canned/dnn.py",
      "tf/nn/sigmoid_cross_entropy_with_logits.md":
          "python/ops/nn_impl.py",
      "tf/keras/Model.md":
          "tensorflow/python/keras/engine/training.py",
      "tf/compat/v1/gradients.md":
          "tensorflow/python/ops/gradients_impl.py",
  }

  all_passed = True
  error_msg_parts = [
      'Some "view source" links seem to be broken, please check:'
  ]

  for (rel_path, contents) in expected_path_contents.items():
    path = out_path / rel_path
    if contents not in path.read_text():
      all_passed = False
      error_msg_parts.append("  " + str(path))

  if not all_passed:
    raise ValueError("\n".join(error_msg_parts))
Example #17
        output_dir=".",
        code_url_prefix=CODE_URL_PREFIX,
        search_hints=False,
        gen_report=False)

    wandb_run = [
        'init',
        'log',
        'config',
        'summary',
        'login',
        'alert',
    ]

    wandb.__all__ = wandb_run
    try:
        doc_controls.do_not_generate_docs(wandb.settings.Console)
    except AttributeError:
        pass
    try:
        doc_controls.do_not_generate_docs(wandb.settings.Source)
    except AttributeError:
        pass
    build_docs(
        name_pair=("run",wandb),
        output_dir="./library",
        code_url_prefix=CODE_URL_PREFIX,
        search_hints=False,
        gen_report=False)

    wandb_datatypes = [
        'Image',
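Example #17 is an excerpt from a Weights & Biases doc-generation script, and the `build_docs` helper it calls is not part of the excerpt. A minimal sketch of such a wrapper, assuming it simply forwards its arguments to `generate_lib.DocGenerator`:

def build_docs(name_pair, output_dir, code_url_prefix, search_hints, gen_report):
    # Hypothetical wrapper: generate docs for a single (name, module) pair.
    name, module = name_pair
    doc_generator = generate_lib.DocGenerator(
        root_title=name,
        py_modules=[(name, module)],
        code_url_prefix=code_url_prefix,
        search_hints=search_hints,
        gen_report=gen_report,
        callbacks=[public_api.local_definitions_filter])
    doc_generator.build(output_dir)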
Example #18
def build_docs(output_dir, code_url_prefix, search_hints, gen_report):
    """Build api docs for tensorflow v2.

  Args:
    output_dir: A string path, where to put the files.
    code_url_prefix: prefix for "Defined in" links.
    search_hints: Bool. Include meta-data search hints at the top of each file.
    gen_report: Bool. Generates an API report containing the health of the
      docstrings of the public API.
  """
    # The custom page will be used for raw_ops.md not the one generated above.
    doc_controls.set_custom_page_content(tf.raw_ops, generate_raw_ops_doc())

    # Hide raw_ops from search.
    for name, obj in tf_inspect.getmembers(tf.raw_ops):
        if not name.startswith("_"):
            doc_controls.hide_from_search(obj)

    for cls in [
            tf.Module, tf.keras.layers.Layer, tf.keras.optimizers.Optimizer
    ]:
        doc_controls.decorate_all_class_attributes(
            decorator=doc_controls.do_not_doc_in_subclasses,
            cls=cls,
            skip=["__init__"])

    try:
        doc_controls.do_not_generate_docs(tf.__internal__)
    except AttributeError:
        pass

    try:
        doc_controls.do_not_generate_docs(tf.keras.__internal__)
    except AttributeError:
        pass

    try:
        doc_controls.do_not_generate_docs(tf.__operators__)
    except AttributeError:
        pass

    try:
        doc_controls.do_not_generate_docs(tf.tools)
    except AttributeError:
        pass

    try:
        doc_controls.do_not_generate_docs(tf.compat.v1.pywrap_tensorflow)
    except AttributeError:
        pass

    try:
        doc_controls.do_not_generate_docs(tf.pywrap_tensorflow)
    except AttributeError:
        pass

    try:
        doc_controls.do_not_generate_docs(tf.flags)
    except AttributeError:
        pass

    base_dirs, code_url_prefixes = base_dir.get_base_dirs_and_prefixes(
        code_url_prefix)
    doc_generator = generate_lib.DocGenerator(
        root_title="TensorFlow 2",
        py_modules=[("tf", tf)],
        base_dir=base_dirs,
        search_hints=search_hints,
        code_url_prefix=code_url_prefixes,
        site_path=FLAGS.site_path,
        visitor_cls=TfExportAwareVisitor,
        private_map=_PRIVATE_MAP,
        gen_report=gen_report,
        extra_docs=_EXTRA_DOCS)

    doc_generator.build(output_dir)

    if gen_report:
        return

    out_path = pathlib.Path(output_dir)

    expected_path_contents = {
        "tf/summary/audio.md":
        "tensorboard/plugins/audio/summary_v2.py",
        "tf/estimator/DNNClassifier.md":
        "tensorflow_estimator/python/estimator/canned/dnn.py",
        "tf/nn/sigmoid_cross_entropy_with_logits.md":
        "python/ops/nn_impl.py",
        "tf/keras/Model.md":
        "keras/engine/training.py",
        "tf/keras/preprocessing/image/random_brightness.md":
        "keras_preprocessing/image/affine_transformations.py"
    }

    all_passed = True
    error_msg_parts = [
        'Some "view source" links seem to be broken, please check:'
    ]

    for (rel_path, contents) in expected_path_contents.items():
        path = out_path / rel_path
        if contents not in path.read_text():
            all_passed = False
            error_msg_parts.append("  " + str(path))

    if not all_passed:
        raise ValueError("\n".join(error_msg_parts))

    rejected_path_contents = {
        "tf/keras/optimizers.md": "keras/optimizers/__init__.py",
    }

    all_passed = True
    error_msg_parts = [
        'Bad "view source" links in generated files, please check:'
    ]
    for rel_path, content in rejected_path_contents.items():
        path = out_path / rel_path
        if content in path.read_text():
            all_passed = False
            error_msg_parts.append("  " + str(path))

    if not all_passed:
        raise ValueError("\n".join(error_msg_parts))

    num_files = len(list(out_path.rglob("*")))
    if num_files < 2000:
        raise ValueError("The TensorFlow api should be more than 2000 files"
                         "(found {}).".format(num_files))