Code Example #1
def test_generate():
    build_dir = Path("testdata/nodejs-hang")
    out_dir = build_dir / "site/api"

    # Generate!
    try:
        shell.run(
            [
                "docfx",
                "build",
                "-t",
                "../../third_party/docfx/templates/devsite",
            ],
            timeout=600,
            cwd=build_dir,
            hide_output=False,
        )
    except subprocess.TimeoutExpired:
        pytest.fail(
            "build timed out. Doc generation appears to be stuck in an infinite loop."
        )
    except Exception as e:
        pytest.fail(f"hanging build raised an exception: {e}")

    # Note: rename of toc.yaml to _toc.yaml happens in doc-pipeline.
    toc_file_path = out_dir / "toc.yaml"
    assert toc_file_path.is_file()
    got_text = toc_file_path.read_text("utf-8")
    assert "/nodejs/docs/reference/dialogflow/latest" in got_text
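
Every example on this page calls a project-internal shell.run helper rather than subprocess.run directly; the helper itself is not shown in this listing. As a rough orientation only, here is a minimal sketch of what such a wrapper might look like. The signature, the hide_output handling, and the check=True behavior are assumptions inferred from how the examples use it, not the project's actual implementation.

# Hypothetical sketch of a shell.run-style wrapper (assumed, not the real helper).
import subprocess
from typing import List, Optional


def run(
    args: List[str],
    *,
    cwd: Optional[str] = None,
    hide_output: bool = True,
    timeout: Optional[float] = None,
) -> subprocess.CompletedProcess:
    """Run a command, raising an exception on failure or timeout."""
    # Capture output when hiding it; otherwise let it stream to the console.
    stdout = subprocess.PIPE if hide_output else None
    stderr = subprocess.STDOUT if hide_output else None
    return subprocess.run(
        args,
        cwd=cwd,
        stdout=stdout,
        stderr=stderr,
        timeout=timeout,
        check=True,  # the examples rely on exceptions to signal failure
    )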
Code Example #2
File: generate.py Project: jskeet/doc-pipeline
def clone_templates(dir):
    shell.run(
        [
            "git",
            "clone",
            "--depth=1",
            "https://github.com/googleapis/doc-templates.git",
            ".",
        ],
        cwd=dir,
        hide_output=True,
    )
Code Example #3
def build_and_format(blob, is_bucket, devsite_template):
    tmp_path = pathlib.Path(tempfile.TemporaryDirectory(prefix="doc-pipeline.").name)

    api_path = decompress_path = tmp_path.joinpath("obj/api")

    api_path.mkdir(parents=True, exist_ok=True)

    # If building blobs on a bucket, use setup_bucket_docfx
    # Else, use setup_local_docfx
    if is_bucket:
        metadata_path, metadata = setup_bucket_docfx(
            tmp_path, api_path, decompress_path, blob
        )
        blob_name = blob.name
    else:
        metadata_path, metadata = setup_local_docfx(
            tmp_path, api_path, decompress_path, blob
        )
        blob_name = metadata.name

    site_path = tmp_path.joinpath("site")

    # Adjust Java TOC before generating.
    if metadata.language.lower() == "java":
        prepare.prepare_java_toc(api_path.joinpath("toc.yml"), metadata.name)

    log.info(f"Running `docfx build` for {blob_name}...")
    shell.run(
        ["docfx", "build", "-t", f"{devsite_template.absolute()}"],
        cwd=tmp_path,
        hide_output=False,
    )

    # Rename the output TOC file to be _toc.yaml to match the expected
    # format. Also support both toc.yaml and toc.html.
    try:
        shutil.move(site_path.joinpath("toc.yaml"), site_path.joinpath("_toc.yaml"))
    except FileNotFoundError:
        shutil.move(site_path.joinpath("toc.html"), site_path.joinpath("_toc.yaml"))

    # Remove the manifest.json file.
    site_path.joinpath("manifest.json").unlink()

    # Add the prettyprint class to code snippets
    prepare.add_prettyprint(site_path)

    log.success(f"Done building HTML for {blob_name}. Starting upload...")

    # Reuse the same docs.metadata file. The original docfx- prefix is a
    # command-line option used when uploading, not part of docs.metadata.
    shutil.copy(metadata_path, site_path)

    return tmp_path, metadata, site_path
Code Example #4
def upload_yaml(cwd, credentials, test_bucket):
    # Upload DocFX YAML to test with.
    shell.run(
        [
            "docuploader",
            "upload",
            ".",
            f"--credentials={credentials}",
            f"--staging-bucket={test_bucket}",
            "--destination-prefix=docfx",
        ],
        cwd=cwd,
        hide_output=False,
    )
Code Example #5
File: test_goldens.py Project: jskeet/doc-templates
def test_goldens(update_goldens, test_dir):
    build_dir = Path("testdata") / test_dir
    golden_dir = Path("testdata/goldens") / test_dir
    out_dir = build_dir / "site/api"
    # Generate!
    try:
        shell.run(
            [
                "docfx",
                "build",
                "-t",
                "default,../../third_party/docfx/templates/devsite",
            ],
            cwd=build_dir,
            hide_output=False,
        )
    except Exception as e:
        pytest.fail(f"build raised an exception: {e}")

    if update_goldens:
        shutil.rmtree(golden_dir, ignore_errors=True)
        shutil.copytree(out_dir, golden_dir, dirs_exist_ok=True)
        pytest.skip(
            "Updated goldens! Re-run the test without the --update-goldens flag."
        )

    got_files = [os.path.relpath(f, out_dir) for f in out_dir.rglob("*")]
    golden_files = [
        os.path.relpath(f, golden_dir) for f in golden_dir.rglob("*")
    ]

    assert len(got_files) == len(
        golden_files), f"got {len(got_files)} files, want {len(golden_files)}"

    (eq, neq, other) = filecmp.cmpfiles(out_dir,
                                        golden_dir,
                                        got_files,
                                        shallow=False)
    neq = [(out_dir / f).as_posix() for f in neq]
    other = [(out_dir / f).as_posix() for f in other]

    if other:
        pytest.fail(f"found unknown files (should never happen): {other}")
    if neq:
        pytest.fail(f"got files that don't match goldens: {neq}")
Code Example #6
File: test_generate.py Project: jskeet/doc-templates
def test_generate():
    build_dir = Path("testdata/python-small")
    out_dir = build_dir / "site/api"
    # Generate!
    try:
        shell.run(
            [
                "docfx",
                "build",
                "-t",
                "default,../../third_party/docfx/templates/devsite",
            ],
            cwd=build_dir,
            hide_output=False,
        )
    except Exception as e:
        pytest.fail(f"build raised an exception: {e}")

    # Note: rename of toc.html to _toc.yaml happens in doc-pipeline.
    toc_file_path = out_dir / "toc.html"
    assert toc_file_path.is_file()
    got_text = toc_file_path.read_text("utf-8")
    assert "/python/docs/reference/texttospeech/latest" in got_text
Code Example #7
def process_blob(blob, credentials, devsite_template):
    is_bucket = True
    tmp_path, metadata, site_path = build_and_format(blob, is_bucket, devsite_template)

    # Use the input blob name as the name of the xref file to avoid collisions.
    # The input blob has a "docfx-" prefix; make sure to remove it.
    xrefmap = site_path.joinpath("xrefmap.yml")
    xrefmap_lines = xrefmap.read_text().splitlines()
    # The baseUrl must start with a scheme and domain. With no scheme, docfx
    # assumes it's a file:// link.
    base_url = (
        f"baseUrl: https://cloud.google.com/{metadata.language}/docs/reference/"
        + f"{metadata.name}/latest/"
    )
    # Insert base_url after the YamlMime first line.
    xrefmap_lines.insert(1, base_url)
    xrefmap.write_text("\n".join(xrefmap_lines))

    xref_blob_name_base = blob.name[len("docfx-") :]
    xref_blob = blob.bucket.blob(f"{XREFS_DIR_NAME}/{xref_blob_name_base}.yml")
    xref_blob.upload_from_filename(filename=xrefmap)

    shell.run(
        [
            "docuploader",
            "upload",
            ".",
            f"--credentials={credentials}",
            f"--staging-bucket={blob.bucket.name}",
        ],
        cwd=site_path,
        hide_output=False,
    )

    shutil.rmtree(tmp_path)

    log.success(f"Done with {blob.name}!")
Code Example #8
def test_generate(test_dir, tmpdir):
    build_dir = tmpdir.join("python-small")
    out_dir = build_dir.join("site/api")
    # Generate!
    try:
        shell.run(
            [
                "docfx",
                "build",
                "-t",
                # Template path depends on the test_dir fixture setup.
                "default,../devsite",
            ],
            cwd=build_dir,
            hide_output=False,
        )
    except Exception as e:
        pytest.fail(f"build raised an exception: {e}")

    # Note: rename of toc.html to _toc.yaml happens in doc-pipeline.
    toc_file_path = out_dir.join("toc.html")
    assert toc_file_path.isfile()
    got_text = toc_file_path.read_text("utf-8")
    assert "/python/docs/reference/texttospeech/latest" in got_text
Code Example #9
def decompress(archive: str, destination: str) -> subprocess.CompletedProcess:
    """Decompress the given tarfile to the destination."""
    # Note: we don't use the stdlib's "tarfile" module for performance reasons.
    # While it can handle extracting tarfiles, it's not as efficient on large
    # numbers of files as the tar command.
    return shell.run(
        [
            "tar",
            "--extract",
            f"--directory={destination}",
            f"--file={archive}",
            "--gzip",
            "--verbose",
        ],
        hide_output=True,
    )
Code Example #10
def compress(directory: str, destination: str) -> subprocess.CompletedProcess:
    """Compress the given directory into the tarfile at destination."""
    # Note: we don't use the stdlib's "tarfile" module for performance reasons.
    # While it can handle creating tarfiles, it's not as efficient on large
    # numbers of files as the tar command.
    return shell.run(
        [
            "tar",
            "--create",
            f"--directory={directory}",
            f"--file={destination}",
            # Treat a colon in the filename as part of the filename,
            # not an indication of a remote file. This is required in order to
            # handle canonical filenames on Windows.
            "--force-local",
            "--gzip",
            "--verbose",
            ".",
        ],
        hide_output=False,
    )
Code Example #11
File: test_generate.py Project: jskeet/doc-pipeline
def test_generate(yaml_dir, tmpdir):
    test_bucket = os.environ.get("TEST_BUCKET")
    if not test_bucket:
        pytest.skip("Must set TEST_BUCKET")

    credentials = os.environ.get("GOOGLE_APPLICATION_CREDENTIALS")
    if not credentials:
        pytest.skip("Must set GOOGLE_APPLICATION_CREDENTIALS")

    parsed_credentials = service_account.Credentials.from_service_account_file(
        credentials
    )
    storage_client = storage.Client(
        project=parsed_credentials.project_id, credentials=parsed_credentials
    )

    # Clean up any previous test data.
    yaml_blob_name = "docfx-python-doc-pipeline-test-2.1.1.tar.gz"
    html_blob_name = "python-doc-pipeline-test-2.1.1.tar.gz"
    bucket = storage_client.get_bucket(test_bucket)
    yaml_blob = bucket.blob(yaml_blob_name)
    html_blob = bucket.blob(html_blob_name)
    if yaml_blob.exists():
        yaml_blob.delete()
    if html_blob.exists():
        html_blob.delete()

    start_blobs = list(storage_client.list_blobs(test_bucket))

    # Upload DocFX YAML to test with.
    shell.run(
        [
            "docuploader",
            "upload",
            ".",
            f"--credentials={credentials}",
            f"--staging-bucket={test_bucket}",
            "--destination-prefix=docfx",
        ],
        cwd=yaml_dir,
        hide_output=False,
    )

    # Make sure docuploader succeeded.
    assert len(list(storage_client.list_blobs(test_bucket))) == len(start_blobs) + 1

    # Generate!
    try:
        generate.build_new_docs(test_bucket, credentials)
    except Exception as e:
        pytest.fail(f"build_new_docs raised an exception: {e}")

    # Verify the results.
    blobs = list(storage_client.list_blobs(test_bucket))
    assert len(blobs) == len(start_blobs) + 2

    assert html_blob.exists()

    tar_path = tmpdir.join("out.tgz")
    html_blob.download_to_filename(tar_path)
    tar.decompress(tar_path, tmpdir)
    assert tmpdir.join("docs.metadata").isfile()

    # Check _rootPath and docs.metadata parsing worked.
    toc_file_path = tmpdir.join("_toc.yaml")
    assert toc_file_path.isfile()
    got_text = toc_file_path.read_text("utf-8")
    # See testdata/docs.metadata.
    assert "/python/docs/reference/doc-pipeline-test/latest" in got_text

    # Check the template worked.
    html_file_path = tmpdir.join("google.api.customhttppattern.html")
    assert html_file_path.isfile()
    got_text = html_file_path.read_text("utf-8")
    assert "devsite" in got_text
    assert "/python/_book.yaml" in got_text

    # Force regeneration and verify the timestamp is different.
    html_blob = bucket.get_blob(html_blob_name)
    t1 = html_blob.updated
    generate.build_all_docs(test_bucket, credentials)
    html_blob = bucket.get_blob(html_blob_name)
    t2 = html_blob.updated
    assert t1 != t2

    # Force regeneration of a single doc and verify timestamp.
    generate.build_one_doc(test_bucket, yaml_blob_name, credentials)
    html_blob = bucket.get_blob(html_blob_name)
    t3 = html_blob.updated
    assert t2 != t3
Code Example #12
File: generate.py Project: jskeet/doc-pipeline
def process_blob(blob, credentials, devsite_template):
    log.info(f"Processing {blob.name}...")

    tmp_path = pathlib.Path("tmp")
    api_path = tmp_path.joinpath("obj/api")
    output_path = tmp_path.joinpath("site/api")

    api_path.mkdir(parents=True, exist_ok=True)
    tar_filename = tmp_path.joinpath(blob.name)
    tar_filename.parent.mkdir(parents=True, exist_ok=True)

    blob.download_to_filename(tar_filename)
    log.info(
        f"Downloaded gs://{blob.bucket.name}/{blob.name} to {tar_filename}")

    tar.decompress(tar_filename, api_path)
    log.info(f"Decompressed {blob.name} in {api_path}")

    metadata_path = api_path.joinpath("docs.metadata")
    metadata = metadata_pb2.Metadata()
    text_format.Merge(metadata_path.read_text(), metadata)
    pkg = metadata.name

    with open(tmp_path.joinpath("docfx.json"), "w") as f:
        f.write(
            DOCFX_JSON_TEMPLATE.format(
                **{
                    "package": pkg,
                    "path":
                    f"/{metadata.language}/docs/reference/{pkg}/latest",
                    "project_path": f"/{metadata.language}/",
                }))
    log.info("Wrote docfx.json")

    # TODO: remove this once _toc.yaml is no longer created.
    if pathlib.Path(api_path.joinpath("_toc.yaml")).is_file():
        shutil.move(api_path.joinpath("_toc.yaml"),
                    api_path.joinpath("toc.yml"))

    log.info(f"Running `docfx build` for {blob.name}...")
    shell.run(
        ["docfx", "build", "-t", f"default,{devsite_template.absolute()}"],
        cwd=tmp_path,
        hide_output=False,
    )

    # Rename the output TOC file to be _toc.yaml to match the expected
    # format.
    shutil.move(output_path.joinpath("toc.html"),
                output_path.joinpath("_toc.yaml"))

    log.success(f"Done building HTML for {blob.name}. Starting upload...")

    # Reuse the same docs.metadata file. The original docfx- prefix is a
    # command-line option used when uploading, not part of docs.metadata.
    shutil.copyfile(api_path.joinpath("docs.metadata"),
                    output_path.joinpath("docs.metadata"))

    shell.run(
        [
            "docuploader",
            "upload",
            ".",
            f"--credentials={credentials}",
            f"--staging-bucket={blob.bucket.name}",
        ],
        cwd=output_path,
        hide_output=False,
    )
    shutil.rmtree(tmp_path)

    log.success(f"Done with {blob.name}!")
Code Example #13
def test_goldens(update_goldens, test_dir):
    build_dir = Path("testdata") / test_dir
    golden_dir = Path("testdata/goldens") / test_dir
    out_dir = build_dir / "site/api"
    # Generate!
    try:
        shell.run(
            [
                "docfx",
                "build",
                "-t",
                "../../third_party/docfx/templates/devsite",
            ],
            cwd=build_dir,
            hide_output=False,
        )
    except Exception as e:
        pytest.fail(f"build raised an exception: {e}")

    if update_goldens:
        shutil.rmtree(golden_dir, ignore_errors=True)
        shutil.copytree(out_dir, golden_dir, dirs_exist_ok=True)
        pytest.skip(
            "Updated goldens! Re-run the test without the --update-goldens flag."
        )

    got_files = [os.path.relpath(f, out_dir) for f in out_dir.rglob("*")]
    golden_files = [
        os.path.relpath(f, golden_dir) for f in golden_dir.rglob("*")
    ]

    nl = "\n"
    extra = "Extra:\n" + "\n+ ".join(
        [f for f in got_files if f not in golden_files])
    missing = "Missing:\n" + "\n- ".join(
        [f for f in golden_files if f not in got_files])

    assert len(got_files) == len(
        golden_files
    ), f"got {len(got_files)} files, want {len(golden_files)}:{nl}{extra}{nl}{missing}"

    (eq, neq, other) = filecmp.cmpfiles(out_dir,
                                        golden_dir,
                                        got_files,
                                        shallow=False)
    other = [(out_dir / f).as_posix() for f in other]

    if other:
        pytest.fail(f"found unknown files (should never happen): {other}")
    if neq:
        diff = ""
        for f in neq:
            with open(out_dir / f) as out:
                with open(golden_dir / f) as gold:
                    out_lines = out.readlines()
                    gold_lines = gold.readlines()
                    diff += "\n" + "\n".join(
                        difflib.context_diff(
                            gold_lines,
                            out_lines,
                            fromfile=str(golden_dir / f),
                            tofile=str(out_dir / f),
                        ))

        pytest.fail(f"got files that don't match goldens: {diff}")