Example #1
def write(outfile: str = "synth.metadata") -> None:
    """Writes out the metadata to a file."""
    jsonified = google.protobuf.json_format.MessageToJson(_metadata)

    with open(outfile, "w") as fh:
        fh.write(jsonified)

    log.debug(f"Wrote metadata to {outfile}.")
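A minimal usage sketch for the function above, assuming it lives in synthtool's metadata module and that the module-level _metadata protobuf message has already been populated earlier in the synth run (both are assumptions, not shown in this snippet):

# Hypothetical caller; `metadata` stands for the module that defines write().
from synthtool import metadata

metadata.write("synth.metadata")  # serializes the collected metadata to JSON on disk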
Example #2
    def _clone_googleapis(self):
        if self._googleapis is not None:
            return self._googleapis

        if LOCAL_GOOGLEAPIS:
            self._googleapis = Path(LOCAL_GOOGLEAPIS).expanduser()
            log.debug(f"Using local googleapis at {self._googleapis}")

        else:
            log.debug("Cloning googleapis.")
            self._googleapis = git.clone(GOOGLEAPIS_URL)

        return self._googleapis
Example #3
    def _ensure_dependencies_installed(self):
        log.debug("Ensuring dependencies.")

        dependencies = ["docker", "git"]
        failed_dependencies = []
        for dependency in dependencies:
            return_code = shell.run(["which", dependency],
                                    check=False).returncode
            if return_code:
                failed_dependencies.append(dependency)

        if failed_dependencies:
            raise EnvironmentError(
                f"Dependencies missing: {', '.join(failed_dependencies)}")
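The check above shells out to `which`, which assumes a POSIX environment. A hedged, more portable sketch of the same idea using the standard library's shutil.which (a substitute technique, not this project's code):

import shutil

def ensure_dependencies_installed(dependencies=("docker", "git")) -> None:
    # shutil.which returns None when an executable cannot be found on PATH.
    missing = [dep for dep in dependencies if shutil.which(dep) is None]
    if missing:
        raise EnvironmentError(f"Dependencies missing: {', '.join(missing)}")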
Example #4
    def _clone_googleapis_private(self):
        if self._googleapis_private is not None:
            return self._googleapis_private

        if LOCAL_GOOGLEAPIS:
            self._googleapis_private = Path(LOCAL_GOOGLEAPIS).expanduser()
            log.debug(
                f"Using local googleapis at {self._googleapis_private} for googleapis-private"
            )

        else:
            log.debug("Cloning googleapis-private.")
            self._googleapis_private = git.clone(GOOGLEAPIS_PRIVATE_URL)

        return self._googleapis_private
Example #5
def _get_sample_readme_metadata(sample_dir: Path) -> dict:
    sample_readme = sample_dir / "README.rst.in"

    sample_metadata = {}
    if sample_readme.exists():
        requirements = str(Path(sample_dir / "requirements.txt").resolve())
        log.debug(
            f"Installing requirements at {requirements} to generate {sample_readme}"
        )
        shell.run([sys.executable, "-m", "pip", "install", "-r", requirements])

        with open(sample_readme) as f:
            sample_metadata = yaml.load(f, Loader=yaml.SafeLoader)
        for sample in sample_metadata["samples"]:
            # add absolute path to metadata so `python foo.py --help` succeeds
            sample["abs_path"] = Path(sample_dir / (sample["file"])).resolve()

    return sample_metadata
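A hedged sketch of how the returned dictionary might be consumed, assuming the README.rst.in YAML has a top-level "samples" list whose entries carry at least a "file" key, as the loop above implies (the sample_dir path is illustrative):

# Hypothetical caller of the function above.
from pathlib import Path

sample_metadata = _get_sample_readme_metadata(Path("samples"))
for sample in sample_metadata.get("samples", []):
    # abs_path was attached above so that `python <abs_path> --help` works.
    print(sample["file"], "->", sample["abs_path"])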
Example #6
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""

from synthtool import _tracked_paths
import synthtool as s
import synthtool.log as log
import synthtool.shell as shell
import synthtool.sources.git as git
import logging

logging.basicConfig(level=logging.DEBUG)

repository_url = "https://github.com/GoogleCloudPlatform/elixir-google-api.git"

log.debug(f"Cloning {repository_url}.")
repository = git.clone(repository_url, depth=1)

log.debug("Installing dependencies.")
shell.run("mix deps.get".split(), cwd=repository)
shell.run("npm install".split(), cwd=repository)

log.debug("Generating all libraries.")
shell.run("mix google_apis.generate".split(), cwd=repository)

# copy all clients
s.copy(repository / "clients")
Example #7
    def _generate_code(
        self,
        service,
        version,
        language,
        config_path=None,
        artman_output_name=None,
        private=False,
        include_protos=False,
        generator_args=None,
    ):
        # map the language to the artman argument and subdir of genfiles
        GENERATE_FLAG_LANGUAGE = {
            "python": ("python_gapic", "python"),
            "nodejs": ("nodejs_gapic", "js"),
            "ruby": ("ruby_gapic", "ruby"),
            "php": ("php_gapic", "php"),
            "java": ("java_gapic", "java"),
        }

        if language not in GENERATE_FLAG_LANGUAGE:
            raise ValueError("provided language unsupported")

        gapic_language_arg, gen_language = GENERATE_FLAG_LANGUAGE[language]

        # Determine which googleapis repo to use
        if not private:
            googleapis = self._clone_googleapis()
        else:
            googleapis = self._clone_googleapis_private()

        if googleapis is None:
            raise RuntimeError(
                f"Unable to generate {config_path}, the googleapis repository "
                "is unavailable.")

        generator_dir = LOCAL_GENERATOR
        if generator_dir is not None:
            log.debug(f"Using local generator at {generator_dir}")

        # Run the code generator.
        # $ artman --config path/to/artman_api.yaml generate python_gapic
        if config_path is None:
            config_path = (Path("google/cloud") / service /
                           f"artman_{service}_{version}.yaml")
        elif Path(config_path).is_absolute():
            config_path = Path(config_path).relative_to("/")
        else:
            config_path = Path("google/cloud") / service / Path(config_path)

        if not (googleapis / config_path).exists():
            raise FileNotFoundError(
                f"Unable to find configuration yaml file: {(googleapis / config_path)}."
            )

        log.debug(f"Running generator for {config_path}.")

        output_root = self._artman.run(
            f"googleapis/artman:{artman.ARTMAN_VERSION}",
            googleapis,
            config_path,
            gapic_language_arg,
            generator_dir=generator_dir,
            generator_args=generator_args,
        )

        # Expect the output to be in the artman-genfiles directory.
        # example: /artman-genfiles/python/speech-v1
        if artman_output_name is None:
            artman_output_name = f"{service}-{version}"
        genfiles = output_root / gen_language / artman_output_name

        if not genfiles.exists():
            raise FileNotFoundError(
                f"Unable to find generated output of artman: {genfiles}.")

        log.success(f"Generated code into {genfiles}.")

        # Get the *.protos files and put them in a protos dir in the output
        if include_protos:
            import shutil

            source_dir = googleapis / config_path.parent / version
            proto_files = source_dir.glob("**/*.proto")
            # By default, put the protos at the root in a folder named 'protos'.
            # Specific languages can be cased here to put them in a more language
            # appropriate place.
            proto_output_path = genfiles / "protos"
            if language == "python":
                # place protos alongside the *_pb2.py files
                proto_output_path = genfiles / f"google/cloud/{service}_{version}/proto"
            os.makedirs(proto_output_path, exist_ok=True)

            for i in proto_files:
                log.debug(f"Copy: {i} to {proto_output_path / i.name}")
                shutil.copyfile(i, proto_output_path / i.name)
            log.success(f"Placed proto files into {proto_output_path}.")

        metadata.add_client_destination(
            source="googleapis" if not private else "googleapis-private",
            api_name=service,
            api_version=version,
            language=language,
            generator="gapic",
            config=str(config_path),
        )

        _tracked_paths.add(genfiles)
        return genfiles
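This method is internal; a synth.py would normally reach it through a per-language helper on the GAPIC generator class. A hedged usage sketch (the class and helper names are assumptions based on common synthtool usage, not taken from this snippet):

# Hypothetical synth.py fragment; GAPICGenerator and py_library are assumed
# helpers that forward to _generate_code(service, version, "python", ...).
import synthtool as s
import synthtool.gcp as gcp

gapic = gcp.GAPICGenerator()
library = gapic.py_library("speech", "v1", include_protos=True)
s.copy(library / "google/cloud/speech_v1")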
Example #8
# Copy the repo into a temporary directory, removing the build and deps, and
# perform generation there. This is because the docker command may be a
# cross-compile whose build environment should be isolated from the current
# git clone.
with tempfile.TemporaryDirectory() as tmpdir:
    repository = pathlib.Path(tmpdir) / "repo"
    shutil.copytree(os.getcwd(), repository)
    shutil.rmtree(repository / "_build", ignore_errors=True)
    shutil.rmtree(repository / "deps", ignore_errors=True)

    image = "gcr.io/cloud-devrel-public-resources/elixir19"
    generate_command = "scripts/generate_client.sh"
    command = [
        "docker", "run", "--rm", f"-v{repository}:/workspace",
        "-v/var/run/docker.sock:/var/run/docker.sock", "-e",
        f"USER_GROUP={os.getuid()}:{os.getgid()}", "-w", "/workspace", image,
        generate_command
    ]

    if extra_args():
        command.extend(extra_args())

    log.debug(f"Running: {' '.join(command)}")

    shell.run(command, cwd=repository, hide_output=False)

    # Copy the resulting clients directory back into the git clone.
    shutil.rmtree("clients", ignore_errors=True)
    shutil.move(repository / "clients", "clients")
Example #9
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""

from synthtool import _tracked_paths
import synthtool as s
import synthtool.log as log
import synthtool.shell as shell
import synthtool.sources.git as git
import logging
import sys

logging.basicConfig(level=logging.DEBUG)

repository_url = "https://github.com/googleapis/elixir-google-api.git"

log.debug(f"Cloning {repository_url}.")
repository = git.clone(repository_url, depth=1)

image = "gcr.io/cloud-devrel-public-resources/elixir16"
generate_command = "scripts/generate_client.sh"
command = f"docker run --rm -v{repository}:/workspace -v/var/run/docker.sock:/var/run/docker.sock -w /workspace {image} {generate_command}"

if len(sys.argv) == 2:
    command = command + " " + sys.argv[1]

log.debug(f"Running in docker: {command}")

shell.run(command.split(), cwd=repository)

# copy all clients
s.copy(repository / "clients")
Example #10
    def _generate_code(
        self,
        service: str,
        version: str,
        language: str,
        *,
        private: bool = False,
        proto_path: Union[str, Path] = None,
        extra_proto_files: List[str] = [],
        output_dir: Union[str, Path] = None,
        generator_version: str = "latest",
        generator_args: Mapping[str, str] = None,
    ):
        # Determine which googleapis repo to use
        if not private:
            googleapis = self._clone_googleapis()
        else:
            googleapis = self._clone_googleapis_private()

        # Sanity check: We should have a googleapis repo; if we do not,
        # something went wrong, and we should abort.
        if googleapis is None:
            raise RuntimeError(
                f"Unable to generate {service}, the googleapis repository "
                "is unavailable.")

        # Pull the code generator for the requested language.
        # If a code generator version was specified, honor that.
        log.debug(
            f"Pulling Docker image: gapic-generator-{language}:{generator_version}"
        )
        shell.run(
            [
                "docker",
                "pull",
                f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}",
            ],
            hide_output=False,
        )

        # Determine where the protos we are generating actually live.
        # We can sometimes (but not always) determine this from the service
        # and version; in other cases, the user must provide it outright.
        if proto_path:
            proto_path = Path(proto_path)
            if proto_path.is_absolute():
                proto_path = proto_path.relative_to("/")
        else:
            proto_path = Path("google/cloud") / service / version

        # Sanity check: Do we have protos where we think we should?
        if not (googleapis / proto_path).exists():
            raise FileNotFoundError(
                f"Unable to find directory for protos: {(googleapis / proto_path)}."
            )
        if not tuple((googleapis / proto_path).glob("*.proto")):
            raise FileNotFoundError(
                f"Directory {(googleapis / proto_path)} exists, but no protos found."
            )

        # Ensure the desired output directory exists.
        # If none was provided, create a temporary directory.
        if not output_dir:
            output_dir = tempfile.mkdtemp()
        output_dir = Path(output_dir).resolve()

        # The time has come, the walrus said, to talk of actually running
        # the code generator.
        sep = os.path.sep

        # Try to figure out the user ID in a cross-platform way.
        # If there is no `os.getuid()`, fall back to `getpass.getuser()`.
        getuid = getattr(os, "getuid", None)
        if getuid:
            user = str(getuid())
        else:
            user = getpass.getuser()

        docker_run_args = [
            "docker",
            "run",
            "--mount",
            f"type=bind,source={googleapis / proto_path}{sep},destination={Path('/in') / proto_path}{sep},readonly",
            "--mount",
            f"type=bind,source={output_dir}{sep},destination={Path('/out')}{sep}",
            "--rm",
            "--user",
            user,
        ]

        # Process extra proto files, e.g. google/cloud/common_resources.proto,
        # if they are required by this API.
        # First, bind mount all the extra proto files into the container.
        for proto in extra_proto_files:
            source_proto = googleapis / Path(proto)
            if not source_proto.exists():
                raise FileNotFoundError(
                    f"Unable to find extra proto file: {source_proto}.")
            docker_run_args.extend([
                "--mount",
                f"type=bind,source={source_proto},destination={Path('/in') / proto},readonly",
            ])

        docker_run_args.append(
            f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}"
        )

        # Populate any additional CLI arguments provided for Docker.
        if generator_args:
            for key, value in generator_args.items():
                docker_run_args.append(f"--{key}")
                docker_run_args.append(value)

        log.debug(f"Generating code for: {proto_path}.")
        shell.run(docker_run_args)

        # Sanity check: Does the output location have code in it?
        # If not, complain.
        if not tuple(output_dir.iterdir()):
            raise RuntimeError(
                f"Code generation seemed to succeed, but {output_dir} is empty."
            )

        # Huzzah, it worked.
        log.success(f"Generated code into {output_dir}.")

        # Record this in the synthtool metadata.
        metadata.add_client_destination(
            source="googleapis" if not private else "googleapis-private",
            api_name=service,
            api_version=version,
            language=language,
            generator=f"gapic-generator-{language}",
        )

        _tracked_paths.add(output_dir)
        return output_dir
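As with the artman variant, a synth.py would typically drive this through a language-specific helper rather than calling _generate_code directly. A hedged sketch of such a call (class and helper names are assumptions):

# Hypothetical synth.py fragment for the Docker-based generator path.
import synthtool as s
from synthtool.gcp import GAPICMicrogenerator

gapic = GAPICMicrogenerator()
# py_library is assumed to forward to _generate_code(service, version, "python", ...).
library = gapic.py_library("texttospeech", "v1")
s.copy(library, excludes=["setup.py", "docs/index.rst"])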
Example #11
    def _install_artman(self):
        log.debug("Pulling artman image.")
        shell.run(["docker", "pull", f"googleapis/artman:{ARTMAN_VERSION}"],
                  hide_output=False)
Example #12
def cleanup():
    for path in _tempdirs:
        shutil.rmtree(str(path))
    log.debug(f"Cleaned up {len(_tempdirs)} temporary directories.")
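A hedged sketch of how this cleanup hook might be wired up, assuming _tempdirs is a module-level list that a temporary-directory helper appends to (the helper below is hypothetical; only cleanup() appears in this snippet):

import atexit
import tempfile
from pathlib import Path

_tempdirs: list = []

def make_tempdir() -> Path:
    # Hypothetical helper: create a temporary directory and register it so
    # that cleanup() removes it when the process exits.
    path = Path(tempfile.mkdtemp())
    _tempdirs.append(path)
    return path

atexit.register(cleanup)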
Example #13
    def _include_samples(
        self,
        language: str,
        version: str,
        genfiles: Path,
        googleapis_service_dir: Path,
        samples_root_dir: Path = None,
        samples_resources_dir: Path = None,
    ):
        """Include code samples and supporting resources in generated output.

        Resulting directory structure in generated output:
            samples/
            ├── resources
            │   ├── example_text_file.txt
            │   └── example_data.csv
            └── v1/
                ├── sample_one.py
                ├── sample_two.py
                └── test/
                    ├── samples.manifest.yaml
                    ├── sample_one.test.yaml
                    └── sample_two.test.yaml

        Samples are included in the genfiles output of the generator.

        Sample tests are defined in googleapis:
            {service}/{version}/samples/test/*.test.yaml

        Sample resources are declared in {service}/sample_resources.yaml
        which includes a list of files with public gs:// URIs for download.

        Sample resources are files needed to run code samples or system tests.
        Synth keeps resources in sync by always pulling down the latest version.
        It is recommended to store resources in the `cloud-samples-data` bucket.

        Sample manifest is a generated file which defines invocation commands
        for each code sample (used by sample-tester to invoke samples).
        """

        if samples_root_dir is None:
            samples_root_dir = genfiles / "samples"

        if samples_resources_dir is None:
            samples_resources_dir = samples_root_dir / "resources"

        samples_version_dir = samples_root_dir / version

        # Some languages capitalize their `V` prefix for version numbers
        if not samples_version_dir.is_dir():
            samples_version_dir = samples_root_dir / version.capitalize()

        # Do not proceed if genfiles does not include samples/{version} dir.
        if not samples_version_dir.is_dir():
            return None

        samples_test_dir = samples_version_dir / "test"
        samples_manifest_yaml = samples_test_dir / "samples.manifest.yaml"

        googleapis_samples_dir = googleapis_service_dir / version / "samples"
        googleapis_resources_yaml = googleapis_service_dir / "sample_resources.yaml"

        # Copy sample tests from googleapis {service}/{version}/samples/*.test.yaml
        # into generated output as samples/{version}/test/*.test.yaml
        test_files = googleapis_samples_dir.glob("**/*.test.yaml")
        os.makedirs(samples_test_dir, exist_ok=True)
        for i in test_files:
            log.debug(f"Copy: {i} to {samples_test_dir / i.name}")
            shutil.copyfile(i, samples_test_dir / i.name)

        # Download sample resources from sample_resources.yaml storage URIs.
        #
        #  sample_resources:
        #  - uri: gs://bucket/the/file/path.csv
        #    description: Description of this resource
        #
        # Code follows happy path. An error is desirable if YAML is invalid.
        if googleapis_resources_yaml.is_file():
            with open(googleapis_resources_yaml, "r") as f:
                resources_data = yaml.load(f, Loader=yaml.SafeLoader)
            resource_list = resources_data.get("sample_resources")
            for resource in resource_list:
                uri = resource.get("uri")
                if uri.startswith("gs://"):
                    uri = uri.replace("gs://",
                                      "https://storage.googleapis.com/")
                response = requests.get(uri, allow_redirects=True)
                download_path = samples_resources_dir / os.path.basename(uri)
                os.makedirs(samples_resources_dir, exist_ok=True)
                log.debug(f"Download {uri} to {download_path}")
                with open(download_path, "wb") as output:  # type: ignore
                    output.write(response.content)

        # Generate manifest file at samples/{version}/test/samples.manifest.yaml
        # Includes a reference to every sample (via its "region tag" identifier)
        # along with structured instructions on how to invoke that code sample.
        relative_manifest_path = str(
            samples_manifest_yaml.relative_to(samples_root_dir))

        LANGUAGE_EXECUTABLES = {
            "nodejs": "node",
            "php": "php",
            "python": "python3",
            "ruby": "bundle exec ruby",
        }
        if language not in LANGUAGE_EXECUTABLES:
            log.info("skipping manifest gen")
            return None

        manifest_arguments = [
            "gen-manifest",
            f"--env={language}",
            f"--bin={LANGUAGE_EXECUTABLES[language]}",
            f"--output={relative_manifest_path}",
            "--chdir={@manifest_dir}/../..",
        ]

        for code_sample in samples_version_dir.glob("*"):
            sample_path = str(code_sample.relative_to(samples_root_dir))
            if os.path.isfile(code_sample):
                manifest_arguments.append(sample_path)
        try:
            log.debug(f"Writing samples manifest {manifest_arguments}")
            shell.run(manifest_arguments, cwd=samples_root_dir)
        except (subprocess.CalledProcessError, FileNotFoundError):
            log.warning(
                "gen-manifest failed (sample-tester may not be installed)")
Example #14
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""

import synthtool.log as log

log.debug("Synthtool disabled for elixir")
Example #15
    def _generate_code(
        self,
        service,
        version,
        language,
        config_path=None,
        artman_output_name=None,
        private=False,
    ):
        # map the language to the artman argument and subdir of genfiles
        GENERATE_FLAG_LANGUAGE = {
            "python": ("python_gapic", "python"),
            "nodejs": ("nodejs_gapic", "js"),
            "ruby": ("ruby_gapic", "ruby"),
            "php": ("php_gapic", "php"),
            "java": ("java_gapic", "java"),
        }

        if language not in GENERATE_FLAG_LANGUAGE:
            raise ValueError("provided language unsupported")

        gapic_language_arg, gen_language = GENERATE_FLAG_LANGUAGE[language]

        # Determine which googleapis repo to use
        if not private:
            googleapis = self._clone_googleapis()
        else:
            googleapis = self._clone_googleapis_private()

        if googleapis is None:
            raise RuntimeError(
                f"Unable to generate {config_path}, the googleapis repository "
                "is unavailable.")

        # Run the code generator.
        # $ artman --config path/to/artman_api.yaml generate python_gapic
        if config_path is None:
            config_path = (Path("google/cloud") / service /
                           f"artman_{service}_{version}.yaml")
        elif Path(config_path).is_absolute():
            config_path = Path(config_path).relative_to("/")
        else:
            config_path = Path("google/cloud") / service / Path(config_path)

        if not (googleapis / config_path).exists():
            raise FileNotFoundError(
                f"Unable to find configuration yaml file: {(googleapis / config_path)}."
            )

        log.debug(f"Running generator for {config_path}.")

        output_root = self._artman.run(
            f"googleapis/artman:{artman.ARTMAN_VERSION}",
            googleapis,
            config_path,
            gapic_language_arg,
        )

        # Expect the output to be in the artman-genfiles directory.
        # example: /artman-genfiles/python/speech-v1
        if artman_output_name is None:
            artman_output_name = f"{service}-{version}"
        genfiles = output_root / gen_language / artman_output_name

        if not genfiles.exists():
            raise FileNotFoundError(
                f"Unable to find generated output of artman: {genfiles}.")

        log.success(f"Generated code into {genfiles}.")

        _tracked_paths.add(genfiles)
        return genfiles
Example #16
    def _generate_code(
        self,
        service: str,
        version: str,
        language: str,
        *,
        private: bool = False,
        proto_path: Union[str, Path] = None,
        output_dir: Union[str, Path] = None,
        generator_version: str = "latest",
        generator_args: Mapping[str, str] = None,
    ):
        # Determine which googleapis repo to use
        if not private:
            googleapis = self._clone_googleapis()
        else:
            googleapis = self._clone_googleapis_private()

        # Sanity check: We should have a googleapis repo; if we do not,
        # something went wrong, and we should abort.
        if googleapis is None:
            raise RuntimeError(
                f"Unable to generate {service}, the googleapis repository "
                "is unavailable.")

        # Pull the code generator for the requested language.
        # If a code generator version was specified, honor that.
        log.debug(
            f"Pulling Docker image: gapic-generator-{language}:{generator_version}"
        )
        shell.run(
            [
                "docker",
                "pull",
                f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}",
            ],
            hide_output=False,
        )

        # Determine where the protos we are generating actually live.
        # We can sometimes (but not always) determine this from the service
        # and version; in other cases, the user must provide it outright.
        if proto_path:
            proto_path = Path(proto_path)
            if proto_path.is_absolute():
                proto_path = proto_path.relative_to("/")
        else:
            proto_path = Path("google/cloud") / service / version

        # Sanity check: Do we have protos where we think we should?
        if not (googleapis / proto_path).exists():
            raise FileNotFoundError(
                f"Unable to find directory for protos: {(googleapis / proto_path)}."
            )
        if not tuple((googleapis / proto_path).glob("*.proto")):
            raise FileNotFoundError(
                f"Directory {(googleapis / proto_path)} exists, but no protos found."
            )

        # Ensure the desired output directory exists.
        # If none was provided, create a temporary directory.
        if not output_dir:
            output_dir = tempfile.mkdtemp()
        output_dir = Path(output_dir).resolve()

        # The time has come, the walrus said, to talk of actually running
        # the code generator.
        log.debug(f"Generating code for: {proto_path}.")
        sep = os.path.sep
        shell.run([
            "docker",
            "run",
            "--mount",
            f"type=bind,source={googleapis / proto_path}{sep},destination={Path('/in') / proto_path}{sep},readonly",
            "--mount",
            f"type=bind,source={output_dir}{sep},destination={Path('/out')}{sep}",
            "--rm",
            "--user",
            str(os.getuid()),
            f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}",
        ])

        # Sanity check: Does the output location have code in it?
        # If not, complain.
        if not tuple(output_dir.iterdir()):
            raise RuntimeError(
                f"Code generation seemed to succeed, but {output_dir} is empty."
            )

        # Huzzah, it worked.
        log.success(f"Generated code into {output_dir}.")

        # Record this in the synthtool metadata.
        metadata.add_client_destination(
            source="googleapis" if not private else "googleapis-private",
            api_name=service,
            api_version=version,
            language=language,
            generator=f"gapic-generator-{language}",
        )

        _tracked_paths.add(output_dir)
        return output_dir
Example #17
    def _generate_code(
        self,
        service: str,
        version: str,
        language: str,
        *,
        private: bool = False,
        proto_path: Union[str, Path] = None,
        output_dir: Union[str, Path] = None,
        bazel_target: str = None,
    ):
        # Determine which googleapis repo to use
        if not private:
            googleapis = self._clone_googleapis()
        else:
            googleapis = self._clone_googleapis_private()

        # Sanity check: We should have a googleapis repo; if we do not,
        # something went wrong, and we should abort.
        if googleapis is None:
            raise RuntimeError(
                f"Unable to generate {service}, the googleapis repository "
                "is unavailable."
            )

        # Determine where the protos we are generating actually live.
        # We can sometimes (but not always) determine this from the service
        # and version; in other cases, the user must provide it outright.
        if proto_path:
            proto_path = Path(proto_path)
            if proto_path.is_absolute():
                proto_path = proto_path.relative_to("/")
        else:
            proto_path = Path("google/cloud") / service / version

        # Determine bazel target based on per-language patterns
        # Java:    google-cloud-{{assembly_name}}-{{version}}-java
        # Go:      gapi-cloud-{{assembly_name}}-{{version}}-go
        # Python:  {{assembly_name}}-{{version}}-py
        # PHP:     google-cloud-{{assembly_name}}-{{version}}-php
        # Node.js: {{assembly_name}}-{{version}}-nodejs
        # Ruby:    google-cloud-{{assembly_name}}-{{version}}-ruby
        # C#:      google-cloud-{{assembly_name}}-{{version}}-csharp
        if bazel_target is None:
            parts = list(proto_path.parts)
            while len(parts) > 0 and parts[0] != "google":
                parts.pop(0)
            if len(parts) == 0:
                raise RuntimeError(
                    f"Cannot determine bazel_target from proto_path {proto_path}. "
                    "Please set bazel_target explicitly."
                )
            if language == "python":
                suffix = f"{service}-{version}-py"
            elif language == "nodejs":
                suffix = f"{service}-{version}-nodejs"
            elif language == "go":
                suffix = f"gapi-{'-'.join(parts[1:])}-go"
            else:
                suffix = f"{'-'.join(parts)}-{language}"
            bazel_target = f"//{os.path.sep.join(parts)}:{suffix}"

        # Sanity check: Do we have protos where we think we should?
        if not (googleapis / proto_path).exists():
            raise FileNotFoundError(
                f"Unable to find directory for protos: {(googleapis / proto_path)}."
            )
        if not tuple((googleapis / proto_path).glob("*.proto")):
            raise FileNotFoundError(
                f"Directory {(googleapis / proto_path)} exists, but no protos found."
            )
        if not (googleapis / proto_path / "BUILD.bazel").exists():
            raise FileNotFoundError(
                f"File {(googleapis / proto_path / 'BUILD.bazel')} does not exist."
            )

        # Ensure the desired output directory exists.
        # If none was provided, create a temporary directory.
        if not output_dir:
            output_dir = tempfile.mkdtemp()
        output_dir = Path(output_dir).resolve()

        # Let's build some stuff now.
        cwd = os.getcwd()
        os.chdir(str(googleapis))

        bazel_run_args = ["bazel", "build", bazel_target]

        log.debug(f"Generating code for: {proto_path}.")
        shell.run(bazel_run_args)

        # We've got tar file!
        # its location: bazel-bin/google/cloud/language/v1/language-v1-nodejs.tar.gz
        # bazel_target:         //google/cloud/language/v1:language-v1-nodejs
        tar_file = (
            f"bazel-bin{os.path.sep}{bazel_target[2:].replace(':', os.path.sep)}.tar.gz"
        )

        tar_run_args = [
            "tar",
            "-C",
            str(output_dir),
            "--strip-components=1",
            "-xzf",
            tar_file,
        ]
        shell.run(tar_run_args)

        os.chdir(cwd)

        # Sanity check: Does the output location have code in it?
        # If not, complain.
        if not tuple(output_dir.iterdir()):
            raise RuntimeError(
                f"Code generation seemed to succeed, but {output_dir} is empty."
            )

        # Huzzah, it worked.
        log.success(f"Generated code into {output_dir}.")

        # Record this in the synthtool metadata.
        metadata.add_client_destination(
            source="googleapis" if not private else "googleapis-private",
            api_name=service,
            api_version=version,
            language=language,
            generator="bazel",
        )

        _tracked_paths.add(output_dir)
        return output_dir
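A hedged usage sketch for the Bazel path, following the Python target pattern documented in the comments above (class and helper names are assumptions):

# Hypothetical synth.py fragment for the Bazel-based generator path.
import synthtool as s
from synthtool.gcp import GAPICBazel

gapic = GAPICBazel()
# The explicit bazel_target follows the documented Python pattern
# {assembly_name}-{version}-py.
library = gapic.py_library(
    service="language",
    version="v1",
    bazel_target="//google/cloud/language/v1:language-v1-py",
)
s.copy(library)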
Example #18
"""This script is used to synthesize generated parts of this library."""

from synthtool.__main__ import extra_args
from synthtool import _tracked_paths
import synthtool as s
import synthtool.log as log
import synthtool.shell as shell
import synthtool.sources.git as git
import logging
import sys

logging.basicConfig(level=logging.DEBUG)

repository_url = "https://github.com/googleapis/elixir-google-api.git"

log.debug(f"Cloning {repository_url}.")
repository = git.clone(repository_url, depth=1)
shell.run(["git", "clean", "-fdx"], cwd=repository / "clients")

image = "gcr.io/cloud-devrel-public-resources/elixir16"
generate_command = "scripts/generate_client.sh"
command = [
    "docker", "run", "--rm", f"-v{repository}:/workspace",
    "-v/var/run/docker.sock:/var/run/docker.sock", "-w", "/workspace", image,
    generate_command
]

if extra_args():
    command.extend(extra_args())

log.debug(f"Running: {' '.join(command)}")
Example #19
import synthtool.log as log
import synthtool.shell as shell
import synthtool.sources.git as git
import logging
import re
import sys
from pathlib import Path
from packaging import version

logging.basicConfig(level=logging.DEBUG)

VERSION_REGEX = r"([^\.]*)\.(.+)\.json$"

TEMPLATE_VERSIONS = [
    "default",
]
discovery_url = "https://github.com/googleapis/discovery-artifact-manager.git"

repository = Path('.')

log.debug(f"Cloning {discovery_url}.")
discovery = git.clone(discovery_url)

log.debug("Cleaning output directory.")
shell.run("rm -rf .cache".split(), cwd=repository)

log.debug("Installing dependencies.")
shell.run(
    "python2 -m pip install -e generator/ --user".split(),
    cwd=repository
)

def generate_service(disco: str):
    m = re.search(VERSION_REGEX, disco)
    name = m.group(1)
    version = m.group(2)
Example #20
    def _clone_discovery_artifact_manager(self):
        log.debug("Cloning discovery-artifact-manager.")
        self.discovery_artifact_manager = git.clone(
            DISCOVERY_ARTIFACT_MANAGER_URL)