Beispiel #1
0
def compile_protos(hide_output=False):
    """Compile protos into .json, .js, and .d.ts files.

    Uses the ``compileProtos`` script shipped with google-gax, invoked
    through npx.

    Args:
        hide_output: when True, suppress the subprocess output.
    """
    logger.debug("Compiling protos...")
    compile_cmd = ["npx", "compileProtos", "src"]
    shell.run(compile_cmd, hide_output=hide_output)
Beispiel #2
0
def fix(hide_output=False):
    """Fix the formatting in the current Node.js library.

    Runs the ``prelint`` npm script first (best-effort — it installs
    extra dependencies for samples and is allowed to fail), then runs
    the ``fix`` script.

    Args:
        hide_output: when True, suppress the subprocess output.
    """
    logger.debug("Running prelint...")
    # prelint may legitimately fail; don't abort the fix run because of it.
    shell.run(["npm", "run", "prelint"], check=False, hide_output=hide_output)
    logger.debug("Running fix...")
    shell.run(["npm", "run", "fix"], hide_output=hide_output)
Beispiel #3
0
def compile_protos_hermetic(hide_output=False):
    """Compile protos into .json, .js, and .d.ts files.

    Uses the pinned ``compileProtos`` binary installed under
    ``_TOOLS_DIRECTORY`` instead of resolving it via npx.

    Args:
        hide_output: when True, suppress the subprocess output.
    """
    logger.debug("Compiling protos...")
    compiler = f"{_TOOLS_DIRECTORY}/node_modules/.bin/compileProtos"
    shell.run([compiler, "src"], check=True, hide_output=hide_output)
Beispiel #4
0
def format_code(path: str, times: int = 2) -> None:
    """
    Runs the google-java-format jar against all .java files found within the
    provided path.

    The formatter is applied ``times`` passes, since a single pass is not
    always stable.
    """
    # Collect every .java file below the given path, recursively.
    pattern = os.path.join(path, "**/*.java")
    files = list(glob.iglob(pattern, recursive=True))

    logger.info("Running java formatter on {} files".format(len(files)))
    # NOTE(review): assumes the formatter binary path is passed as argv[2]
    # by the calling script — confirm the caller contract.
    formatter_binary = sys.argv[2]
    command = [formatter_binary, "--replace"] + files
    for _ in range(times):
        shell.run(command)
Beispiel #5
0
 def add_n_fetch_remote_repo(self):
     """Initialise git if needed, then register every configured remote.

     Exits the process when any configured URL has no repository behind it.
     NOTE(review): ``is_cloned`` is read as a bare (module-level?) name here,
     not ``self.is_cloned`` — confirm that is intentional.
     """
     if is_cloned is False:
         shell.run(["git", "init"])
     remotes = (
         (self.repo1_remote_name, self.repo1),
         (self.repo2_remote_name, self.repo2),
         (self.fork1_remote_name, self.fork1),
         (self.fork2_remote_name, self.fork2),
     )
     for remote_name, remote_url in remotes:
         if not self.is_repo_exist_on_git(remote_url):
             print("{} url have not repo exist".format(remote_url))
             sys.exit()
         self.add_remote_url(remote_name, remote_url)
Beispiel #6
0
def build_grpc(target):
  """Build a grpc build target and copy all generate source files."""
  output = bazel_build(target=target, cwd=googleapis)

  # Unpack every *grpc-src.jar from the bazel output into a scratch dir.
  src_output = Path(tempfile.mkdtemp())
  for proto_jar in output.glob("*grpc-src.jar"):
    jar_name = os.path.basename(proto_jar)
    logger.debug(f"unzipping: {jar_name}")
    shell.run(["unzip", "-o", proto_jar, "-d", src_output / "src"])

  java.fix_grpc_headers(src_output, "")
  s.copy(src_output / "src/com", "grpc-google-iam-v1/src/main/java/com")
 def add_fetch_remote_repo(self):
     """Register the upstream/fork remotes and fetch them plus origin."""
     remote_pairs = (
         (self.upstream_name, self.upstream),
         (self.fork2_name, self.fork2),
         (self.upstream2_name, self.upstream2),
     )
     for remote_name, remote_url in remote_pairs:
         self.add_remote_url(remote_name, remote_url)
     fetch_targets = [name for name, _ in remote_pairs] + ["origin"]
     for target in fetch_targets:
         shell.run(["git", "fetch", target])
Beispiel #8
0
def format_code(path: str, version: str = DEFAULT_FORMAT_VERSION) -> None:
    """
    Runs the google-java-format jar against all .java files found within the
    provided path.

    The requested formatter version is downloaded into the cache directory
    on first use.
    """
    jar = cache.get_cache_dir() / f"google-java-format-{version}.jar"
    if not jar.exists():
        _download_formatter(version, jar)

    # Collect every .java file below the given path, recursively.
    java_pattern = os.path.join(path, "**/*.java")
    files = list(glob.iglob(java_pattern, recursive=True))

    # Run the formatter as a jar file
    log.info("Running java formatter on {} files".format(len(files)))
    shell.run(["java", "-jar", str(jar), "--replace"] + files)
Beispiel #9
0
    def _generic_library(self, directory: str, **kwargs) -> Path:
        """Render the template group under *directory* with the given kwargs.

        Loads generic (language-agnostic) repo metadata when a ``metadata``
        kwarg is supplied, excludes samples/README.md when there are no
        samples, and wires the default branch into the repo metadata.

        Returns:
            Path to the rendered output, also registered in _tracked_paths.
        """
        # load common repo meta information (metadata that's not language specific).
        meta = kwargs.get("metadata")
        if meta is not None:
            self._load_generic_metadata(meta)
            # if no samples were found, don't attempt to render a
            # samples/README.md.
            if not meta.get("samples"):
                self.excludes.append("samples/README.md")

        t = templates.TemplateGroup(self._template_root / directory,
                                    self.excludes)

        # BUG FIX: the original indexed kwargs["metadata"] unconditionally
        # here, raising KeyError when no metadata kwarg was passed even
        # though the block above treats metadata as optional.
        if meta and "repository" in meta and "repo" in meta:
            meta["repo"]["default_branch"] = _get_default_branch_name(
                meta["repository"])

        # TODO: migrate to python.py once old sample gen is deprecated
        if directory == "python_samples":
            t.env.globals["get_help"] = lambda filename: shell.run(
                ["python", filename, "--help"]).stdout

        result = t.render(**kwargs)
        _tracked_paths.add(result)

        return result
Beispiel #10
0
def fix_hermetic(hide_output=False):
    """Fix the formatting in the current Node.js library.

    Assumes gts is already installed in a well known location on disk
    (``_TOOLS_DIRECTORY``) and copies its node_modules locally first.

    Args:
        hide_output: when True, suppress the subprocess output.
    """
    logger.debug("Copy eslint config")
    copy_cmd = ["cp", "-r", f"{_TOOLS_DIRECTORY}/node_modules", "."]
    shell.run(copy_cmd, check=True, hide_output=hide_output)

    logger.debug("Running fix...")
    gts_binary = f"{_TOOLS_DIRECTORY}/node_modules/.bin/gts"
    # gts may report issues it cannot auto-fix; don't fail the run.
    shell.run([gts_binary, "fix"], check=False, hide_output=hide_output)
Beispiel #11
0
 def has_diff(self, from_repo, to_repo):
     """Return True when `git diff` between the two masters is non-empty."""
     diff_cmd = [
         "git", "diff", "{}/master".format(from_repo),
         "{}/master".format(to_repo), "--shortstat"
     ]
     result = shell.run(diff_cmd)
     # --shortstat prints nothing at all when there is no difference.
     return bool(result.stdout)
Beispiel #12
0
def _get_sample_readme_metadata(sample_dir: Path) -> dict:
    """Load sample metadata from README.rst.in under *sample_dir*.

    Installs the sample's requirements first so later ``--help``
    invocations of the samples succeed, then annotates each sample entry
    with its absolute path. Returns an empty dict when no README.rst.in
    exists.
    """
    sample_readme = sample_dir / "README.rst.in"
    if not sample_readme.exists():
        return {}

    requirements = str(Path(sample_dir / "requirements.txt").resolve())
    log.debug(
        f"Installing requirements at {requirements} to generate {sample_readme}"
    )
    shell.run([sys.executable, "-m", "pip", "install", "-r", requirements])

    with open(sample_readme) as f:
        sample_metadata = yaml.load(f, Loader=yaml.SafeLoader)
    for sample in sample_metadata["samples"]:
        # add absolute path to metadata so `python foo.py --help` succeeds
        sample["abs_path"] = Path(sample_dir / (sample["file"])).resolve()
    return sample_metadata
Beispiel #13
0
    def run(self, image, root_dir, config, *args):
        """Execute an artman command inside the artman docker container.

        Args:
            image: docker image to run.
            root_dir: input directory mounted into the container as the
                local googleapis directory.
            config: artman config passed to ``artman --config``.
            *args: extra arguments appended to the artman command line.
        Returns:
            The output directory with artman-generated files.
        """
        container_name = "artman-docker"
        output_dir = root_dir / "artman-genfiles"

        # Run as the host user/group so generated files get sane ownership,
        # and bind-mount both the input and the output trees.
        docker_cmd = ["docker", "run", "--name", container_name, "--rm", "-i"]
        docker_cmd += ["-e", f"HOST_USER_ID={os.getuid()}"]
        docker_cmd += ["-e", f"HOST_GROUP_ID={os.getgid()}"]
        docker_cmd += ["-e", "RUNNING_IN_ARTMAN_DOCKER=True"]
        docker_cmd += ["-v", f"{root_dir}:{root_dir}"]
        docker_cmd += ["-v", f"{output_dir}:{output_dir}"]
        docker_cmd += ["-w", root_dir, image, "/bin/bash", "-c"]

        artman_parts = ["artman", "--local", "--config", config, "generate"]
        artman_command = " ".join(map(str, artman_parts + list(args)))

        shell.run(docker_cmd + [artman_command], cwd=root_dir)

        return output_dir
 def sync_master_with_upstream(self):
     """Rebase the local master onto upstream/master and push to origin.

     Only rebases when `git diff master upstream/master` reports changes.
     Fixes the misspelled progress messages of the original
     ("different found beetween", "succesfully"); control flow unchanged.
     """
     print("started syncing with upstream :")
     shell.run(["git", "checkout", "master"])
     if shell.run(["git", "diff", "master", "upstream/master"]).stdout:
         print("difference found between master and upstream master")
         shell.run(["git", "rebase", "upstream/master"])
         shell.run(["git", "push", "origin", "master"])
         print("successfully rebased master with upstream master")
Beispiel #15
0
def bazel_build(target: str, cwd: Union[Path, str]) -> Path:
  """Build a bazel target and return the output build directory.

  Args:
    target: bazel target label, e.g. "//google/cloud/foo:bar".
    cwd: directory containing the bazel workspace.
  Returns:
    Resolved path to the target's bazel-bin output directory.
  """
  old_cwd = os.getcwd()
  os.chdir(str(cwd))
  try:
    bazel_run_args = [
      "bazel",
      "--max_idle_secs=240",
      "build",
      target,
    ]

    logger.debug(f"Generating code for: {target}.")
    shell.run(bazel_run_args)

    # "//a/b:c" -> bazel-bin/a/b
    output_dir = Path(
      f"bazel-bin{os.path.sep}{target[2:].split(':')[0]}"
    ).resolve()
  finally:
    # BUG FIX: restore the caller's working directory even when the
    # build (or path resolution) raises; the original leaked the chdir.
    os.chdir(old_cwd)

  return output_dir
Beispiel #16
0
def generate_service(disco: str):
    """Generate the PHP client for a single discovery document.

    BUG FIX: the original used ``re.search`` without a None check and
    crashed with AttributeError on filenames that don't match
    VERSION_REGEX; the sibling Java generator already guards this, so
    skip such documents the same way.
    """
    m = re.search(VERSION_REGEX, disco)
    if m is None:
        log.info(f"Skipping {disco}.")
        return

    name = m.group(1)
    version = m.group(2)
    template = TEMPLATE_VERSIONS[-1]  # Generate for latest version

    log.info(f"Generating {name} {version} ({template}).")

    output_dir = repository / ".cache" / name / version
    input_file = discovery / "discoveries" / disco

    command = (
        f"python2 -m googleapis.codegen --output_dir={output_dir}" +
        f" --input={input_file} --language=php --language_variant={template}" +
        f" --package_path=api/services")

    shell.run(f"mkdir -p {output_dir}".split(), cwd=repository / "generator")
    shell.run(command.split(), cwd=repository, hide_output=False)

    s.copy(output_dir, "src")
Beispiel #17
0
def clone(
    url: str,
    dest: pathlib.Path = None,
    committish: str = "master",
    force: bool = False,
    depth: int = None,
) -> pathlib.Path:
    """Clone (or refresh) a git repository and pin it to *committish*.

    Clones into the cache directory when *dest* is not given; pulls
    instead when a previous clone already exists. The resulting path is
    registered in _tracked_paths.
    """
    if dest is None:
        dest = cache.get_cache_dir()
    dest = dest / pathlib.Path(url).stem

    # A forced clone starts from scratch.
    if force and dest.exists():
        shutil.rmtree(dest)

    if dest.exists():
        shell.run(["git", "pull"], cwd=str(dest))
    else:
        clone_cmd = ["git", "clone", url, dest]
        if depth is not None:
            clone_cmd += ["--depth", str(depth)]
        shell.run(clone_cmd)

    shell.run(["git", "reset", "--hard", committish], cwd=str(dest))

    # track all git repositories
    _tracked_paths.add(dest)

    return dest
Beispiel #18
0
    def _ensure_dependencies_installed(self):
        """Verify docker and git are on PATH; raise EnvironmentError if not."""
        log.debug("Ensuring dependencies.")

        # `which` exits non-zero when the binary is missing.
        failed_dependencies = [
            dependency
            for dependency in ("docker", "git")
            if shell.run(["which", dependency], check=False).returncode
        ]

        if failed_dependencies:
            raise EnvironmentError(
                f"Dependencies missing: {', '.join(failed_dependencies)}")
 def has_more_diff(self):
     """Return True when the diff against against_fork/master adds lines.

     Parses the ``--shortstat`` summary, e.g.
     "208 files changed, 39625 insertions(+), 9481 deletions(-)",
     and reports True only when the insertion count is positive.

     BUG FIX: the original used non-raw regex strings ("\\d+" written
     without the r prefix), an invalid escape sequence that emits
     DeprecationWarning and is slated to become a syntax error.
     """
     output = shell.run(
         ["git", "diff", "master", "against_fork/master", "--shortstat"])
     if not output.stdout:
         return False
     print(output.stdout)
     match = re.search(r"(\d+) insertion", output.stdout)
     return bool(match and int(match.group(1)) > 0)
def generate_service(disco: str):
    """Generate the Java client library for one discovery document.

    For each template version: runs the python2 googleapis codegen, copies
    the generated sources into clients/<library>/<version>/<template>, and
    stores the discovery document as a resource. Finally writes the maven
    metadata file and promotes the latest template's README.

    Args:
        disco: discovery document filename, expected to match
            VERSION_REGEX (<name>.<version>.json); others are skipped.
    """
    m = re.search(VERSION_REGEX, disco)
    if m is None:
        # Filename doesn't look like <name>.<version>.json — not a service.
        log.info(f"Skipping {disco}.")
        return

    name = m.group(1)
    version = m.group(2)

    log.info(f"Generating {name} {version}.")

    library_name = f"google-api-services-{name}"
    output_dir = repository / ".cache" / library_name / version
    input_file = discovery / "discoveries" / disco

    for template in TEMPLATE_VERSIONS:
        log.info(f"\t{template}")

        # python2 legacy codegen; invoked once per template variant.
        command = (
            f"python2 -m googleapis.codegen --output_dir={output_dir}" +
            f" --input={input_file} --language=java --language_variant={template}"
            + f" --package_path=api/services")

        shell.run(f"mkdir -p {output_dir}".split(),
                  cwd=repository / "generator")
        shell.run(command.split(), cwd=repository, hide_output=False)

        s.copy(output_dir, f"clients/{library_name}/{version}/{template}")

        # Keep a copy of the discovery doc next to the generated sources.
        resource_dir = (repository / "clients" / library_name / version /
                        template / "resources")
        shell.run(f"mkdir -p {resource_dir}".split())
        write_discovery_file(input_file, resource_dir / path.basename(disco))

    # write the metadata file
    latest_version = TEMPLATE_VERSIONS[-1]
    metadata = maven_metadata(
        str(repository / "clients" / library_name / version / latest_version /
            "pom.xml"))
    write_metadata_file(library_name, version, metadata)

    # copy the latest README to the main service location
    shutil.copy(
        repository / "clients" / library_name / version / latest_version /
        "README.md",
        repository / "clients" / library_name / version / "README.md",
    )
Beispiel #21
0
def clone(
    url: str,
    dest: pathlib.Path = None,
    committish: str = "master",
    force: bool = False,
    depth: int = None,
) -> pathlib.Path:
    """Clone (or refresh) a git repository, pin *committish*, record metadata.

    Clones into the cache directory when *dest* is not given; pulls when a
    previous clone exists. The clone is registered in _tracked_paths and in
    the synthtool metadata git sources.
    """
    if dest is None:
        dest = cache.get_cache_dir()
    dest = dest / pathlib.Path(url).stem

    # A forced clone starts from scratch.
    if force and dest.exists():
        shutil.rmtree(dest)

    if dest.exists():
        shell.run(["git", "pull"], cwd=str(dest))
    else:
        clone_cmd = ["git", "clone", url, dest]
        if depth is not None:
            clone_cmd += ["--depth", str(depth)]
        shell.run(clone_cmd)

    shell.run(["git", "reset", "--hard", committish], cwd=str(dest))

    # track all git repositories
    _tracked_paths.add(dest)

    # add repo to metadata
    sha, message = get_latest_commit(dest)
    commit_metadata = extract_commit_message_metadata(message)
    metadata.add_git_source(
        name=dest.name,
        remote=url,
        sha=sha,
        internal_ref=commit_metadata.get("PiperOrigin-RevId"),
    )

    return dest
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This script is used to synthesize generated parts of this library."""

from synthtool.__main__ import extra_args
import synthtool as s
import synthtool.log as log
import synthtool.shell as shell
import logging
import os

logging.basicConfig(level=logging.DEBUG)
s.metadata.set_track_obsolete_files(False)  # TODO: enable again.

# Run the ruby autosynth image against the current checkout, forwarding the
# docker socket so the container can itself launch docker.
command = [
    "docker",
    "run",
    "--rm",
    f"-v{os.getcwd()}:/workspace",
    "-v/var/run/docker.sock:/var/run/docker.sock",
    "-w", "/workspace",
    "--entrypoint", "script/synth.rb",
    "gcr.io/cloud-devrel-kokoro-resources/yoshi-ruby/autosynth",
]
# Forward any extra CLI arguments straight into the container command.
if extra_args():
    command.extend(extra_args())

log.debug(f"Running: {' '.join(command)}")
shell.run(command, hide_output=False)
Beispiel #23
0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""

from synthtool import _tracked_paths
import synthtool as s
import synthtool.log as log
import synthtool.shell as shell
import synthtool.sources.git as git
import logging

logging.basicConfig(level=logging.DEBUG)

repository_url = "https://github.com/GoogleCloudPlatform/elixir-google-api.git"

log.debug(f"Cloning {repository_url}.")
repository = git.clone(repository_url, depth=1)

# Both the elixir and node dependencies are needed before generation.
log.debug("Installing dependencies.")
for dependency_command in ("mix deps.get", "npm install"):
    shell.run(dependency_command.split(), cwd=repository)

log.debug("Generating all libraries.")
shell.run("mix google_apis.generate".split(), cwd=repository)

# copy all clients
s.copy(repository / "clients")
Beispiel #24
0
def _get_help(filename: str) -> str:
    """Function used by sample readmegen"""
    # Capture the sample's --help text using the current interpreter.
    result = shell.run([sys.executable, filename, "--help"])
    return result.stdout
Beispiel #25
0
def _get_help(filename: str) -> str:
    """Function used by sample readmegen"""
    # Capture the sample's --help text via the `python` on PATH.
    result = shell.run(["python", filename, "--help"])
    return result.stdout
Beispiel #26
0
    def _generate_code(
        self,
        service: str,
        version: str,
        language: str,
        *,
        private: bool = False,
        discogapic: bool = False,
        proto_path: Union[str, Path] = None,
        output_dir: Union[str, Path] = None,
        bazel_target: str = None,
        include_protos: bool = False,
        proto_output_path: Union[str, Path] = None,
        tar_strip_components: int = 1,
    ):
        """Generate client code for one API with bazel and unpack the result.

        Clones the appropriate API-definitions repo, derives the bazel
        target from proto_path when not given, builds it, untars the
        output into output_dir, optionally copies the .proto files, and
        records the generation in the synthtool metadata.

        Returns:
            Path to the directory holding the generated code.
        Raises:
            RuntimeError: no definitions repo, or the build produced nothing.
            FileNotFoundError: protos/BUILD.bazel missing at proto_path.
        """
        # Determine which googleapis repo to use
        if discogapic:
            api_definitions_repo = self._clone_discovery_artifact_manager()
            api_definitions_repo_name = "discovery-artifact-manager"
        elif private:
            api_definitions_repo = self._clone_googleapis_private()
            api_definitions_repo_name = "googleapis_private"
        else:
            api_definitions_repo = self._clone_googleapis()
            api_definitions_repo_name = "googleapis"

        # Sanity check: We should have a googleapis repo; if we do not,
        # something went wrong, and we should abort.
        if not api_definitions_repo:
            # BUG FIX: the original message read "repository repositoryis
            # unavailable" (duplicated word, missing space).
            raise RuntimeError(
                f"Unable to generate {service}, the sources repository "
                "is unavailable."
            )

        # Calculate proto_path if necessary.
        if not bazel_target or include_protos:
            # If bazel_target is not specified explicitly, we will need
            # proto_path to calculate it. If include_protos is True,
            # we will need the proto_path to copy the protos.
            if not proto_path:
                if bazel_target:
                    # Calculate proto_path from the full bazel target, which is
                    # in the format "//proto_path:target_name
                    proto_path = bazel_target.split(":")[0][2:]
                else:
                    # If bazel_target is not specified, assume the protos are
                    # simply under google/cloud, where the most of the protos
                    # usually are.
                    proto_path = f"google/cloud/{service}/{version}"
            protos = Path(proto_path)
            if protos.is_absolute():
                protos = protos.relative_to("/")

        # Determine bazel target based on per-language patterns
        # Java:    google-cloud-{{assembly_name}}-{{version}}-java
        # Go:      gapi-cloud-{{assembly_name}}-{{version}}-go
        # Python:  {{assembly_name}}-{{version}}-py
        # PHP:     google-cloud-{{assembly_name}}-{{version}}-php
        # Node.js: {{assembly_name}}-{{version}}-nodejs
        # Ruby:    google-cloud-{{assembly_name}}-{{version}}-ruby
        # C#:      google-cloud-{{assembly_name}}-{{version}}-csharp
        if not bazel_target:
            # Determine where the protos we are generating actually live.
            # We can sometimes (but not always) determine this from the service
            # and version; in other cases, the user must provide it outright.
            parts = list(protos.parts)
            while len(parts) > 0 and parts[0] != "google":
                parts.pop(0)
            if len(parts) == 0:
                raise RuntimeError(
                    f"Cannot determine bazel_target from proto_path {protos}."
                    "Please set bazel_target explicitly."
                )
            if language == "python":
                suffix = f"{service}-{version}-py"
            elif language == "nodejs":
                suffix = f"{service}-{version}-nodejs"
            elif language == "go":
                suffix = f"gapi-{'-'.join(parts[1:])}-go"
            else:
                suffix = f"{'-'.join(parts)}-{language}"
            bazel_target = f"//{os.path.sep.join(parts)}:{suffix}"

            # Sanity check: Do we have protos where we think we should?
            if not (api_definitions_repo / protos).exists():
                raise FileNotFoundError(
                    f"Unable to find directory for protos: {(api_definitions_repo / protos)}."
                )
            if not tuple((api_definitions_repo / protos).glob("*.proto")):
                raise FileNotFoundError(
                    f"Directory {(api_definitions_repo / protos)} exists, but no protos found."
                )
            # BUG FIX: the original tested the Path object itself, which is
            # always truthy, so the check never fired; test existence.
            if not (api_definitions_repo / protos / "BUILD.bazel").exists():
                raise FileNotFoundError(
                    f"File {(api_definitions_repo / protos / 'BUILD.bazel')} does not exist."
                )

        # Ensure the desired output directory exists.
        # If none was provided, create a temporary directory.
        if not output_dir:
            output_dir = tempfile.mkdtemp()
        output_dir = Path(output_dir).resolve()

        # Let's build some stuff now.
        cwd = os.getcwd()
        os.chdir(str(api_definitions_repo))

        bazel_run_args = [
            "bazel",
            "--max_idle_secs=240",
            "build",
            bazel_target,
        ]

        logger.debug(f"Generating code for: {bazel_target}.")
        shell.run(bazel_run_args)

        # We've got tar file!
        # its location: bazel-bin/google/cloud/language/v1/language-v1-nodejs.tar.gz
        # bazel_target:         //google/cloud/language/v1:language-v1-nodejs
        tar_file = (
            f"bazel-bin{os.path.sep}{bazel_target[2:].replace(':', os.path.sep)}.tar.gz"
        )

        tar_run_args = [
            "tar",
            "-C",
            str(output_dir),
            f"--strip-components={tar_strip_components}",
            "-xzf",
            tar_file,
        ]
        shell.run(tar_run_args)

        # Get the *.protos files and put them in a protos dir in the output
        if include_protos:
            proto_files = protos.glob("**/*.proto")
            # By default, put the protos at the root in a folder named 'protos'.
            # Specific languages can be cased here to put them in a more language
            # appropriate place.
            if not proto_output_path:
                proto_output_path = output_dir / "protos"
                if language == "python":
                    # place protos alongsize the *_pb2.py files
                    proto_output_path = (
                        output_dir / f"google/cloud/{service}_{version}/proto"
                    )
            else:
                proto_output_path = Path(output_dir / proto_output_path)
            os.makedirs(proto_output_path, exist_ok=True)

            for i in proto_files:
                logger.debug(f"Copy: {i} to {proto_output_path / i.name}")
                shutil.copyfile(i, proto_output_path / i.name)
            logger.success(f"Placed proto files into {proto_output_path}.")

        os.chdir(cwd)

        # Sanity check: Does the output location have code in it?
        # If not, complain.
        if not tuple(output_dir.iterdir()):
            raise RuntimeError(
                f"Code generation seemed to succeed, but {output_dir} is empty."
            )

        # Huzzah, it worked.
        logger.success(f"Generated code into {output_dir}.")

        # Record this in the synthtool metadata.
        metadata.add_client_destination(
            source=api_definitions_repo_name,
            api_name=service,
            api_version=version,
            language=language,
            generator="bazel",
        )

        _tracked_paths.add(output_dir)
        return output_dir
Beispiel #27
0
    def _generate_code(
        self,
        service: str,
        version: str,
        language: str,
        *,
        private: bool = False,
        proto_path: Union[str, Path] = None,
        extra_proto_files: List[str] = None,
        output_dir: Union[str, Path] = None,
        generator_version: str = "latest",
        generator_args: Mapping[str, str] = None,
    ):
        """Generate client code for one API with the dockerized gapic generator.

        Clones googleapis (or googleapis-private), pulls the per-language
        generator image, bind-mounts the protos and output directory, runs
        the generator, and records the result in the synthtool metadata.

        Returns:
            Path to the directory holding the generated code.
        Raises:
            RuntimeError: googleapis repo missing, or generation emitted nothing.
            FileNotFoundError: protos or an extra proto file not found.
        """
        # BUG FIX: extra_proto_files used a mutable default argument ([]);
        # use the None sentinel instead (behavior-compatible — it is only
        # ever iterated).
        if extra_proto_files is None:
            extra_proto_files = []

        # Determine which googleapis repo to use
        if not private:
            googleapis = self._clone_googleapis()
        else:
            googleapis = self._clone_googleapis_private()

        # Sanity check: We should have a googleapis repo; if we do not,
        # something went wrong, and we should abort.
        if googleapis is None:
            # BUG FIX: original message concatenated to "repositoryis
            # unavailable" (missing space).
            raise RuntimeError(
                f"Unable to generate {service}, the googleapis repository "
                "is unavailable.")

        # Pull the code generator for the requested language.
        # If a code generator version was specified, honor that.
        log.debug(
            f"Pulling Docker image: gapic-generator-{language}:{generator_version}"
        )
        shell.run(
            [
                "docker",
                "pull",
                f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}",
            ],
            hide_output=False,
        )

        # Determine where the protos we are generating actually live.
        # We can sometimes (but not always) determine this from the service
        # and version; in other cases, the user must provide it outright.
        if proto_path:
            proto_path = Path(proto_path)
            if proto_path.is_absolute():
                proto_path = proto_path.relative_to("/")
        else:
            proto_path = Path("google/cloud") / service / version

        # Sanity check: Do we have protos where we think we should?
        if not (googleapis / proto_path).exists():
            raise FileNotFoundError(
                f"Unable to find directory for protos: {(googleapis / proto_path)}."
            )
        if not tuple((googleapis / proto_path).glob("*.proto")):
            raise FileNotFoundError(
                f"Directory {(googleapis / proto_path)} exists, but no protos found."
            )

        # Ensure the desired output directory exists.
        # If none was provided, create a temporary directory.
        if not output_dir:
            output_dir = tempfile.mkdtemp()
        output_dir = Path(output_dir).resolve()

        # The time has come, the walrus said, to talk of actually running
        # the code generator.
        sep = os.path.sep

        # try to figure out user ID and stay compatible.
        # If there is no `os.getuid()`, fallback to `getpass.getuser()`
        getuid = getattr(os, "getuid", None)
        if getuid:
            user = str(getuid())
        else:
            user = getpass.getuser()

        docker_run_args = [
            "docker",
            "run",
            "--mount",
            f"type=bind,source={googleapis / proto_path}{sep},destination={Path('/in') / proto_path}{sep},readonly",
            "--mount",
            f"type=bind,source={output_dir}{sep},destination={Path('/out')}{sep}",
            "--rm",
            "--user",
            user,
        ]

        # Process extra proto files, e.g. google/cloud/common_resources.proto,
        # if they are required by this API.
        # First, bind mount all the extra proto files into the container.
        for proto in extra_proto_files:
            source_proto = googleapis / Path(proto)
            if not source_proto.exists():
                raise FileNotFoundError(
                    f"Unable to find extra proto file: {source_proto}.")
            docker_run_args.extend([
                "--mount",
                f"type=bind,source={source_proto},destination={Path('/in') / proto},readonly",
            ])

        docker_run_args.append(
            f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}"
        )

        # Populate any additional CLI arguments provided for Docker.
        if generator_args:
            for key, value in generator_args.items():
                docker_run_args.append(f"--{key}")
                docker_run_args.append(value)

        log.debug(f"Generating code for: {proto_path}.")
        shell.run(docker_run_args)

        # Sanity check: Does the output location have code in it?
        # If not, complain.
        if not tuple(output_dir.iterdir()):
            raise RuntimeError(
                f"Code generation seemed to succeed, but {output_dir} is empty."
            )

        # Huzzah, it worked.
        log.success(f"Generated code into {output_dir}.")

        # Record this in the synthtool metadata.
        metadata.add_client_destination(
            source="googleapis" if not private else "googleapis-private",
            api_name=service,
            api_version=version,
            language=language,
            generator=f"gapic-generator-{language}",
        )

        _tracked_paths.add(output_dir)
        return output_dir
Beispiel #28
0
def install(hide_output=False):
    """Install all dependencies for the current Node.js library.

    Args:
        hide_output: when True, suppress the subprocess output.
    """
    logger.debug("Installing dependencies...")
    npm_install = ["npm", "install"]
    shell.run(npm_install, hide_output=hide_output)
Beispiel #29
0
logging.basicConfig(level=logging.DEBUG)

# Matches <name>.<version>.json discovery filenames.
VERSION_REGEX = r"([^\.]*)\.(.+)\.json$"

TEMPLATE_VERSIONS = [
    "default",
]
discovery_url = "https://github.com/googleapis/discovery-artifact-manager.git"

repository = Path('.')

log.debug(f"Cloning {discovery_url}.")
discovery = git.clone(discovery_url)

log.debug("Cleaning output directory.")
shell.run("rm -rf .cache".split(), cwd=repository)

# The legacy codegen is a python2 package installed in user site-packages.
log.debug("Installing dependencies.")
install_cmd = "python2 -m pip install -e generator/ --user"
shell.run(install_cmd.split(), cwd=repository)

def generate_service(disco: str):
    # NOTE(review): this snippet appears truncated — the function body
    # evidently continues beyond what is shown here.
    m = re.search(VERSION_REGEX, disco)
    # assumes disco always matches VERSION_REGEX; m.group() raises
    # AttributeError otherwise — TODO confirm caller filters filenames.
    name = m.group(1)
    version = m.group(2)
    template = TEMPLATE_VERSIONS[-1] # Generate for latest version

    log.info(f"Generating {name} {version} ({template}).")
Beispiel #30
0
# Copy the repo into a temporary directory, removing the build and deps, and
# perform generation there. This is because the docker command may be a
# cross-compile whose build environment should be isolated from the current
# git clone.
with tempfile.TemporaryDirectory() as tmpdir:
    repository = pathlib.Path(tmpdir) / "repo"
    shutil.copytree(os.getcwd(), repository)
    for stale_dir in ("_build", "deps"):
        shutil.rmtree(repository / stale_dir, ignore_errors=True)

    image = "gcr.io/cloud-devrel-public-resources/elixir19"
    generate_command = "scripts/generate_client.sh"
    # Mount the copied repo and the docker socket; run the generator as
    # the host user/group so output files are owned correctly.
    command = [
        "docker", "run", "--rm", f"-v{repository}:/workspace",
        "-v/var/run/docker.sock:/var/run/docker.sock", "-e",
        f"USER_GROUP={os.getuid()}:{os.getgid()}", "-w", "/workspace", image,
        generate_command
    ]

    if extra_args():
        command.extend(extra_args())

    log.debug(f"Running: {' '.join(command)}")

    shell.run(command, cwd=repository, hide_output=False)

    # Copy the resulting clients directory back into the git clone.
    shutil.rmtree("clients", ignore_errors=True)
    shutil.move(repository / "clients", "clients")