Example #1
def main(synthfile: str, metadata: str, extra_args: Sequence[str]):
    f"""Synthesizes source code according to the instructions in synthfile arg.

    Optional environment variables:
      SYNTHTOOL_ARTMAN_VERSION:  The version of artman to use.
      SYNTHTOOL_GOOGLEAPIS:      Path to local clone of https://github.com/googleapis/googleapis
      SYNTHTOOL_GENERATOR:       Path to local gapic-generator directory to use for generation.
                By default, the latest version of gapic-generator will be used.
      AUTOSYNTH_USE_SSH:         Access github repos via ssh instead of https.
      {preconfig.PRECONFIG_ENVIRONMENT_VARIABLE}:  Path to a json file.


    {preconfig.PRECONFIG_HELP}
    """
    _extra_args.extend(extra_args)

    synth_file = os.path.abspath(synthfile)

    if os.path.lexists(synth_file):
        logger.debug(f"Executing {synth_file}.")
        # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
        spec = importlib.util.spec_from_file_location("synth", synth_file)
        synth_module = importlib.util.module_from_spec(spec)

        if spec.loader is None:
            raise ImportError("Could not import synth.py")

        with synthtool.metadata.MetadataTrackerAndWriter(metadata):
            spec.loader.exec_module(synth_module)  # type: ignore

    else:
        logger.error(f"{synth_file} not found.")
        sys.exit(1)
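The dynamic import above follows the pattern documented in the linked Python docs. A minimal standalone sketch of the same technique (the module name and file path are illustrative):

import importlib.util

spec = importlib.util.spec_from_file_location("synth", "/path/to/synth.py")
if spec is None or spec.loader is None:
    raise ImportError("Could not load synth.py")
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)  # runs the file's top-level code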
Example #2
def compile_protos(hide_output=False):
    """
    Compiles protos into .json, .js, and .d.ts files using the
    compileProtos script from google-gax.
    """
    logger.debug("Compiling protos...")
    shell.run(["npx", "compileProtos", "src"], hide_output=hide_output)
Example #3
def write(outfile: str = "synth.metadata") -> None:
    """Writes out the metadata to a file."""
    jsonified = google.protobuf.json_format.MessageToJson(_metadata)

    with open(outfile, "w") as fh:
        fh.write(jsonified)

    logger.debug(f"Wrote metadata to {outfile}.")
Example #4
def owlbot_patch() -> None:
    """Apply some replacements for copied libraries.

    This function assumes the current directory is the target.
    """
    logger.debug("owlbot_patch called for %s", os.getcwd())

    # Apply common replacements, currently nothing.
    pass
Example #5
    def __init__(self):
        local_clone = os.environ.get("SYNTHTOOL_GOOGLEAPIS_GEN")
        if local_clone:
            self._googleapis_gen = Path(local_clone).expanduser()
            logger.debug(
                f"Using local googleapis-gen at {self._googleapis_gen}")
        else:
            logger.debug("Cloning googleapis-gen.")
            self._googleapis_gen = git.clone(
                git.make_repo_clone_url("googleapis/googleapis-gen"))
Example #6
    def __init__(self):
        if LOCAL_TEMPLATES:
            logger.debug(f"Using local templates at {LOCAL_TEMPLATES}")
            self._template_root = LOCAL_TEMPLATES
        else:
            templates_git = git.clone(TEMPLATES_URL)
            self._template_root = templates_git / DEFAULT_TEMPLATES_PATH

        self._templates = templates.Templates(self._template_root)
        self.excludes = []  # type: List[str]
Example #7
def fix(hide_output=False):
    """
    Fixes the formatting in the current Node.js library.
    Before running fix script, run prelint to install extra dependencies
    for samples, but do not fail if it does not succeed.
    """
    logger.debug("Running prelint...")
    shell.run(["npm", "run", "prelint"], check=False, hide_output=hide_output)
    logger.debug("Running fix...")
    shell.run(["npm", "run", "fix"], hide_output=hide_output)
Example #8
def _merge_common_templates(source_text: str, destination_text: str,
                            file_path: Path) -> str:
    # keep any existing pom.xml
    if file_path.match("pom.xml"):
        logger.debug(
            f"existing pom file found ({file_path}) - keeping the existing")
        return destination_text

    # by default return the newly generated content
    return source_text
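Merge hooks like this receive the newly generated text, the existing text, and the destination path, and return whatever should be written. A hypothetical call (the inputs are illustrative):

from pathlib import Path

new_text = "<project>generated</project>"
old_text = "<project>hand-edited</project>"

# pom.xml matches, so the hand-edited content wins.
assert _merge_common_templates(new_text, old_text, Path("java/pom.xml")) == old_text
# Any other file gets the freshly generated content.
assert _merge_common_templates(new_text, old_text, Path("README.md")) == new_text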
Example #9
def compile_protos_hermetic(hide_output=False):
    """
    Compiles protos into .json, .js, and .d.ts files using the
    compileProtos script from google-gax.
    """
    logger.debug("Compiling protos...")
    shell.run(
        [f"{_TOOLS_DIRECTORY}/node_modules/.bin/compileProtos", "src"],
        check=True,
        hide_output=hide_output,
    )
Example #10
    def __init__(self, template_path: Optional[Path] = None):
        if template_path:
            self._template_root = template_path
        elif LOCAL_TEMPLATES:
            logger.debug(f"Using local templates at {LOCAL_TEMPLATES}")
            self._template_root = Path(LOCAL_TEMPLATES)
        else:
            templates_git = git.clone(TEMPLATES_URL)
            self._template_root = templates_git / DEFAULT_TEMPLATES_PATH

        self._templates = templates.Templates(self._template_root)
        self.excludes = []  # type: List[str]
Example #11
    def _clone_googleapis(self):
        if self._googleapis is not None:
            return self._googleapis

        if LOCAL_GOOGLEAPIS:
            self._googleapis = Path(LOCAL_GOOGLEAPIS).expanduser()
            logger.debug(f"Using local googleapis at {self._googleapis}")

        else:
            logger.debug("Cloning googleapis.")
            self._googleapis = git.clone(GOOGLEAPIS_URL)

        return self._googleapis
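The method memoizes the clone in self._googleapis so repeated generation calls reuse a single checkout. The same idiom in isolation (names and the stand-in for git.clone are illustrative):

from pathlib import Path

class RepoCache:
    """Lazily materialize an expensive resource exactly once."""

    def __init__(self):
        self._repo = None

    def get(self) -> Path:
        if self._repo is None:            # first call pays the cost
            print("cloning...")           # stands in for git.clone(...)
            self._repo = Path("/tmp/googleapis")
        return self._repo                 # subsequent calls are free

cache = RepoCache()
assert cache.get() is cache.get()         # cloned only once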
Example #12
def _merge_common_templates(source_text: str, destination_text: str,
                            file_path: Path) -> str:
    # keep any existing pom.xml or sync-repo-settings.yaml
    if file_path.match("pom.xml") or file_path.match(
            "sync-repo-settings.yaml"):
        logger.debug(
            f"existing file found ({file_path}) - keeping the existing")
        return destination_text

    if file_path.match("release-please.yml"):
        return _merge_release_please(destination_text)

    # by default return the newly generated content
    return source_text
Example #13
    def _ensure_dependencies_installed(self):
        logger.debug("Ensuring dependencies.")

        dependencies = ["docker", "git"]
        failed_dependencies = []
        for dependency in dependencies:
            return_code = shell.run(["which", dependency], check=False).returncode
            if return_code:
                failed_dependencies.append(dependency)

        if failed_dependencies:
            raise EnvironmentError(
                f"Dependencies missing: {', '.join(failed_dependencies)}"
            )
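Shelling out to which only works on Unix-like systems; the standard library offers a portable alternative. A sketch of the same check using shutil.which:

import shutil

missing = [tool for tool in ("docker", "git") if shutil.which(tool) is None]
if missing:
    raise EnvironmentError(f"Dependencies missing: {', '.join(missing)}")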
Example #14
    def _clone_googleapis_discovery(self):
        if self._googleapis_discovery:
            return self._googleapis_discovery

        if LOCAL_GOOGLEAPIS_DISCOVERY:
            self._googleapis_discovery = Path(LOCAL_GOOGLEAPIS_DISCOVERY).expanduser()
            logger.debug(
                f"Using local googleapis-discovery at {self._googleapis_discovery}"
            )

        else:
            logger.debug("Cloning googleapis-discovery.")
            self._googleapis_discovery = git.clone(GOOGLEAPIS_DISCOVERY_URL)

        return self._googleapis_discovery
Example #15
def _find_copy_target(src: Path, version_string: str) -> typing.Optional[Path]:
    """Returns a directory contains the version subdirectory.
    """
    logger.debug("_find_copy_target called with %s and %s", src,
                 version_string)
    # Materialize the iterator: a bare scandir handle is always truthy,
    # so the emptiness check below would never fire.
    entries = list(os.scandir(src))
    if not entries:
        return None
    for entry in entries:
        if entry.path.endswith(version_string):
            return src
        if entry.is_dir():
            return _find_copy_target(
                Path(entry.path).resolve(), version_string)
    return None
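Given a tree such as root/Google/Cloud/Vision/V1, the function walks downward until some entry's path ends with the version string, then returns that entry's parent directory. A usage sketch with a throwaway tree:

import tempfile
from pathlib import Path

root = Path(tempfile.mkdtemp())
(root / "Google" / "Cloud" / "Vision" / "V1").mkdir(parents=True)

target = _find_copy_target(root, "V1")
print(target)  # .../Google/Cloud/Vision, the directory holding the V1 subdir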
Example #16
    def _clone_googleapis_private(self):
        if self._googleapis_private is not None:
            return self._googleapis_private

        if LOCAL_GOOGLEAPIS:
            self._googleapis_private = Path(LOCAL_GOOGLEAPIS).expanduser()
            logger.debug(
                f"Using local googleapis at {self._googleapis_private} for googleapis-private"
            )

        else:
            logger.debug("Cloning googleapis-private.")
            self._googleapis_private = git.clone(GOOGLEAPIS_PRIVATE_URL)

        return self._googleapis_private
Example #17
def owlbot_copy_version(
    src: Path,
    dest: Path,
    copy_excludes: typing.Optional[typing.List[str]] = None,
) -> None:
    """Copies files from a version subdirectory.
    """
    logger.debug("owlbot_copy_version called from %s to %s", src, dest)

    if copy_excludes is None:
        copy_excludes = DEFAULT_COPY_EXCLUDES
    # detect the version string for later use
    src_dir = src / "src"
    # Materialize the iterator so the emptiness check actually works.
    entries = list(os.scandir(src_dir))
    if not entries:
        logger.info("there is no src directory '%s' to copy", src_dir)
        return
    version_string = os.path.basename(entries[0].path)
    logger.debug("version_string detected: %s", version_string)

    # copy all src including partial veneer classes
    s.move([src / "src"], dest / "src", merge=_merge, excludes=copy_excludes)

    # copy tests
    s.move([src / "tests"],
           dest / "tests",
           merge=_merge,
           excludes=copy_excludes)

    # detect the directory containing proto generated PHP source and metadata.
    proto_src = src / "proto/src"
    entries = list(os.scandir(proto_src))  # materialize so the emptiness check works
    proto_dir = None
    metadata_dir = None
    if not entries:
        logger.info("there is no proto generated src directory to copy: %s",
                    proto_src)
        return
    for entry in entries:
        if os.path.basename(entry.path) == METADATA_DIR:
            metadata_dir = _find_copy_target(
                Path(entry.path).resolve(), version_string)
        else:
            proto_dir = _find_copy_target(
                Path(entry.path).resolve(), version_string)

    # copy proto files
    if isinstance(proto_dir, Path):
        logger.debug("proto_dir detected: %s", proto_dir)
        s.move([proto_dir], dest / "src", merge=_merge, excludes=copy_excludes)

    # copy metadata files
    if isinstance(metadata_dir, Path):
        logger.debug("metadata_dir detected: %s", metadata_dir)
        s.move([metadata_dir],
               dest / "metadata",
               merge=_merge,
               excludes=copy_excludes)
Example #18
    def _clone_discovery_artifact_manager(self):
        if self._discovery_artifact_manager:
            return self._discovery_artifact_manager

        if LOCAL_DISCOVERY_ARTIFACT_MANAGER:
            self._discovery_artifact_manager = Path(
                LOCAL_DISCOVERY_ARTIFACT_MANAGER
            ).expanduser()
            logger.debug(
                f"Using local discovery_artifact_manager at {self._discovery_artifact_manager}"
            )
        else:
            logger.debug("Cloning discovery-artifact-manager.")
            self._discovery_artifact_manager = git.clone(DISCOVERY_ARTIFACT_MANAGER_URL)

        return self._discovery_artifact_manager
Example #19
def fix_hermetic(hide_output=False):
    """
    Fixes the formatting in the current Node.js library. It assumes that gts
    is already installed in a well known location on disk:
    """
    logger.debug("Copy eslint config")
    shell.run(
        ["cp", "-r", f"{_TOOLS_DIRECTORY}/node_modules", "."],
        check=True,
        hide_output=hide_output,
    )
    logger.debug("Running fix...")
    shell.run(
        [f"{_TOOLS_DIRECTORY}/node_modules/.bin/gts", "fix"],
        check=False,
        hide_output=hide_output,
    )
Example #20
def _merge(src: str, dest: str, path: Path):
    """Merge function for the PHP post processor.
    This should be used for most merges of newly generated and existing files.
    It preserves the copyright year from destination files.

    Args:
        src: Source file content from gapic
        dest: Destination file content
        path: Destination file path
    Returns:
        The merged file content.
    """
    logger.debug("_merge called for %s", path)
    m = re.search(COPYRIGHT_REGEX, dest)
    if m:
        return re.sub(COPYRIGHT_REGEX, f"Copyright {m.group(1)} Google LLC",
                      src, 1)
    return src
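COPYRIGHT_REGEX is defined elsewhere in the module; assuming it looks roughly like r"Copyright (\d{4}) Google LLC", the year-preservation behaves like this:

import re

COPYRIGHT_REGEX = r"Copyright (\d{4}) Google LLC"  # assumed definition

src = "/* Copyright 2024 Google LLC */ class Foo {}"
dest = "/* Copyright 2019 Google LLC */ class Foo { /* local edits */ }"

m = re.search(COPYRIGHT_REGEX, dest)
merged = re.sub(COPYRIGHT_REGEX, f"Copyright {m.group(1)} Google LLC", src, 1)
print(merged)  # /* Copyright 2019 Google LLC */ class Foo {}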
Example #21
    def py_samples(self, **kwargs) -> Path:
        """
        Determines whether generation is being done in a client library or in a samples
        folder so it can either generate in the current directory or the client lib's
        'samples' folder. A custom path for where to generate may also be specified.
        Renders README.md according to .repo-metadata.json
        """
        # kwargs["metadata"] is required to load values from .repo-metadata.json
        if "metadata" not in kwargs:
            kwargs["metadata"] = {}
        # load common repo meta information (metadata that's not language specific).
        self._load_generic_metadata(kwargs["metadata"])
        # temporary exclusion prior to old templates being migrated out
        self.excludes.extend([
            "README.rst",
            "auth_api_key.tmpl.rst",
            "auth.tmpl.rst",
            "install_deps.tmpl.rst",
            "install_portaudio.tmpl.rst",
            "noxfile.py.j2",
        ])

        in_client_library = Path("samples").exists()
        sample_project_dir = kwargs["metadata"]["repo"].get(
            "sample_project_dir")

        if sample_project_dir is None:  # Not found in metadata
            if in_client_library:
                sample_project_dir = "samples"
            else:
                sample_project_dir = "."
        elif not Path(sample_project_dir).exists():
            raise Exception(f"'{sample_project_dir}' does not exist")

        logger.debug(
            f"Generating templates for samples directory '{sample_project_dir}'"
        )
        py_samples_templates = Path(self._template_root) / "python_samples"
        t = templates.TemplateGroup(py_samples_templates, self.excludes)
        result = t.render(subdir=sample_project_dir, **kwargs)
        _tracked_paths.add(result)
        return result
Example #22
    def py_samples_override(
        self, root, override_path, override_samples, **overridden_samples_kwargs
    ) -> Path:
        """
        Handles additional generation of READMEs where "override_path"s
        are set in one or more samples' metadata
        """
        overridden_samples_kwargs["metadata"]["repo"][
            "sample_project_dir"
        ] = override_path
        # Set samples metadata to ONLY samples intended to generate
        # under this directory (override_path)
        overridden_samples_kwargs["metadata"]["repo"]["samples"] = override_samples
        if root != ".":
            override_path = Path(root) / override_path

        logger.debug(f"Generating templates for override path '{override_path}'")

        overridden_samples_kwargs["subdir"] = override_path
        return self._generic_library("python_samples", **overridden_samples_kwargs)
Example #23
def owlbot_entrypoint(staging_dir: str = STAGING_DIR) -> None:
    """Copies files from staging and template directories into current working dir.

    """
    logging.basicConfig(level=logging.INFO)

    logger.debug("owlbot_main called")

    staging = Path(staging_dir)
    if staging.is_dir():
        logger.debug("Found the staging dir!")
        entries = os.scandir(staging)
        for entry in entries:
            if entry.is_dir():
                # We use the same directory name for destination.
                src = Path(entry.path).resolve()
                dest = Path(src.parts[-1]).resolve()
                owlbot_py = dest / OWLBOT_PY_FILENAME
                if owlbot_py.is_file():
                    subprocess.run(["python", owlbot_py], cwd=dest, check=True)
                else:
                    owlbot_main(src, dest)
        # The staging directory should never be merged into the main branch.
        shutil.rmtree(staging)
    else:
        logger.debug("Staging dir not found.")
Example #24
    def _generate_code(
        self,
        service: str,
        version: str,
        language: str,
        *,
        private: bool = False,
        proto_path: Optional[Union[str, Path]] = None,
        extra_proto_files: List[str] = [],
        output_dir: Optional[Union[str, Path]] = None,
        generator_version: str = GENERATOR_VERSION,
        generator_args: Optional[Mapping[str, str]] = None,
    ):
        # Determine which googleapis repo to use
        if not private:
            googleapis = self._clone_googleapis()
        else:
            googleapis = self._clone_googleapis_private()

        # Confidence check: We should have a googleapis repo; if we do not,
        # something went wrong, and we should abort.
        if googleapis is None:
            raise RuntimeError(
                f"Unable to generate {service}, the googleapis repository"
                "is unavailable."
            )

        # Pull the code generator for the requested language.
        # If a code generator version was specified, honor that.
        logger.debug(
            f"Pulling Docker image: gapic-generator-{language}:{generator_version}"
        )
        shell.run(
            [
                "docker",
                "pull",
                f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}",
            ],
            hide_output=False,
        )

        # Determine where the protos we are generating actually live.
        # We can sometimes (but not always) determine this from the service
        # and version; in other cases, the user must provide it outright.
        if proto_path:
            proto_path = Path(proto_path)
            if proto_path.is_absolute():
                proto_path = proto_path.relative_to("/")
        else:
            proto_path = Path("google/cloud") / service / version

        # Confidence check: Do we have protos where we think we should?
        if not (googleapis / proto_path).exists():
            raise FileNotFoundError(
                f"Unable to find directory for protos: {(googleapis / proto_path)}."
            )
        if not tuple((googleapis / proto_path).glob("*.proto")):
            raise FileNotFoundError(
                f"Directory {(googleapis / proto_path)} exists, but no protos found."
            )

        # Ensure the desired output directory exists.
        # If none was provided, create a temporary directory.
        if not output_dir:
            output_dir = tempfile.mkdtemp()
        output_dir = Path(output_dir).resolve()

        # The time has come, the walrus said, to talk of actually running
        # the code generator.
        sep = os.path.sep

        # Try to figure out the user ID and stay compatible.
        # If there is no `os.getuid()`, fall back to `getpass.getuser()`.
        getuid = getattr(os, "getuid", None)
        if getuid:
            user = str(getuid())
        else:
            user = getpass.getuser()

        docker_run_args = [
            "docker",
            "run",
            "--mount",
            f"type=bind,source={googleapis / proto_path}{sep},destination={Path('/in') / proto_path}{sep},readonly",
            "--mount",
            f"type=bind,source={output_dir}{sep},destination={Path('/out')}{sep}",
            "--rm",
            "--user",
            user,
        ]

        # Process extra proto files, e.g. google/cloud/common_resources.proto,
        # if they are required by this API.
        # First, bind mount all the extra proto files into the container.
        for proto in extra_proto_files:
            source_proto = googleapis / Path(proto)
            if not source_proto.exists():
                raise FileNotFoundError(
                    f"Unable to find extra proto file: {source_proto}."
                )
            docker_run_args.extend(
                [
                    "--mount",
                    f"type=bind,source={source_proto},destination={Path('/in') / proto},readonly",
                ]
            )

        docker_run_args.append(
            f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}"
        )

        # Populate any additional CLI arguments provided for Docker.
        if generator_args:
            for key, value in generator_args.items():
                docker_run_args.append(f"--{key}")
                docker_run_args.append(value)

        logger.debug(f"Generating code for: {proto_path}.")
        shell.run(docker_run_args, hide_output=False)

        # Confidence check: Does the output location have code in it?
        # If not, complain.
        if not tuple(output_dir.iterdir()):
            raise RuntimeError(
                f"Code generation seemed to succeed, but {output_dir} is empty."
            )

        # Huzzah, it worked.
        logger.success(f"Generated code into {output_dir}.")

        # Record this in the synthtool metadata.
        metadata.add_client_destination(
            source="googleapis" if not private else "googleapis-private",
            api_name=service,
            api_version=version,
            language=language,
            generator=f"gapic-generator-{language}",
        )

        _tracked_paths.add(output_dir)
        return output_dir
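For a concrete feel of the bind mounts assembled above, here is what the two --mount arguments evaluate to for a hypothetical run (all paths are illustrative):

from pathlib import Path

googleapis = Path("/tmp/googleapis")
proto_path = Path("google/cloud/vision/v1")
output_dir = Path("/tmp/out")
sep = "/"  # os.path.sep on POSIX

print(f"type=bind,source={googleapis / proto_path}{sep},destination={Path('/in') / proto_path}{sep},readonly")
# type=bind,source=/tmp/googleapis/google/cloud/vision/v1/,destination=/in/google/cloud/vision/v1/,readonly
print(f"type=bind,source={output_dir}{sep},destination={Path('/out')}{sep}")
# type=bind,source=/tmp/out/,destination=/out/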
Example #25
    def merge(source_text: str, destination_text: str, file_path: Path) -> str:
        for pattern in patterns:
            if file_path.match(str(pattern)):
                logger.debug(f"Preserving existing contents of {file_path}.")
                return destination_text
        return source_text
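merge closes over a patterns list, presumably captured by an enclosing factory that hands the hook to a move/merge call. A sketch of that wiring (the preserve factory is hypothetical):

from pathlib import Path
from typing import List

def preserve(patterns: List[str]):
    """Build a merge hook that keeps destination content for matching paths."""
    def merge(source_text: str, destination_text: str, file_path: Path) -> str:
        for pattern in patterns:
            if file_path.match(str(pattern)):
                return destination_text  # keep what is already checked in
        return source_text               # otherwise take the generated text
    return merge

hook = preserve(["pom.xml", "*.lock"])
assert hook("new", "old", Path("pom.xml")) == "old"
assert hook("new", "old", Path("main.py")) == "new"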
Example #26
def postprocess_gapic_library_hermetic(hide_output=False):
    logger.debug("Post-processing GAPIC library...")
    fix_hermetic(hide_output=hide_output)
    compile_protos_hermetic(hide_output=hide_output)
    logger.debug("Post-processing completed")
Example #27
def postprocess_gapic_library(hide_output=False):
    logger.debug("Post-processing GAPIC library...")
    install(hide_output=hide_output)
    fix(hide_output=hide_output)
    compile_protos(hide_output=hide_output)
    logger.debug("Post-processing completed")
Example #28
    def _generate_code(
        self,
        service: str,
        version: str,
        language: str,
        *,
        private: bool = False,
        discogapic: bool = False,
        proto_path: Optional[Union[str, Path]] = None,
        output_dir: Optional[Union[str, Path]] = None,
        bazel_target: Optional[str] = None,
        include_protos: bool = False,
        proto_output_path: Optional[Union[str, Path]] = None,
        tar_strip_components: int = 1,
    ):
        # Determine which googleapis repo to use
        if discogapic:
            api_definitions_repo = self._clone_discovery_artifact_manager()
            api_definitions_repo_name = "discovery-artifact-manager"
        elif private:
            api_definitions_repo = self._clone_googleapis_private()
            api_definitions_repo_name = "googleapis_private"
        else:
            api_definitions_repo = self._clone_googleapis()
            api_definitions_repo_name = "googleapis"

        # Sanity check: We should have a googleapis repo; if we do not,
        # something went wrong, and we should abort.
        if not api_definitions_repo:
            raise RuntimeError(
                f"Unable to generate {service}, the sources repository "
                "is unavailable."
            )
            )

        # Calculate proto_path if necessary.
        if not bazel_target or include_protos:
            # If bazel_target is not specified explicitly, we will need
            # proto_path to calculate it. If include_protos is True,
            # we will need the proto_path to copy the protos.
            if not proto_path:
                if bazel_target:
                    # Calculate proto_path from the full bazel target, which is
                    # in the format "//proto_path:target_name".
                    proto_path = bazel_target.split(":")[0][2:]
                else:
                    # If bazel_target is not specified, assume the protos are
                    # simply under google/cloud, where most of the protos
                    # usually live.
                    proto_path = f"google/cloud/{service}/{version}"
            protos = Path(proto_path)
            if protos.is_absolute():
                protos = protos.relative_to("/")

        # Determine bazel target based on per-language patterns
        # Java:    google-cloud-{{assembly_name}}-{{version}}-java
        # Go:      gapi-cloud-{{assembly_name}}-{{version}}-go
        # Python:  {{assembly_name}}-{{version}}-py
        # PHP:     google-cloud-{{assembly_name}}-{{version}}-php
        # Node.js: {{assembly_name}}-{{version}}-nodejs
        # Ruby:    google-cloud-{{assembly_name}}-{{version}}-ruby
        # C#:      google-cloud-{{assembly_name}}-{{version}}-csharp
        if not bazel_target:
            # Determine where the protos we are generating actually live.
            # We can sometimes (but not always) determine this from the service
            # and version; in other cases, the user must provide it outright.
            parts = list(protos.parts)
            while len(parts) > 0 and parts[0] != "google":
                parts.pop(0)
            if len(parts) == 0:
                raise RuntimeError(
                    f"Cannot determine bazel_target from proto_path {protos}."
                    "Please set bazel_target explicitly."
                )
            if language == "python":
                suffix = f"{service}-{version}-py"
            elif language == "nodejs":
                suffix = f"{service}-{version}-nodejs"
            elif language == "go":
                suffix = f"gapi-{'-'.join(parts[1:])}-go"
            else:
                suffix = f"{'-'.join(parts)}-{language}"
            bazel_target = f"//{os.path.sep.join(parts)}:{suffix}"

            # Sanity check: Do we have protos where we think we should?
            if not (api_definitions_repo / protos).exists():
                raise FileNotFoundError(
                    f"Unable to find directory for protos: {(api_definitions_repo / protos)}."
                )
            if not tuple((api_definitions_repo / protos).glob("*.proto")):
                raise FileNotFoundError(
                    f"Directory {(api_definitions_repo / protos)} exists, but no protos found."
                )
            if not (api_definitions_repo / protos / "BUILD.bazel"):
                raise FileNotFoundError(
                    f"File {(api_definitions_repo / protos / 'BUILD.bazel')} does not exist."
                )

        # Ensure the desired output directory exists.
        # If none was provided, create a temporary directory.
        if not output_dir:
            output_dir = tempfile.mkdtemp()
        output_dir = Path(output_dir).resolve()

        # Let's build some stuff now.
        cwd = os.getcwd()
        os.chdir(str(api_definitions_repo))

        bazel_run_args = [
            "bazel",
            "--max_idle_secs=240",
            "build",
            bazel_target,
        ]

        logger.debug(f"Generating code for: {bazel_target}.")
        shell.run(bazel_run_args)

        # We've got a tar file!
        # Its location: bazel-bin/google/cloud/language/v1/language-v1-nodejs.tar.gz
        # bazel_target:         //google/cloud/language/v1:language-v1-nodejs
        tar_file = (
            f"bazel-bin{os.path.sep}{bazel_target[2:].replace(':', os.path.sep)}.tar.gz"
        )

        tar_run_args = [
            "tar",
            "-C",
            str(output_dir),
            f"--strip-components={tar_strip_components}",
            "-xzf",
            tar_file,
        ]
        shell.run(tar_run_args)

        # Get the *.protos files and put them in a protos dir in the output
        if include_protos:
            proto_files = protos.glob("**/*.proto")
            # By default, put the protos at the root in a folder named 'protos'.
            # Specific languages can be cased here to put them in a more language
            # appropriate place.
            if not proto_output_path:
                proto_output_path = output_dir / "protos"
                if language == "python":
                    # place protos alongside the *_pb2.py files
                    proto_output_path = (
                        output_dir / f"google/cloud/{service}_{version}/proto"
                    )
            else:
                proto_output_path = Path(output_dir / proto_output_path)
            os.makedirs(proto_output_path, exist_ok=True)

            for i in proto_files:
                logger.debug(f"Copy: {i} to {proto_output_path / i.name}")
                shutil.copyfile(i, proto_output_path / i.name)
            logger.success(f"Placed proto files into {proto_output_path}.")

        os.chdir(cwd)

        # Sanity check: Does the output location have code in it?
        # If not, complain.
        if not tuple(output_dir.iterdir()):
            raise RuntimeError(
                f"Code generation seemed to succeed, but {output_dir} is empty."
            )

        # Huzzah, it worked.
        logger.success(f"Generated code into {output_dir}.")

        # Record this in the synthtool metadata.
        metadata.add_client_destination(
            source=api_definitions_repo_name,
            api_name=service,
            api_version=version,
            language=language,
            generator="bazel",
        )

        _tracked_paths.add(output_dir)
        return output_dir
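To make the per-language naming concrete, here is a small standalone reduction of the target-derivation logic above, simplified to the nodejs and go cases:

from pathlib import Path

def derive_bazel_target(proto_path: str, service: str, version: str, language: str) -> str:
    # Trim any prefix before the "google" component, as the method above does.
    parts = list(Path(proto_path).parts)
    while parts and parts[0] != "google":
        parts.pop(0)
    if language == "nodejs":
        suffix = f"{service}-{version}-nodejs"
    elif language == "go":
        suffix = f"gapi-{'-'.join(parts[1:])}-go"
    else:
        suffix = f"{'-'.join(parts)}-{language}"
    return f"//{'/'.join(parts)}:{suffix}"

print(derive_bazel_target("google/cloud/language/v1", "language", "v1", "nodejs"))
# //google/cloud/language/v1:language-v1-nodejs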
Example #29
    def py_samples(self, **kwargs) -> List[Path]:
        """
        Handles generation of README.md templates for Python samples
        - Determines whether generation is being done in a client library or in a samples
        folder automatically
        - Otherwise accepts manually set sample_project_dir through kwargs metadata
        - Delegates generation of additional sample documents alternate/overridden folders
        through py_samples_override()
        """
        # kwargs["metadata"] is required to load values from .repo-metadata.json
        if "metadata" not in kwargs:
            kwargs["metadata"] = {}

        # load common repo meta information (metadata that's not language specific).
        self._load_generic_metadata(kwargs["metadata"])

        # temporary exclusion prior to old templates being migrated out
        self.excludes.extend([
            "README.rst",
            "auth_api_key.tmpl.rst",
            "auth.tmpl.rst",
            "install_deps.tmpl.rst",
            "install_portaudio.tmpl.rst",
            "noxfile.py.j2",
        ])

        # ensure samples will generate
        kwargs["metadata"]["samples"] = True

        # determine if in client lib and set custom root sample dir if specified, else None
        in_client_library = Path("samples").exists()
        sample_project_dir = kwargs["metadata"]["repo"].get(
            "sample_project_dir")

        if sample_project_dir is None:  # Not found in metadata
            if in_client_library:
                sample_project_dir = "samples"
            else:
                sample_project_dir = "."
        elif not Path(sample_project_dir).exists():
            raise Exception(f"'{sample_project_dir}' does not exist")

        # Maps override_path -> the list of sample dicts to generate there.
        override_paths_to_samples: Dict[str, List[Dict]] = {}
        samples_dict = deepcopy(kwargs["metadata"]["repo"].get("samples"))
        # Samples which will generate in the default sample_project_dir.
        default_samples_dict = []

        # Iterate through samples to store override_paths_to_samples for all existing
        # override paths
        for sample in samples_dict:
            override_path = sample.get("override_path")

            if override_path is not None:
                # add absolute path to metadata so `python foo.py --help` succeeds
                if sample.get("file") is not None:
                    path = os.path.join(sample_project_dir, override_path,
                                        sample.get("file"))
                    sample["abs_path"] = Path(path).resolve()

                cur_override_sample = override_paths_to_samples.get(
                    override_path)
                # Base case: No samples are yet planned to gen in this override dir
                if cur_override_sample is None:
                    override_paths_to_samples[override_path] = [sample]
                # Else: Sample docs will be generated in README merged with other
                # sample doc(s) already planned to generate in this dir
                else:
                    cur_override_sample.append(sample)
                    override_paths_to_samples[
                        override_path] = cur_override_sample
            # If override path none, will be generated in the default
            # folder: sample_project_dir
            else:
                if sample.get("file") is not None:
                    path = os.path.join(sample_project_dir, sample.get("file"))
                    sample["abs_path"] = Path(path).resolve()
                default_samples_dict.append(sample)

        # List of paths to tempdirs which will be copied into sample folders
        result = []

        # a deep copy is required here to avoid mutating kwargs
        overridden_samples_kwargs = deepcopy(kwargs)
        for override_path in override_paths_to_samples:
            # Generate override sample docs
            result.append(
                self.py_samples_override(
                    root=sample_project_dir,
                    override_path=override_path,
                    override_samples=override_paths_to_samples[override_path],
                    **overridden_samples_kwargs,
                ))
        kwargs["metadata"]["repo"]["samples"] = default_samples_dict

        logger.debug(
            f"Generating templates for samples directory '{sample_project_dir}'"
        )
        kwargs["subdir"] = sample_project_dir
        # Generate default sample docs
        result.append(self._generic_library("python_samples", **kwargs))

        for path in result:
            # .add() records the root of the paths and needs to be applied to each
            _tracked_paths.add(path)

        return result
Example #30
def install(hide_output=False):
    """
    Installs all dependencies for the current Node.js library.
    """
    logger.debug("Installing dependencies...")
    shell.run(["npm", "install"], hide_output=hide_output)