Example #1
def replace(
    sources: ListOfPathsOrStrs, before: str, after: str, flags: int = re.MULTILINE
) -> int:
    """Replaces occurrences of before with after in all the given sources.

    Returns:
      The number of times the text was found and replaced across all files.
    """
    expr = re.compile(before, flags=flags or 0)
    paths = _filter_files(_expand_paths(sources, "."))

    if not paths:
        logger.warning(f"No files were found in sources {sources} for replace()")

    count_replaced = 0
    for path in paths:
        replaced = _replace_in_file(path, expr, after)
        count_replaced += replaced
        if replaced:
            logger.info(f"Replaced {before!r} in {path}.")

    if not count_replaced:
        logger.warning(
            f"No replacements made in {sources} for pattern {before}, maybe "
            "replacement is no longer needed?"
        )
    return count_replaced
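A minimal usage sketch for replace(), assuming the surrounding synthtool module (logger, _expand_paths, _filter_files, _replace_in_file) is already in scope; the glob and pattern below are invented for illustration.

# Hypothetical call: rewrite a version string across Python files under "src".
count = replace(["src/**/*.py"], before=r"v1beta1", after="v1")
if count == 0:
    logger.warning("No replacements made; the files may already be up to date.")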
Example #2
def _add_git_source_from_directory(name: str, dir_path: str) -> int:
    """Adds the git repo containing the directory as a git source.

    Returns:
        The number of git sources added to metadata.
    """
    completed_process = subprocess.run(["git", "-C", dir_path, "status"],
                                       universal_newlines=True)
    if completed_process.returncode:
        logger.warning("%s is not directory in a git repo.", dir_path)
        return 0
    completed_process = subprocess.run(
        ["git", "-C", dir_path, "remote", "get-url", "origin"],
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )
    url = completed_process.stdout.strip()
    completed_process = subprocess.run(
        [
            "git", "-C", dir_path, "log", "--no-decorate", "-1",
            "--pretty=format:%H"
        ],
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )
    latest_sha = completed_process.stdout.strip()
    add_git_source(name=name, remote=url, sha=latest_sha)
    return 1
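A short usage sketch, assuming add_git_source and logger come from the surrounding metadata module; the source name and directory are hypothetical. The function shells out to `git status`, `git remote get-url origin`, and `git log -1 --pretty=format:%H`.

# Record the repo containing the current working directory as a git source
# (the name and path here are examples only).
added = _add_git_source_from_directory("googleapis", ".")
print(f"git sources added: {added}")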
Example #3
def _read_sample_metadata_comment(sample_file: str) -> Dict:
    """Additional meta-information can be provided through embedded comments:

    // sample-metadata:
    //   title: ACL (Access Control)
    //   description: Demonstrates setting access control rules.
    //   usage: node iam.js --help
    """
    sample_metadata = {}  # type: Dict[str, str]
    with open(sample_file) as f:
        contents = f.read()
        match = re.search(r"(?P<metadata>// *sample-metadata:([^\n]+|\n//)+)",
                          contents, re.DOTALL)
        if match:
            # the metadata YAML is stored in comments; remove the comment
            # prefix so that we can parse the YAML it contains.
            sample_metadata_string = re.sub(r"((#|//) ?)", "",
                                            match.group("metadata"))
            try:
                sample_metadata = yaml.load(
                    sample_metadata_string,
                    Loader=yaml.SafeLoader)["sample-metadata"]
            except yaml.scanner.ScannerError:
                # warn and continue on bad metadata
                logger.warning(f"bad metadata detected in {sample_file}")
    return sample_metadata
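A hedged round-trip sketch using a throwaway sample file; the filename and metadata values are invented, but they follow the comment format shown in the docstring.

# Write a tiny JS sample containing an embedded metadata comment, then parse it
# (file name and field values are invented for illustration).
with open("iam.js", "w") as f:
    f.write(
        "// sample-metadata:\n"
        "//   title: ACL (Access Control)\n"
        "//   usage: node iam.js --help\n"
    )
metadata = _read_sample_metadata_comment("iam.js")
print(metadata)  # {'title': 'ACL (Access Control)', 'usage': 'node iam.js --help'}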
Example #4
def move(
    sources: ListOfPathsOrStrs,
    destination: PathOrStr = None,
    excludes: ListOfPathsOrStrs = None,
    merge: Callable[[str, str, Path], str] = None,
    required: bool = False,
) -> bool:
    """
    Copy file(s) at source to the current directory, preserving file mode.

    Args:
        sources (ListOfPathsOrStrs): Glob pattern(s) to copy
        destination (PathOrStr): Destination folder for copied files
        excludes (ListOfPathsOrStrs): Glob pattern(s) of files to skip
        merge (Callable[[str, str, Path], str]): Callback function for merging files
            if there is an existing file.
        required (bool): If True and no source files are copied, raises a MissingSourceError

    Returns:
        True if any files were copied, False otherwise.
    """
    copied = False

    for source in _expand_paths(sources):
        if destination is None:
            canonical_destination = _tracked_paths.relativize(source)
        else:
            canonical_destination = Path(destination)

        if excludes:
            excludes = [
                _tracked_paths.relativize(e)
                for e in _expand_paths(excludes, source)
            ]
        else:
            excludes = []
        if source.is_dir():
            # Evaluate the copy first so later sources are still processed
            # once `copied` is already True.
            copied = _copy_dir_to_existing_dir(
                source, canonical_destination, excludes=excludes, merge=merge
            ) or copied
        elif source not in excludes:
            # copy individual file
            if merge is not None and canonical_destination.is_file():
                _merge_file(source, canonical_destination, merge)
            else:
                shutil.copy2(source, canonical_destination)
            copied = True

    if not copied:
        if required:
            raise MissingSourceError(
                f"No files in sources {sources} were copied. Does the source "
                f"contain files?")
        else:
            logger.warning(
                f"No files in sources {sources} were copied. Does the source "
                f"contain files?")

    return copied
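A usage sketch, assuming the template output directory has been registered with _tracked_paths (as synthtool normally does); the globs below are illustrative rather than taken from a real synth.py.

# Copy generated templates into the current repo, skipping one file
# (paths are illustrative only).
copied = move(["templated/**/*"], excludes=["templated/noxfile.py"])
if not copied:
    logger.warning("Nothing was copied; check the template output path.")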
Example #5
    def py_library(self, **kwargs) -> Path:
        # kwargs["metadata"] is required to load values from .repo-metadata.json
        if "metadata" not in kwargs:
            kwargs["metadata"] = {}
        # rename variable to accommodate existing synth.py files
        if "system_test_dependencies" in kwargs:
            kwargs["system_test_local_dependencies"] = kwargs[
                "system_test_dependencies"
            ]
            logger.warning(
                "Template argument 'system_test_dependencies' is deprecated. "
                "Use 'system_test_local_dependencies' or "
                "'system_test_external_dependencies' instead."
            )

        return self._generic_library("python_library", **kwargs)
Example #6
    def py_library(self, **kwargs) -> Path:
        # kwargs["metadata"] is required to load values from .repo-metadata.json
        if "metadata" not in kwargs:
            kwargs["metadata"] = {}
        # rename variable to accommodate existing synth.py files
        if "system_test_dependencies" in kwargs:
            kwargs["system_test_local_dependencies"] = kwargs[
                "system_test_dependencies"]
            logger.warning(
                "Template argument 'system_test_dependencies' is deprecated. "
                "Use 'system_test_local_dependencies' or "
                "'system_test_external_dependencies' instead.")

        # Set default Python versions for noxfile.py
        if "default_python_version" not in kwargs:
            kwargs["default_python_version"] = "3.8"
        if "unit_test_python_versions" not in kwargs:
            kwargs["unit_test_python_versions"] = ["3.6", "3.7", "3.8"]
            if "microgenerator" not in kwargs:
                kwargs["unit_test_python_versions"] = [
                    "2.7", "3.5"
                ] + kwargs["unit_test_python_versions"]

        if "system_test_python_versions" not in kwargs:
            kwargs["system_test_python_versions"] = ["3.8"]
            if "microgenerator" not in kwargs:
                kwargs["system_test_python_versions"] = [
                    "2.7"
                ] + kwargs["system_test_python_versions"]

        # If cov_level is not given, set it to None.
        if "cov_level" not in kwargs:
            kwargs["cov_level"] = None

        # Don't add samples templates if there are no samples
        if "samples" not in kwargs:
            self.excludes += [
                "samples/AUTHORING_GUIDE.md", "samples/CONTRIBUTING.md"
            ]

        return self._generic_library("python_library", **kwargs)
Example #7
def _get_new_files(newer_than: float) -> List[str]:
    """Searchs current directory for new files and returns them in a list.

    Parameters:
        newer_than: any file modified after this timestamp (from time.time())
            will be added to the metadata
    """
    new_files = []
    for (root, dirs, files) in os.walk(os.getcwd()):
        for filename in files:
            filepath = os.path.join(root, filename)
            try:
                mtime = os.path.getmtime(filepath)
            except FileNotFoundError:
                logger.warning(
                    f"FileNotFoundError while getting modified time for {filepath}."
                )
                continue
            if mtime >= newer_than:
                new_files.append(os.path.relpath(filepath))
    return new_files
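A minimal sketch of how the timestamp parameter is intended to be used; the generation step in the middle is hypothetical.

import time

start_time = time.time()
# ... run a (hypothetical) code-generation step that writes files into the
# current directory ...
for path in _get_new_files(newer_than=start_time):
    print("generated:", path)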
Example #8
    def py_library(self, **kwargs) -> Path:
        # kwargs["metadata"] is required to load values from .repo-metadata.json
        if "metadata" not in kwargs:
            kwargs["metadata"] = {}
        # rename variable to accommodate existing synth.py files
        if "system_test_dependencies" in kwargs:
            kwargs["system_test_local_dependencies"] = kwargs[
                "system_test_dependencies"]
            logger.warning(
                "Template argument 'system_test_dependencies' is deprecated. "
                "Use 'system_test_local_dependencies' or "
                "'system_test_external_dependencies' instead.")

        # Set default Python versions for noxfile.py
        if "default_python_version" not in kwargs:
            kwargs["default_python_version"] = "3.8"
        if "unit_test_python_versions" not in kwargs:
            kwargs["unit_test_python_versions"] = ["3.6", "3.7", "3.8"]
            if "microgenerator" not in kwargs:
                kwargs["unit_test_python_versions"] = [
                    "2.7", "3.5"
                ] + kwargs["unit_test_python_versions"]

        if "system_test_python_versions" not in kwargs:
            kwargs["system_test_python_versions"] = ["3.8"]
            if "microgenerator" not in kwargs:
                kwargs["system_test_python_versions"] = [
                    "2.7"
                ] + kwargs["system_test_python_versions"]

        # If cov_level is not given, set it to None.
        if "cov_level" not in kwargs:
            kwargs["cov_level"] = None

        # Don't add samples templates if there are no samples
        if "samples" not in kwargs:
            self.excludes += [
                "samples/AUTHORING_GUIDE.md", "samples/CONTRIBUTING.md"
            ]

        ret = self._generic_library("python_library", **kwargs)

        # If split_system_tests is set to True, we disable the system
        # tests in the main presubmit build and create individual build
        # configs for each Python version.
        if kwargs.get("split_system_tests", False):
            template_root = self._template_root / "py_library_split_systests"
            # copy the main presubmit config
            shutil.copy2(
                template_root / ".kokoro/presubmit/presubmit.cfg",
                ret / ".kokoro/presubmit/presubmit.cfg",
            )
            env = jinja2.Environment(
                loader=jinja2.FileSystemLoader(str(template_root)))
            tmpl = env.get_template(".kokoro/presubmit/system.cfg")
            for v in kwargs["system_test_python_versions"]:
                nox_session = f"system-{v}"
                dest = ret / f".kokoro/presubmit/system-{v}.cfg"
                content = tmpl.render(nox_session=nox_session)
                with open(dest, "w") as f:
                    f.write(content)
        return ret
Example #9
    def py_library(self, **kwargs) -> Path:
        # kwargs["metadata"] is required to load values from .repo-metadata.json
        if "metadata" not in kwargs:
            kwargs["metadata"] = {}

        # load common repo meta information (metadata that's not language specific).
        self._load_generic_metadata(kwargs["metadata"])

        # initialize default_version if it doesn't exist in kwargs["metadata"]['repo']
        if "default_version" not in kwargs["metadata"]["repo"]:
            kwargs["metadata"]["repo"]["default_version"] = ""

        # rename variable to accommodate existing owlbot.py files
        if "system_test_dependencies" in kwargs:
            kwargs["system_test_local_dependencies"] = kwargs[
                "system_test_dependencies"]
            logger.warning(
                "Template argument 'system_test_dependencies' is deprecated. "
                "Use 'system_test_local_dependencies' or "
                "'system_test_external_dependencies' instead.")

        # Set default Python versions for noxfile.py
        if "default_python_version" not in kwargs:
            kwargs["default_python_version"] = "3.8"
        if "unit_test_python_versions" not in kwargs:
            kwargs["unit_test_python_versions"] = [
                "3.6", "3.7", "3.8", "3.9", "3.10"
            ]

        if "system_test_python_versions" not in kwargs:
            kwargs["system_test_python_versions"] = ["3.8"]

        # If cov_level is not given, set it to None.
        if "cov_level" not in kwargs:
            kwargs["cov_level"] = None

        # Don't add samples templates if there are no samples
        if "samples" not in kwargs:
            self.excludes += [
                "samples/AUTHORING_GUIDE.md", "samples/CONTRIBUTING.md"
            ]

        # Don't add `docs/index.rst` if `versions` is not provided or `default_version` is empty
        if ("versions" not in kwargs
                or not kwargs["metadata"]["repo"]["default_version"]):
            self.excludes += ["docs/index.rst"]

        # Add kwargs to signal that UPGRADING.md should be included in docs/index.rst if it exists
        if Path("docs/UPGRADING.md").exists():
            kwargs["include_uprading_doc"] = True

        # If the directory `google/cloud` exists, add kwargs to signal that the client library is for a Cloud API
        if Path("google/cloud").exists():
            kwargs["is_google_cloud_api"] = True

        # Assume the python-docs-samples Dockerfile is used for samples by default
        if "custom_samples_dockerfile" not in kwargs:
            kwargs["custom_samples_dockerfile"] = False

        ret = self._generic_library("python_library", **kwargs)

        # If split_system_tests is set to True, we disable the system
        # tests in the main presubmit build and create individual build
        # configs for each Python version.
        if kwargs.get("split_system_tests", False):
            template_root = self._template_root / "py_library_split_systests"
            # copy the main presubmit config
            shutil.copy2(
                template_root / ".kokoro/presubmit/presubmit.cfg",
                ret / ".kokoro/presubmit/presubmit.cfg",
            )
            env = jinja2.Environment(
                loader=jinja2.FileSystemLoader(str(template_root)))
            tmpl = env.get_template(".kokoro/presubmit/system.cfg")
            for v in kwargs["system_test_python_versions"]:
                nox_session = f"system-{v}"
                dest = ret / f".kokoro/presubmit/system-{v}.cfg"
                content = tmpl.render(nox_session=nox_session)
                with open(dest, "w") as f:
                    f.write(content)
        return ret
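A hedged owlbot.py-style call site for py_library, assuming the usual synthtool imports; the option values are examples chosen for illustration, not defaults enforced by the code above.

import synthtool as s
from synthtool import gcp

common = gcp.CommonTemplates()
# Example kwargs; adjust per repository.
templated_files = common.py_library(
    cov_level=99,
    system_test_python_versions=["3.8"],
    split_system_tests=True,
)
s.move(templated_files, excludes=[".coveragerc"])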
Example #10
    def _include_samples(
        self,
        language: str,
        version: str,
        genfiles: Path,
        googleapis_service_dir: Path,
        samples_root_dir: Path = None,
        samples_resources_dir: Path = None,
    ):
        """Include code samples and supporting resources in generated output.

        Resulting directory structure in generated output:
            samples/
            ├── resources
            │   ├── example_text_file.txt
            │   └── example_data.csv
            └── v1/
                ├── sample_one.py
                ├── sample_two.py
                └── test/
                    ├── samples.manifest.yaml
                    ├── sample_one.test.yaml
                    └── sample_two.test.yaml

        Samples are included in the genfiles output of the generator.

        Sample tests are defined in googleapis:
            {service}/{version}/samples/test/*.test.yaml

        Sample resources are declared in {service}/sample_resources.yaml
        which includes a list of files with public gs:// URIs for download.

        Sample resources are files needed to run code samples or system tests.
        Synth keeps resources in sync by always pulling down the latest version.
        It is recommended to store resources in the `cloud-samples-data` bucket.

        Sample manifest is a generated file which defines invocation commands
        for each code sample (used by sample-tester to invoke samples).
        """

        if samples_root_dir is None:
            samples_root_dir = genfiles / "samples"

        if samples_resources_dir is None:
            samples_resources_dir = samples_root_dir / "resources"

        samples_version_dir = samples_root_dir / version

        # Some languages capitalize their `V` prefix for version numbers
        if not samples_version_dir.is_dir():
            samples_version_dir = samples_root_dir / version.capitalize()

        # Do not proceed if genfiles does not include samples/{version} dir.
        if not samples_version_dir.is_dir():
            return None

        samples_test_dir = samples_version_dir / "test"
        samples_manifest_yaml = samples_test_dir / "samples.manifest.yaml"

        googleapis_samples_dir = googleapis_service_dir / version / "samples"
        googleapis_resources_yaml = googleapis_service_dir / "sample_resources.yaml"

        # Copy sample tests from googleapis {service}/{version}/samples/*.test.yaml
        # into generated output as samples/{version}/test/*.test.yaml
        test_files = googleapis_samples_dir.glob("**/*.test.yaml")
        os.makedirs(samples_test_dir, exist_ok=True)
        for i in test_files:
            logger.debug(f"Copy: {i} to {samples_test_dir / i.name}")
            shutil.copyfile(i, samples_test_dir / i.name)

        # Download sample resources from sample_resources.yaml storage URIs.
        #
        #  sample_resources:
        #  - uri: gs://bucket/the/file/path.csv
        #    description: Description of this resource
        #
        # Code follows happy path. An error is desirable if YAML is invalid.
        if googleapis_resources_yaml.is_file():
            with open(googleapis_resources_yaml, "r") as f:
                resources_data = yaml.load(f, Loader=yaml.SafeLoader)
            resource_list = resources_data.get("sample_resources")
            for resource in resource_list:
                uri = resource.get("uri")
                if uri.startswith("gs://"):
                    uri = uri.replace("gs://", "https://storage.googleapis.com/")
                response = requests.get(uri, allow_redirects=True)
                download_path = samples_resources_dir / os.path.basename(uri)
                os.makedirs(samples_resources_dir, exist_ok=True)
                logger.debug(f"Download {uri} to {download_path}")
                with open(download_path, "wb") as output:  # type: ignore
                    output.write(response.content)

        # Generate manifest file at samples/{version}/test/samples.manifest.yaml
        # Includes a reference to every sample (via its "region tag" identifier)
        # along with structured instructions on how to invoke that code sample.
        relative_manifest_path = str(
            samples_manifest_yaml.relative_to(samples_root_dir)
        )

        LANGUAGE_EXECUTABLES = {
            "nodejs": "node",
            "php": "php",
            "python": "python3",
            "ruby": "bundle exec ruby",
        }
        if language not in LANGUAGE_EXECUTABLES:
            logger.info("skipping manifest gen")
            return None

        manifest_arguments = [
            "gen-manifest",
            f"--env={language}",
            f"--bin={LANGUAGE_EXECUTABLES[language]}",
            f"--output={relative_manifest_path}",
            "--chdir={@manifest_dir}/../..",
        ]

        for code_sample in samples_version_dir.glob("*"):
            sample_path = str(code_sample.relative_to(samples_root_dir))
            if os.path.isfile(code_sample):
                manifest_arguments.append(sample_path)
        try:
            logger.debug(f"Writing samples manifest {manifest_arguments}")
            shell.run(manifest_arguments, cwd=samples_root_dir)
        except (subprocess.CalledProcessError, FileNotFoundError):
            logger.warning("gen-manifest failed (sample-tester may not be installed)")