def clone(
    url: str,
    dest: pathlib.Path = None,
    committish: str = "master",
    force: bool = False,
    depth: int = None,
) -> pathlib.Path:
    if dest is None:
        dest = cache.get_cache_dir()

    dest = dest / pathlib.Path(url).stem

    if force and dest.exists():
        shutil.rmtree(dest)

    if not dest.exists():
        cmd = ["git", "clone", url, dest]
        if depth is not None:
            cmd.extend(["--depth", str(depth)])
        shell.run(cmd)
    else:
        shell.run(["git", "pull"], cwd=str(dest))

    shell.run(["git", "reset", "--hard", committish], cwd=str(dest))

    # track all git repositories
    _tracked_paths.add(dest)

    return dest

def get_staging_dirs(default_version: Optional[str] = None) -> List[Path]:
    """Returns the list of directories, one per version, copied from
    https://github.com/googleapis/googleapis-gen. Will return in lexical sorting
    order with the exception of the default_version which will be last (if specified).

    Args:
        default_version: the default version of the API. The directory for this version
            will be the last item in the returned list if specified.

    Returns:
        the empty list if no files were copied.
    """
    staging = Path("owl-bot-staging")
    if staging.is_dir():
        # Collect the subdirectories of the staging directory.
        versions = [v.name for v in staging.iterdir() if v.is_dir()]
        # Reorder the versions so the default version always comes last.
        versions = [v for v in versions if v != default_version]
        versions.sort()
        if default_version is not None:
            versions += [default_version]
        dirs = [staging / v for v in versions]
        for dir in dirs:
            _tracked_paths.add(dir)
        return dirs
    else:
        return []

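# Illustrative usage sketch (an assumption, not taken from the original source):
# an owlbot.py typically copies each staging directory returned by
# get_staging_dirs into the repo root. The import path follows synthtool's
# conventional layout; the excludes list is a placeholder.
import synthtool as s
from synthtool.languages import node

for library in node.get_staging_dirs(default_version="v1"):
    s.copy([library], excludes=["README.md", "package.json"])
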
def _generic_library(self, directory: str, **kwargs) -> Path:
    # load common repo meta information (metadata that's not language specific).
    if "metadata" in kwargs:
        self._load_generic_metadata(kwargs["metadata"])
        # if no samples were found, don't attempt to render a
        # samples/README.md.
        if "samples" not in kwargs["metadata"] or not kwargs["metadata"]["samples"]:
            self.excludes.append("samples/README.md")

    t = templates.TemplateGroup(self._template_root / directory, self.excludes)

    if "repository" in kwargs["metadata"] and "repo" in kwargs["metadata"]:
        kwargs["metadata"]["repo"]["default_branch"] = _get_default_branch_name(
            kwargs["metadata"]["repository"]
        )

    # TODO: migrate to python.py once old sample gen is deprecated
    if directory == "python_samples":
        t.env.globals["get_help"] = lambda filename: shell.run(
            ["python", filename, "--help"]
        ).stdout

    result = t.render(**kwargs)
    _tracked_paths.add(result)

    return result

def test__dont_overwrite():
    with tempfile.TemporaryDirectory() as dira, tempfile.TemporaryDirectory() as dirb:
        Path(dira).joinpath("README.md").write_text("README")
        Path(dira).joinpath("code.py").write_text("# code.py")
        Path(dira).joinpath("BUILD").write_text("bazel")
        Path(dirb).joinpath("README.md").write_text("chickens")
        Path(dirb).joinpath("code.py").write_text("# chickens")
        cwd = os.getcwd()
        os.chdir(dirb)

        try:
            _tracked_paths.add(dira)
            transforms.move(
                [Path(dira).joinpath("*")], merge=transforms.dont_overwrite(["*.md"])
            )
        finally:
            os.chdir(cwd)

        # Should not have been overwritten.
        assert "chickens" == Path(dirb).joinpath("README.md").read_text()
        # Should have been overwritten.
        assert "# code.py" == Path(dirb).joinpath("code.py").read_text()
        # Should have been written.
        assert "bazel" == Path(dirb).joinpath("BUILD").read_text()

def owlbot_main(template_path: Optional[Path] = None):
    """Copies files from staging and template directories into current working dir.

    When there is no owlbot.py file, run this function instead. Also, when an
    owlbot.py file is necessary, the first statement of owlbot.py should probably
    call this function.

    Depends on owl-bot copying into a staging directory, so your .OwlBot.yaml
    should look a lot like this:

        docker:
            image: gcr.io/repo-automation-bots/owlbot-nodejs:latest

        deep-remove-regex:
            - /owl-bot-staging

        deep-copy-regex:
            - source: /google/cloud/video/transcoder/(.*)/.*-nodejs/(.*)
              dest: /owl-bot-staging/$1/$2

    Also, this function requires a default_version in your .repo-metadata.json. Ex:
        "default_version": "v1",
    """
    logging.basicConfig(level=logging.DEBUG)
    # Load the default version defined in .repo-metadata.json.
    default_version = json.load(open(".repo-metadata.json", "rt"))["default_version"]

    staging = Path("owl-bot-staging")
    s_copy = transforms.move
    if staging.is_dir():
        # Collect the subdirectories of the staging directory.
        versions = [v.name for v in staging.iterdir() if v.is_dir()]
        # Reorder the versions so the default version always comes last.
        versions = [v for v in versions if v != default_version] + [default_version]
        # Copy each version directory into the root.
        for version in versions:
            library = staging / version
            _tracked_paths.add(library)
            s_copy([library], excludes=["README.md", "package.json", "src/index.ts"])
        # The staging directory should never be merged into the main branch.
        shutil.rmtree(staging)
    else:
        # Collect the subdirectories of the src directory.
        src = Path("src")
        versions = [v.name for v in src.iterdir() if v.is_dir()]
        # Reorder the versions so the default version always comes last.
        versions = [v for v in versions if v != default_version] + [default_version]

    common_templates = gcp.CommonTemplates(template_path)
    templates = common_templates.node_library(
        source_location="build/src", versions=versions, default_version=default_version
    )
    s_copy([templates], excludes=[])

    postprocess_gapic_library_hermetic()

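# Illustrative sketch (assumption, not from the original source): a repo that
# needs custom post-processing can still call owlbot_main first and layer its
# own replacements on top. The import path mirrors synthtool's node helpers;
# the s.replace call is a placeholder.
import synthtool as s
from synthtool.languages.node import owlbot_main

owlbot_main()
# Repo-specific tweaks would follow, e.g.:
# s.replace("src/index.ts", "old", "new")
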
def _generic_library(self, directory: str, **kwargs) -> Path:
    t = templates.TemplateGroup(_TEMPLATES_DIR / directory)
    result = t.render(**kwargs)
    _tracked_paths.add(result)
    metadata.add_template_source(
        name=directory, origin="synthtool.gcp", version=__main__.VERSION
    )
    return result

def test_deep_paths():
    parent = FIXTURES / "parent"
    deep_path = FIXTURES / "parent" / "child" / "grandchild"
    deep_item = deep_path / "thing.txt"

    _tracked_paths.add(parent)
    _tracked_paths.add(deep_path)

    assert _tracked_paths.relativize(deep_item) == Path("thing.txt")

def py_samples(*, root: PathOrStr = None, skip_readmes: bool = False) -> None:
    """
    Find all samples projects and render templates.
    Samples projects always have a 'requirements.txt' file and may also have
    README.rst.in

    Args:
        root (Union[Path, str]): The samples directory root.
        skip_readmes (bool): If true, do not generate readmes.
    """
    in_client_library = Path("samples").exists() and Path("setup.py").exists()
    if root is None:
        if in_client_library:
            root = "samples"
        else:
            root = "."

    excludes = []

    # todo(kolea2): temporary exclusion until samples are ready to be migrated
    # to new format
    excludes.append("README.md")

    # TODO(busunkim): Readmegen is disabled as it requires installing the sample
    # requirements in Synthtool. Sample Readmegen should be refactored to stop
    # relying on the output of `python sample.py --help`
    skip_readmes = True
    if skip_readmes:
        excludes.append("README.rst")
    t = templates.TemplateGroup(SAMPLES_TEMPLATE_PATH, excludes=excludes)
    t.env.globals["get_help"] = _get_help  # for sample readmegen

    for req in Path(root).glob("**/requirements.txt"):
        sample_project_dir = req.parent
        log.info(f"Generating templates for samples project '{sample_project_dir}'")

        excludes = ["**/*tmpl*"]  # .tmpl. files are partial templates

        sample_readme_metadata: Dict[str, Any] = {}
        if not skip_readmes:
            sample_readme_metadata = _get_sample_readme_metadata(sample_project_dir)
            # Don't generate readme if there's no metadata
            if sample_readme_metadata == {}:
                excludes.append("**/README.rst")

        if Path(sample_project_dir / "noxfile_config.py").exists():
            # Don't overwrite existing noxfile configs
            excludes.append("**/noxfile_config.py")

        result = t.render(subdir=sample_project_dir, **sample_readme_metadata)
        _tracked_paths.add(result)
        s.copy([result], excludes=excludes)

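# Illustrative sketch (assumption): a library's synth.py can drive the sample
# templating above through synthtool's python language helpers. skip_readmes is
# forced on inside py_samples anyway, per the TODO above.
from synthtool.languages import python

python.py_samples(skip_readmes=True)
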
def test__move_to_dest_subdir(expand_path_fixtures):
    tmp_path = Path(str(expand_path_fixtures))
    _tracked_paths.add(expand_path_fixtures)
    dest = Path(str(expand_path_fixtures / normpath("dest/dira")))

    # Move to a different dir to make sure that move doesn't depend on the cwd
    os.chdir(tempfile.gettempdir())
    transforms.move(tmp_path / "dira", dest, excludes=["f.py"])
    os.chdir(str(tmp_path))

    files = sorted([str(x) for x in transforms._expand_paths("**/*", root="dest")])

    # Assert destination does not contain dira/f.py (excluded)
    assert files == [normpath("dest/dira"), normpath("dest/dira/e.txt")]

def _generic_library(self, directory: str, **kwargs) -> Path:
    # load common repo meta information (metadata that's not language specific).
    if "metadata" in kwargs:
        self._load_generic_metadata(kwargs["metadata"])
        # if no samples were found, don't attempt to render a
        # samples/README.md.
        if "samples" not in kwargs["metadata"] or not kwargs["metadata"]["samples"]:
            self.excludes.append("samples/README.md")

    t = templates.TemplateGroup(self._template_root / directory, self.excludes)
    result = t.render(**kwargs)
    _tracked_paths.add(result)

    return result

def _generic_library(self, directory: str, **kwargs) -> Path:
    # load common repo meta information (metadata that's not language specific).
    if "metadata" in kwargs:
        self._load_generic_metadata(kwargs["metadata"])
        # if no samples were found, don't attempt to render a
        # samples/README.md.
        if not kwargs["metadata"]["samples"]:
            self.excludes.append("samples/README.md")

    t = templates.TemplateGroup(_TEMPLATES_DIR / directory, self.excludes)
    result = t.render(**kwargs)
    _tracked_paths.add(result)
    metadata.add_template_source(
        name=directory, origin="synthtool.gcp", version=__main__.VERSION
    )
    return result

def py_samples(*, root: PathOrStr = None, skip_readmes: bool = False) -> None:
    """
    Find all samples projects and render templates.
    Samples projects always have a 'requirements.txt' file and may also have
    README.rst.in

    Args:
        root (Union[Path, str]): The samples directory root.
        skip_readmes (bool): If true, do not generate readmes.
    """
    in_client_library = Path("samples").exists() and Path("setup.py").exists()
    if root is None:
        if in_client_library:
            root = "samples"
        else:
            root = "."

    excludes = []
    if skip_readmes:
        excludes.append("README.rst")
    t = templates.TemplateGroup(SAMPLES_TEMPLATE_PATH, excludes=excludes)
    t.env.globals["get_help"] = _get_help  # for sample readmegen

    for req in Path(root).glob("**/requirements.txt"):
        sample_project_dir = req.parent
        log.info(f"Generating templates for samples project '{sample_project_dir}'")

        excludes = ["**/*tmpl*"]  # .tmpl. files are partial templates

        sample_readme_metadata: Dict[str, Any] = {}
        if not skip_readmes:
            sample_readme_metadata = _get_sample_readme_metadata(sample_project_dir)
            # Don't generate readme if there's no metadata
            if sample_readme_metadata == {}:
                excludes.append("**/README.rst")

        if Path(sample_project_dir / "noxfile_config.py").exists():
            # Don't overwrite existing noxfile configs
            excludes.append("**/noxfile_config.py")

        result = t.render(subdir=sample_project_dir, **sample_readme_metadata)
        _tracked_paths.add(result)
        s.copy([result], excludes=excludes)

def py_samples(self, **kwargs) -> Path:
    """
    Determines whether generation is being done in a client library or in a
    samples folder so it can either generate in the current directory or the
    client lib's 'samples' folder. A custom path for where to generate may
    also be specified. Renders README.md according to .repo-metadata.json
    """
    # kwargs["metadata"] is required to load values from .repo-metadata.json
    if "metadata" not in kwargs:
        kwargs["metadata"] = {}

    # load common repo meta information (metadata that's not language specific).
    self._load_generic_metadata(kwargs["metadata"])

    # temporary exclusion prior to old templates being migrated out
    self.excludes.extend(
        [
            "README.rst",
            "auth_api_key.tmpl.rst",
            "auth.tmpl.rst",
            "install_deps.tmpl.rst",
            "install_portaudio.tmpl.rst",
            "noxfile.py.j2",
        ]
    )

    in_client_library = Path("samples").exists()
    sample_project_dir = kwargs["metadata"]["repo"].get("sample_project_dir")

    if sample_project_dir is None:  # Not found in metadata
        if in_client_library:
            sample_project_dir = "samples"
        else:
            sample_project_dir = "."
    elif not Path(sample_project_dir).exists():
        raise Exception(f"'{sample_project_dir}' does not exist")

    logger.debug(f"Generating templates for samples directory '{sample_project_dir}'")

    py_samples_templates = Path(self._template_root) / "python_samples"
    t = templates.TemplateGroup(py_samples_templates, self.excludes)
    result = t.render(subdir=sample_project_dir, **kwargs)
    _tracked_paths.add(result)
    return result

def test_excluded_file_not_removed(source_tree, preserve_track_obsolete_file_flag):
    metadata.set_track_obsolete_files(True)
    _tracked_paths.add(source_tree.tmpdir / "build")

    with metadata.MetadataTrackerAndWriter(source_tree.tmpdir / "synth.metadata"):
        source_tree.write("code/b")
        source_tree.write("code/c")

    metadata.reset()

    # Create a second source tree and copy it into the first.
    with metadata.MetadataTrackerAndWriter(source_tree.tmpdir / "synth.metadata"):
        # Excluding code/c from being copied should mean it doesn't get deleted.
        transforms.move(source_tree.tmpdir / "build", excludes=["code/c"])

    # Confirm remove_obsolete_files deletes b but not c.
    assert not os.path.exists("code/b")
    assert os.path.exists("code/c")

def test__move_to_dest(expand_path_fixtures):
    tmp_path = Path(str(expand_path_fixtures))
    _tracked_paths.add(expand_path_fixtures)
    dest = Path(str(expand_path_fixtures / "dest"))

    transforms.move(tmp_path, dest, excludes=["dira/f.py"])

    files = sorted([str(x) for x in transforms._expand_paths("**/*", root="dest")])

    # Assert destination does not contain dira/f.py (excluded)
    assert files == [
        "dest/a.txt",
        "dest/b.py",
        "dest/c.md",
        "dest/dira",
        "dest/dira/e.txt",
        "dest/dirb",
        "dest/dirb/suba",
        "dest/dirb/suba/g.py",
    ]

def clone(
    url: str,
    dest: pathlib.Path = None,
    committish: str = "master",
    force: bool = False,
    depth: int = None,
) -> pathlib.Path:
    if dest is None:
        dest = cache.get_cache_dir()

    dest = dest / pathlib.Path(url).stem

    if force and dest.exists():
        shutil.rmtree(dest)

    if not dest.exists():
        cmd = ["git", "clone", url, dest]
        if depth is not None:
            cmd.extend(["--depth", str(depth)])
        shell.run(cmd)
    else:
        shell.run(["git", "pull"], cwd=str(dest))

    shell.run(["git", "reset", "--hard", committish], cwd=str(dest))

    # track all git repositories
    _tracked_paths.add(dest)

    # add repo to metadata
    sha, message = get_latest_commit(dest)
    commit_metadata = extract_commit_message_metadata(message)
    metadata.add_git_source(
        name=dest.name,
        remote=url,
        sha=sha,
        internal_ref=commit_metadata.get("PiperOrigin-RevId"),
    )

    return dest

def test_copy_with_merge_file_permissions(expand_path_fixtures):
    destination_file = expand_path_fixtures / "executable_file.sh"
    template_directory = Path(__file__).parent / "fixtures"
    _tracked_paths.add(template_directory)
    template = template_directory / "executable_file.sh"

    # ensure that the existing destination file starts with incorrect
    # file permissions
    assert os.path.exists(destination_file)
    assert os.stat(destination_file).st_mode != os.stat(template).st_mode

    # Move to a different dir to make sure that move doesn't depend on the cwd
    with util.chdir(tempfile.gettempdir()):
        transforms.move(
            sources=template_directory / "executable_file.sh",
            destination=expand_path_fixtures / "executable_file.sh",
            merge=_noop_merge,
            required=True,
        )

    # ensure that the existing destination file now has the correct file permissions
    assert os.stat(destination_file).st_mode == os.stat(template).st_mode

logging.basicConfig(level=logging.DEBUG)

staging = Path('owl-bot-staging')

if staging.is_dir():
    logging.info(f"Copying files from staging directory {staging}.")

    # Copy the bigtable library.
    # src/index.ts and src/v2/index.ts have AdminClients added manually; we
    # don't want to override them.
    # src/*.ts is an added layer for the client libraries; they need extra
    # settings in tsconfig.json & tslint.json.
    # Tracking issues: 1. https://github.com/googleapis/nodejs-bigtable/issues/636
    #                  2. https://github.com/googleapis/nodejs-bigtable/issues/635
    for version in ['v2']:
        library = staging / version
        _tracked_paths.add(library)
        s.copy([library], excludes=[
            'package.json',
            'README.md',
            'src/index.ts',
            'src/v2/index.ts',
            'tsconfig.json',
            'tslint.json',
        ])

    # Copy the admin library.
    # Don't override system-test for admin/v2; just keep the v2 version.
    for version in ['v2']:
        library = staging / 'admin' / version
        _tracked_paths.add(library)
        s.copy([library], excludes=[
            'package.json',
            'README.md',
            'src/index.ts',
            'src/v2/index.ts',
            'tsconfig.json',
            'tslint.json',
        ])

def _generate_code(
    self,
    service: str,
    version: str,
    language: str,
    *,
    private: bool = False,
    proto_path: Union[str, Path] = None,
    output_dir: Union[str, Path] = None,
    generator_version: str = "latest",
    generator_args: Mapping[str, str] = None,
):
    # Determine which googleapis repo to use
    if not private:
        googleapis = self._clone_googleapis()
    else:
        googleapis = self._clone_googleapis_private()

    # Sanity check: We should have a googleapis repo; if we do not,
    # something went wrong, and we should abort.
    if googleapis is None:
        raise RuntimeError(
            f"Unable to generate {service}, the googleapis repository "
            "is unavailable."
        )

    # Pull the code generator for the requested language.
    # If a code generator version was specified, honor that.
    log.debug(
        f"Pulling Docker image: gapic-generator-{language}:{generator_version}"
    )
    shell.run(
        [
            "docker",
            "pull",
            f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}",
        ],
        hide_output=False,
    )

    # Determine where the protos we are generating actually live.
    # We can sometimes (but not always) determine this from the service
    # and version; in other cases, the user must provide it outright.
    if proto_path:
        proto_path = Path(proto_path)
        if proto_path.is_absolute():
            proto_path = proto_path.relative_to("/")
    else:
        proto_path = Path("google/cloud") / service / version

    # Sanity check: Do we have protos where we think we should?
    if not (googleapis / proto_path).exists():
        raise FileNotFoundError(
            f"Unable to find directory for protos: {(googleapis / proto_path)}."
        )
    if not tuple((googleapis / proto_path).glob("*.proto")):
        raise FileNotFoundError(
            f"Directory {(googleapis / proto_path)} exists, but no protos found."
        )

    # Ensure the desired output directory exists.
    # If none was provided, create a temporary directory.
    if not output_dir:
        output_dir = tempfile.mkdtemp()
    output_dir = Path(output_dir).resolve()

    # The time has come, the walrus said, to talk of actually running
    # the code generator.
    log.debug(f"Generating code for: {proto_path}.")
    sep = os.path.sep
    shell.run(
        [
            "docker",
            "run",
            "--mount",
            f"type=bind,source={googleapis / proto_path}{sep},destination={Path('/in') / proto_path}{sep},readonly",
            "--mount",
            f"type=bind,source={output_dir}{sep},destination={Path('/out')}{sep}",
            "--rm",
            "--user",
            str(os.getuid()),
            f"gcr.io/gapic-images/gapic-generator-{language}",
        ]
    )

    # Sanity check: Does the output location have code in it?
    # If not, complain.
    if not tuple(output_dir.iterdir()):
        raise RuntimeError(
            f"Code generation seemed to succeed, but {output_dir} is empty."
        )

    # Huzzah, it worked.
    log.success(f"Generated code into {output_dir}.")

    # Record this in the synthtool metadata.
    metadata.add_client_destination(
        source="googleapis" if not private else "googleapis-private",
        api_name=service,
        api_version=version,
        language=language,
        generator=f"gapic-generator-{language}",
    )

    _tracked_paths.add(output_dir)
    return output_dir

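# Illustrative sketch (assumption): this Docker-based generator is normally
# reached through a per-language wrapper; in synthtool that wrapper is the
# GAPICMicrogenerator class, whose py_library delegates to _generate_code
# above. The service/version below are placeholders.
import synthtool.gcp as gcp

gapic = gcp.GAPICMicrogenerator()
library = gapic.py_library("texttospeech", "v1")
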
def node_library(self, **kwargs) -> Path:
    kwargs["metadata"] = node.read_metadata()
    t = templates.TemplateGroup(_TEMPLATES_DIR / "node_library")
    result = t.render(**kwargs)
    _tracked_paths.add(result)
    return result

def php_library(self, **kwargs) -> Path:
    t = templates.TemplateGroup(_TEMPLATES_DIR / "php_library")
    result = t.render(**kwargs)
    _tracked_paths.add(result)
    return result

def _generate_code(
    self,
    service,
    version,
    language,
    config_path=None,
    artman_output_name=None,
    private=False,
):
    # map the language to the artman argument and subdir of genfiles
    GENERATE_FLAG_LANGUAGE = {
        "python": ("python_gapic", "python"),
        "nodejs": ("nodejs_gapic", "js"),
        "ruby": ("ruby_gapic", "ruby"),
        "php": ("php_gapic", "php"),
        "java": ("java_gapic", "java"),
    }

    if language not in GENERATE_FLAG_LANGUAGE:
        raise ValueError("provided language unsupported")

    gapic_language_arg, gen_language = GENERATE_FLAG_LANGUAGE[language]

    # Determine which googleapis repo to use
    if not private:
        googleapis = self._clone_googleapis()
    else:
        googleapis = self._clone_googleapis_private()

    if googleapis is None:
        raise RuntimeError(
            f"Unable to generate {config_path}, the googleapis repository "
            "is unavailable."
        )

    # Run the code generator.
    # $ artman --config path/to/artman_api.yaml generate python_gapic
    if config_path is None:
        config_path = (
            Path("google/cloud") / service / f"artman_{service}_{version}.yaml"
        )
    elif Path(config_path).is_absolute():
        config_path = Path(config_path).relative_to("/")
    else:
        config_path = Path("google/cloud") / service / Path(config_path)

    if not (googleapis / config_path).exists():
        raise FileNotFoundError(
            f"Unable to find configuration yaml file: {(googleapis / config_path)}."
        )

    log.debug(f"Running generator for {config_path}.")
    output_root = self._artman.run(
        f"googleapis/artman:{artman.ARTMAN_VERSION}",
        googleapis,
        config_path,
        gapic_language_arg,
    )

    # Expect the output to be in the artman-genfiles directory.
    # example: /artman-genfiles/python/speech-v1
    if artman_output_name is None:
        artman_output_name = f"{service}-{version}"
    genfiles = output_root / gen_language / artman_output_name

    if not genfiles.exists():
        raise FileNotFoundError(
            f"Unable to find generated output of artman: {genfiles}."
        )

    log.success(f"Generated code into {genfiles}.")

    _tracked_paths.add(genfiles)
    return genfiles

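# Illustrative sketch (assumption): the artman-based path above was
# historically driven from a synth.py via the GAPICGenerator wrapper; the
# service/version below are placeholders.
import synthtool.gcp as gcp

gapic = gcp.GAPICGenerator()
library = gapic.py_library("speech", "v1")
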
def py_samples(self, **kwargs) -> List[Path]:
    """
    Handles generation of README.md templates for Python samples
    - Determines whether generation is being done in a client library or in a
      samples folder automatically
    - Otherwise accepts manually set sample_project_dir through kwargs metadata
    - Delegates generation of additional sample documents in alternate/overridden
      folders through py_samples_override()
    """
    # kwargs["metadata"] is required to load values from .repo-metadata.json
    if "metadata" not in kwargs:
        kwargs["metadata"] = {}

    # load common repo meta information (metadata that's not language specific).
    self._load_generic_metadata(kwargs["metadata"])

    # temporary exclusion prior to old templates being migrated out
    self.excludes.extend(
        [
            "README.rst",
            "auth_api_key.tmpl.rst",
            "auth.tmpl.rst",
            "install_deps.tmpl.rst",
            "install_portaudio.tmpl.rst",
            "noxfile.py.j2",
        ]
    )

    # ensure samples will generate
    kwargs["metadata"]["samples"] = True

    # determine if in client lib and set custom root sample dir if specified,
    # else None
    in_client_library = Path("samples").exists()
    sample_project_dir = kwargs["metadata"]["repo"].get("sample_project_dir")

    if sample_project_dir is None:  # Not found in metadata
        if in_client_library:
            sample_project_dir = "samples"
        else:
            sample_project_dir = "."
    elif not Path(sample_project_dir).exists():
        raise Exception(f"'{sample_project_dir}' does not exist")

    # Dict of format { override_path : sample(s) }
    override_paths_to_samples: Dict[str, List[str]] = {}
    samples_dict = deepcopy(kwargs["metadata"]["repo"].get("samples"))
    # List of samples which will generate in sample_project_dir
    default_samples_dict = []

    # Iterate through samples to store override_paths_to_samples for all
    # existing override paths
    for sample_idx, sample in enumerate(samples_dict):
        override_path = samples_dict[sample_idx].get("override_path")

        if override_path is not None:
            # add absolute path to metadata so `python foo.py --help` succeeds
            if sample.get("file") is not None:
                path = os.path.join(
                    sample_project_dir, override_path, sample.get("file")
                )
                sample["abs_path"] = Path(path).resolve()

            cur_override_sample = override_paths_to_samples.get(override_path)
            # Base case: No samples are yet planned to gen in this override dir
            if cur_override_sample is None:
                override_paths_to_samples[override_path] = [sample]
            # Else: Sample docs will be generated in README merged with other
            # sample doc(s) already planned to generate in this dir
            else:
                cur_override_sample.append(sample)
                override_paths_to_samples[override_path] = cur_override_sample
        # If override path is None, the sample will be generated in the default
        # folder: sample_project_dir
        else:
            if sample.get("file") is not None:
                path = os.path.join(sample_project_dir, sample.get("file"))
                sample["abs_path"] = Path(path).resolve()
            default_samples_dict.append(sample)

    # List of paths to tempdirs which will be copied into sample folders
    result = []

    # deep copy is required here to avoid kwargs being affected
    overridden_samples_kwargs = deepcopy(kwargs)

    for override_path in override_paths_to_samples:
        # Generate override sample docs
        result.append(
            self.py_samples_override(
                root=sample_project_dir,
                override_path=override_path,
                override_samples=override_paths_to_samples[override_path],
                **overridden_samples_kwargs,
            )
        )
    kwargs["metadata"]["repo"]["samples"] = default_samples_dict

    logger.debug(f"Generating templates for samples directory '{sample_project_dir}'")

    kwargs["subdir"] = sample_project_dir
    # Generate default sample docs
    result.append(self._generic_library("python_samples", **kwargs))

    for path in result:
        # .add() records the root of the paths and needs to be applied to each
        _tracked_paths.add(path)

    return result

def render(self, template_name: str, **kwargs) -> Path:
    template = self._templates.render(template_name, **kwargs)
    _tracked_paths.add(template)
    return template

def clone(
    url: str,
    dest: pathlib.Path = None,
    committish: str = None,
    force: bool = False,
) -> pathlib.Path:
    """Clones a remote git repo.

    Will not actually clone the repo if it's already local via two ways:
      1. It's in the cache (the default destination).
      2. It was supplied via the preconfig file.

    Arguments:
        url {str} -- Url pointing to remote git repo.

    Keyword Arguments:
        dest {pathlib.Path} -- Local folder where repo should be cloned.
            (default: {None})
        committish {str} -- The commit hash to check out. (default: {None})
        force {bool} -- Wipe out and reclone if it already exists in the cache.
            (default: {False})

    Returns:
        pathlib.Path -- Local directory where the repo was cloned.
    """
    preclone = get_preclone(url)

    if preclone:
        logger.debug(f"Using precloned repo {preclone}")
        dest = pathlib.Path(preclone)
    else:
        if dest is None:
            dest = cache.get_cache_dir()

        dest = dest / pathlib.Path(url).stem

        if force and dest.exists():
            shutil.rmtree(dest)

        if not dest.exists():
            cmd = [
                "git",
                "clone",
                "--recurse-submodules",
                "--single-branch",
                url,
                dest,
            ]
            shell.run(cmd, check=True)
        else:
            shell.run(["git", "checkout", "master"], cwd=str(dest), check=True)
            shell.run(["git", "pull"], cwd=str(dest), check=True)
        committish = committish or "master"

    if committish:
        shell.run(["git", "reset", "--hard", committish], cwd=str(dest))

    # track all git repositories
    _tracked_paths.add(dest)

    # add repo to metadata
    sha, message = get_latest_commit(dest)
    commit_metadata = extract_commit_message_metadata(message)
    metadata.add_git_source(
        name=dest.name,
        remote=url,
        sha=sha,
        internal_ref=commit_metadata.get("PiperOrigin-RevId"),
        local_path=str(dest),
    )

    return dest

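# Illustrative sketch (assumption): cloning googleapis from a synth.py at a
# pinned committish; the URL is real, the committish is a placeholder.
from synthtool.sources import git

googleapis = git.clone(
    "https://github.com/googleapis/googleapis.git",
    committish="master",
)
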
def _generate_code(
    self,
    service: str,
    version: str,
    language: str,
    *,
    private: bool = False,
    proto_path: Union[str, Path] = None,
    output_dir: Union[str, Path] = None,
    bazel_target: str = None,
):
    # Determine which googleapis repo to use
    if not private:
        googleapis = self._clone_googleapis()
    else:
        googleapis = self._clone_googleapis_private()

    # Sanity check: We should have a googleapis repo; if we do not,
    # something went wrong, and we should abort.
    if googleapis is None:
        raise RuntimeError(
            f"Unable to generate {service}, the googleapis repository "
            "is unavailable."
        )

    # Determine where the protos we are generating actually live.
    # We can sometimes (but not always) determine this from the service
    # and version; in other cases, the user must provide it outright.
    if proto_path:
        proto_path = Path(proto_path)
        if proto_path.is_absolute():
            proto_path = proto_path.relative_to("/")
    else:
        proto_path = Path("google/cloud") / service / version

    # Determine bazel target based on per-language patterns
    # Java:    google-cloud-{{assembly_name}}-{{version}}-java
    # Go:      gapi-cloud-{{assembly_name}}-{{version}}-go
    # Python:  {{assembly_name}}-{{version}}-py
    # PHP:     google-cloud-{{assembly_name}}-{{version}}-php
    # Node.js: {{assembly_name}}-{{version}}-nodejs
    # Ruby:    google-cloud-{{assembly_name}}-{{version}}-ruby
    # C#:      google-cloud-{{assembly_name}}-{{version}}-csharp
    if bazel_target is None:
        parts = list(proto_path.parts)
        while len(parts) > 0 and parts[0] != "google":
            parts.pop(0)
        if len(parts) == 0:
            raise RuntimeError(
                f"Cannot determine bazel_target from proto_path {proto_path}. "
                "Please set bazel_target explicitly."
            )
        if language == "python":
            suffix = f"{service}-{version}-py"
        elif language == "nodejs":
            suffix = f"{service}-{version}-nodejs"
        elif language == "go":
            suffix = f"gapi-{'-'.join(parts[1:])}-go"
        else:
            suffix = f"{'-'.join(parts)}-{language}"
        bazel_target = f"//{os.path.sep.join(parts)}:{suffix}"

    # Sanity check: Do we have protos where we think we should?
    if not (googleapis / proto_path).exists():
        raise FileNotFoundError(
            f"Unable to find directory for protos: {(googleapis / proto_path)}."
        )
    if not tuple((googleapis / proto_path).glob("*.proto")):
        raise FileNotFoundError(
            f"Directory {(googleapis / proto_path)} exists, but no protos found."
        )
    if not (googleapis / proto_path / "BUILD.bazel").exists():
        raise FileNotFoundError(
            f"File {(googleapis / proto_path / 'BUILD.bazel')} does not exist."
        )

    # Ensure the desired output directory exists.
    # If none was provided, create a temporary directory.
    if not output_dir:
        output_dir = tempfile.mkdtemp()
    output_dir = Path(output_dir).resolve()

    # Let's build some stuff now.
    cwd = os.getcwd()
    os.chdir(str(googleapis))

    bazel_run_args = ["bazel", "build", bazel_target]
    log.debug(f"Generating code for: {proto_path}.")
    shell.run(bazel_run_args)

    # We've got a tar file!
    # its location: bazel-bin/google/cloud/language/v1/language-v1-nodejs.tar.gz
    # bazel_target: //google/cloud/language/v1:language-v1-nodejs
    tar_file = (
        f"bazel-bin{os.path.sep}{bazel_target[2:].replace(':', os.path.sep)}.tar.gz"
    )

    tar_run_args = [
        "tar",
        "-C",
        str(output_dir),
        "--strip-components=1",
        "-xzf",
        tar_file,
    ]
    shell.run(tar_run_args)

    os.chdir(cwd)

    # Sanity check: Does the output location have code in it?
    # If not, complain.
    if not tuple(output_dir.iterdir()):
        raise RuntimeError(
            f"Code generation seemed to succeed, but {output_dir} is empty."
        )

    # Huzzah, it worked.
    log.success(f"Generated code into {output_dir}.")

    # Record this in the synthtool metadata.
    metadata.add_client_destination(
        source="googleapis" if not private else "googleapis-private",
        api_name=service,
        api_version=version,
        language=language,
        generator="bazel",
    )

    _tracked_paths.add(output_dir)
    return output_dir

def _generate_code(
    self,
    service,
    version,
    language,
    config_path=None,
    artman_output_name=None,
    private=False,
    include_protos=False,
    generator_args=None,
):
    # map the language to the artman argument and subdir of genfiles
    GENERATE_FLAG_LANGUAGE = {
        "python": ("python_gapic", "python"),
        "nodejs": ("nodejs_gapic", "js"),
        "ruby": ("ruby_gapic", "ruby"),
        "php": ("php_gapic", "php"),
        "java": ("java_gapic", "java"),
    }

    if language not in GENERATE_FLAG_LANGUAGE:
        raise ValueError("provided language unsupported")

    gapic_language_arg, gen_language = GENERATE_FLAG_LANGUAGE[language]

    # Determine which googleapis repo to use
    if not private:
        googleapis = self._clone_googleapis()
    else:
        googleapis = self._clone_googleapis_private()

    if googleapis is None:
        raise RuntimeError(
            f"Unable to generate {config_path}, the googleapis repository "
            "is unavailable."
        )

    generator_dir = LOCAL_GENERATOR
    if generator_dir is not None:
        log.debug(f"Using local generator at {generator_dir}")

    # Run the code generator.
    # $ artman --config path/to/artman_api.yaml generate python_gapic
    if config_path is None:
        config_path = (
            Path("google/cloud") / service / f"artman_{service}_{version}.yaml"
        )
    elif Path(config_path).is_absolute():
        config_path = Path(config_path).relative_to("/")
    else:
        config_path = Path("google/cloud") / service / Path(config_path)

    if not (googleapis / config_path).exists():
        raise FileNotFoundError(
            f"Unable to find configuration yaml file: {(googleapis / config_path)}."
        )

    log.debug(f"Running generator for {config_path}.")
    output_root = self._artman.run(
        f"googleapis/artman:{artman.ARTMAN_VERSION}",
        googleapis,
        config_path,
        gapic_language_arg,
        generator_dir=generator_dir,
        generator_args=generator_args,
    )

    # Expect the output to be in the artman-genfiles directory.
    # example: /artman-genfiles/python/speech-v1
    if artman_output_name is None:
        artman_output_name = f"{service}-{version}"
    genfiles = output_root / gen_language / artman_output_name

    if not genfiles.exists():
        raise FileNotFoundError(
            f"Unable to find generated output of artman: {genfiles}."
        )

    log.success(f"Generated code into {genfiles}.")

    # Get the *.proto files and put them in a protos dir in the output
    if include_protos:
        import shutil

        source_dir = googleapis / config_path.parent / version
        proto_files = source_dir.glob("**/*.proto")
        # By default, put the protos at the root in a folder named 'protos'.
        # Specific languages can be cased here to put them in a more language
        # appropriate place.
        proto_output_path = genfiles / "protos"
        if language == "python":
            # place protos alongside the *_pb2.py files
            proto_output_path = genfiles / f"google/cloud/{service}_{version}/proto"
        os.makedirs(proto_output_path, exist_ok=True)

        for i in proto_files:
            log.debug(f"Copy: {i} to {proto_output_path / i.name}")
            shutil.copyfile(i, proto_output_path / i.name)
        log.success(f"Placed proto files into {proto_output_path}.")

    metadata.add_client_destination(
        source="googleapis" if not private else "googleapis-private",
        api_name=service,
        api_version=version,
        language=language,
        generator="gapic",
        config=str(config_path),
    )

    _tracked_paths.add(genfiles)
    return genfiles

def _generate_code(
    self,
    service: str,
    version: str,
    language: str,
    *,
    private: bool = False,
    proto_path: Union[str, Path] = None,
    extra_proto_files: List[str] = [],
    output_dir: Union[str, Path] = None,
    generator_version: str = "latest",
    generator_args: Mapping[str, str] = None,
):
    # Determine which googleapis repo to use
    if not private:
        googleapis = self._clone_googleapis()
    else:
        googleapis = self._clone_googleapis_private()

    # Sanity check: We should have a googleapis repo; if we do not,
    # something went wrong, and we should abort.
    if googleapis is None:
        raise RuntimeError(
            f"Unable to generate {service}, the googleapis repository "
            "is unavailable."
        )

    # Pull the code generator for the requested language.
    # If a code generator version was specified, honor that.
    log.debug(
        f"Pulling Docker image: gapic-generator-{language}:{generator_version}"
    )
    shell.run(
        [
            "docker",
            "pull",
            f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}",
        ],
        hide_output=False,
    )

    # Determine where the protos we are generating actually live.
    # We can sometimes (but not always) determine this from the service
    # and version; in other cases, the user must provide it outright.
    if proto_path:
        proto_path = Path(proto_path)
        if proto_path.is_absolute():
            proto_path = proto_path.relative_to("/")
    else:
        proto_path = Path("google/cloud") / service / version

    # Sanity check: Do we have protos where we think we should?
    if not (googleapis / proto_path).exists():
        raise FileNotFoundError(
            f"Unable to find directory for protos: {(googleapis / proto_path)}."
        )
    if not tuple((googleapis / proto_path).glob("*.proto")):
        raise FileNotFoundError(
            f"Directory {(googleapis / proto_path)} exists, but no protos found."
        )

    # Ensure the desired output directory exists.
    # If none was provided, create a temporary directory.
    if not output_dir:
        output_dir = tempfile.mkdtemp()
    output_dir = Path(output_dir).resolve()

    # The time has come, the walrus said, to talk of actually running
    # the code generator.
    sep = os.path.sep

    # try to figure out user ID and stay compatible.
    # If there is no `os.getuid()`, fallback to `getpass.getuser()`
    getuid = getattr(os, "getuid", None)
    if getuid:
        user = str(getuid())
    else:
        user = getpass.getuser()

    docker_run_args = [
        "docker",
        "run",
        "--mount",
        f"type=bind,source={googleapis / proto_path}{sep},destination={Path('/in') / proto_path}{sep},readonly",
        "--mount",
        f"type=bind,source={output_dir}{sep},destination={Path('/out')}{sep}",
        "--rm",
        "--user",
        user,
    ]

    # Process extra proto files, e.g. google/cloud/common_resources.proto,
    # if they are required by this API.
    # First, bind mount all the extra proto files into the container.
    for proto in extra_proto_files:
        source_proto = googleapis / Path(proto)
        if not source_proto.exists():
            raise FileNotFoundError(
                f"Unable to find extra proto file: {source_proto}."
            )
        docker_run_args.extend(
            [
                "--mount",
                f"type=bind,source={source_proto},destination={Path('/in') / proto},readonly",
            ]
        )

    docker_run_args.append(
        f"gcr.io/gapic-images/gapic-generator-{language}:{generator_version}"
    )

    # Populate any additional CLI arguments provided for Docker.
    if generator_args:
        for key, value in generator_args.items():
            docker_run_args.append(f"--{key}")
            docker_run_args.append(value)

    log.debug(f"Generating code for: {proto_path}.")
    shell.run(docker_run_args)

    # Sanity check: Does the output location have code in it?
    # If not, complain.
    if not tuple(output_dir.iterdir()):
        raise RuntimeError(
            f"Code generation seemed to succeed, but {output_dir} is empty."
        )

    # Huzzah, it worked.
    log.success(f"Generated code into {output_dir}.")

    # Record this in the synthtool metadata.
    metadata.add_client_destination(
        source="googleapis" if not private else "googleapis-private",
        api_name=service,
        api_version=version,
        language=language,
        generator=f"gapic-generator-{language}",
    )

    _tracked_paths.add(output_dir)
    return output_dir

import logging
from pathlib import Path
import subprocess

import synthtool as s
from synthtool.languages import php
from synthtool import _tracked_paths

logging.basicConfig(level=logging.DEBUG)

src = Path(f"../{php.STAGING_DIR}/Logging").resolve()
dest = Path().resolve()

# Added so that we can pass copy_excludes in the owlbot_main() call
_tracked_paths.add(src)

php.owlbot_main(src=src, dest=dest)

# document and utilize apiEndpoint instead of serviceAddress
s.replace(
    "**/Gapic/*GapicClient.php",
    r"'serviceAddress' =>",
    r"'apiEndpoint' =>",
)
s.replace(
    "**/Gapic/*GapicClient.php",
    r"@type string \$serviceAddress\n\s+\*\s+The address",
    r"""@type string $serviceAddress
     * **Deprecated**. This option will be removed in a future major release. Please
     * utilize the `$apiEndpoint` option instead.
     * @type string $apiEndpoint
     * The address""",
)
s.replace(
    "**/Gapic/*GapicClient.php",
def _generate_code(
    self,
    service: str,
    version: str,
    language: str,
    *,
    private: bool = False,
    discogapic: bool = False,
    proto_path: Union[str, Path] = None,
    output_dir: Union[str, Path] = None,
    bazel_target: str = None,
    include_protos: bool = False,
    proto_output_path: Union[str, Path] = None,
    tar_strip_components: int = 1,
):
    # Determine which googleapis repo to use
    if discogapic:
        api_definitions_repo = self._clone_discovery_artifact_manager()
        api_definitions_repo_name = "discovery-artifact-manager"
    elif private:
        api_definitions_repo = self._clone_googleapis_private()
        api_definitions_repo_name = "googleapis_private"
    else:
        api_definitions_repo = self._clone_googleapis()
        api_definitions_repo_name = "googleapis"

    # Sanity check: We should have an API definitions repo; if we do not,
    # something went wrong, and we should abort.
    if not api_definitions_repo:
        raise RuntimeError(
            f"Unable to generate {service}, the sources repository "
            "is unavailable."
        )

    # Calculate proto_path if necessary.
    if not bazel_target or include_protos:
        # If bazel_target is not specified explicitly, we will need
        # proto_path to calculate it. If include_protos is True,
        # we will need the proto_path to copy the protos.
        if not proto_path:
            if bazel_target:
                # Calculate proto_path from the full bazel target, which is
                # in the format "//proto_path:target_name"
                proto_path = bazel_target.split(":")[0][2:]
            else:
                # If bazel_target is not specified, assume the protos are
                # simply under google/cloud, where most of the protos
                # usually are.
                proto_path = f"google/cloud/{service}/{version}"
        protos = Path(proto_path)
        if protos.is_absolute():
            protos = protos.relative_to("/")

    # Determine bazel target based on per-language patterns
    # Java:    google-cloud-{{assembly_name}}-{{version}}-java
    # Go:      gapi-cloud-{{assembly_name}}-{{version}}-go
    # Python:  {{assembly_name}}-{{version}}-py
    # PHP:     google-cloud-{{assembly_name}}-{{version}}-php
    # Node.js: {{assembly_name}}-{{version}}-nodejs
    # Ruby:    google-cloud-{{assembly_name}}-{{version}}-ruby
    # C#:      google-cloud-{{assembly_name}}-{{version}}-csharp
    if not bazel_target:
        # Determine where the protos we are generating actually live.
        # We can sometimes (but not always) determine this from the service
        # and version; in other cases, the user must provide it outright.
        parts = list(protos.parts)
        while len(parts) > 0 and parts[0] != "google":
            parts.pop(0)
        if len(parts) == 0:
            raise RuntimeError(
                f"Cannot determine bazel_target from proto_path {protos}. "
                "Please set bazel_target explicitly."
            )
        if language == "python":
            suffix = f"{service}-{version}-py"
        elif language == "nodejs":
            suffix = f"{service}-{version}-nodejs"
        elif language == "go":
            suffix = f"gapi-{'-'.join(parts[1:])}-go"
        else:
            suffix = f"{'-'.join(parts)}-{language}"
        bazel_target = f"//{os.path.sep.join(parts)}:{suffix}"

        # Sanity check: Do we have protos where we think we should?
        if not (api_definitions_repo / protos).exists():
            raise FileNotFoundError(
                f"Unable to find directory for protos: {(api_definitions_repo / protos)}."
            )
        if not tuple((api_definitions_repo / protos).glob("*.proto")):
            raise FileNotFoundError(
                f"Directory {(api_definitions_repo / protos)} exists, but no protos found."
            )
        if not (api_definitions_repo / protos / "BUILD.bazel").exists():
            raise FileNotFoundError(
                f"File {(api_definitions_repo / protos / 'BUILD.bazel')} does not exist."
            )

    # Ensure the desired output directory exists.
    # If none was provided, create a temporary directory.
    if not output_dir:
        output_dir = tempfile.mkdtemp()
    output_dir = Path(output_dir).resolve()

    # Let's build some stuff now.
    cwd = os.getcwd()
    os.chdir(str(api_definitions_repo))

    bazel_run_args = [
        "bazel",
        "--max_idle_secs=240",
        "build",
        bazel_target,
    ]

    logger.debug(f"Generating code for: {bazel_target}.")
    shell.run(bazel_run_args)

    # We've got a tar file!
    # its location: bazel-bin/google/cloud/language/v1/language-v1-nodejs.tar.gz
    # bazel_target: //google/cloud/language/v1:language-v1-nodejs
    tar_file = (
        f"bazel-bin{os.path.sep}{bazel_target[2:].replace(':', os.path.sep)}.tar.gz"
    )

    tar_run_args = [
        "tar",
        "-C",
        str(output_dir),
        f"--strip-components={tar_strip_components}",
        "-xzf",
        tar_file,
    ]
    shell.run(tar_run_args)

    # Get the *.proto files and put them in a protos dir in the output
    if include_protos:
        proto_files = protos.glob("**/*.proto")
        # By default, put the protos at the root in a folder named 'protos'.
        # Specific languages can be cased here to put them in a more language
        # appropriate place.
        if not proto_output_path:
            proto_output_path = output_dir / "protos"
            if language == "python":
                # place protos alongside the *_pb2.py files
                proto_output_path = (
                    output_dir / f"google/cloud/{service}_{version}/proto"
                )
        else:
            proto_output_path = Path(output_dir / proto_output_path)
        os.makedirs(proto_output_path, exist_ok=True)

        for i in proto_files:
            logger.debug(f"Copy: {i} to {proto_output_path / i.name}")
            shutil.copyfile(i, proto_output_path / i.name)
        logger.success(f"Placed proto files into {proto_output_path}.")

    os.chdir(cwd)

    # Sanity check: Does the output location have code in it?
    # If not, complain.
    if not tuple(output_dir.iterdir()):
        raise RuntimeError(
            f"Code generation seemed to succeed, but {output_dir} is empty."
        )

    # Huzzah, it worked.
    logger.success(f"Generated code into {output_dir}.")

    # Record this in the synthtool metadata.
    metadata.add_client_destination(
        source=api_definitions_repo_name,
        api_name=service,
        api_version=version,
        language=language,
        generator="bazel",
    )

    _tracked_paths.add(output_dir)
    return output_dir

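# Illustrative sketch (assumption): the bazel-based generator is driven from a
# synth.py through GAPICBazel's per-language helpers, which delegate to
# _generate_code above; service/version are placeholders.
import synthtool.gcp as gcp

gapic = gcp.GAPICBazel()
library = gapic.py_library(service="language", version="v1")
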