def synthesize_loop_fixture() -> typing.Generator[SynthesizeLoopFixture, None, None]:
    """Yields a SynthesizeLoopFixture backed by a throw-away git repository.

    Two temporary directories are created: one is handed to the fixture for
    its scratch space, and the other becomes the current working directory,
    initialized as a git repo with a single committed README on a branch
    named "test".  Both directories are cleaned up when the generator closes.
    """
    with tempfile.TemporaryDirectory() as scratch_dir, tempfile.TemporaryDirectory() as repo_dir, util.OsChdirContext(
        repo_dir
    ):
        # Initialize a fresh git repo containing one committed file.
        subprocess.check_call(["git", "init", "."])
        with open("README.md", "wt") as readme_file:
            readme_file.write("Well done.")
        git.commit_all_changes("Added Readme")
        # Tests operate on a dedicated branch rather than the default one.
        subprocess.check_call(["git", "checkout", "-b", "test"])
        # Hand control to the test with a fixture rooted at the scratch dir.
        yield SynthesizeLoopFixture(scratch_dir)
def synthesize_version_in_new_branch(self, synthesizer: AbstractSynthesizer, index: int) -> bool: """Invokes the synthesizer on the version specified by index. Stores the result in a new branch. Leaves the current branch unchanged. Arguments: synthesizer {AbstractSynthesizer} -- A synthesizer. index {int} -- index into self.versions Returns: bool -- True if the code generated differs. """ # Did we already generate this version? Return cached result. branch_already_has_changes = self.versions[index].branch_has_changes if branch_already_has_changes is not None: return branch_already_has_changes self.apply_version(index) self.checkout_new_branch(index) try: if 0 == index: if self.version_zero.branch_name: # Reuse version zero built for another source. executor.check_call([ "git", "merge", "--ff-only", self.version_zero.branch_name ]) return self.version_zero.has_changes synth_log_path = self.log_dir_path / str(index) / "sponge_log.log" if index + 1 == len(self.versions): # The youngest version. Let exceptions raise because the # current state is broken, and there's nothing we can do. synthesizer.synthesize(synth_log_path, self.environ) else: synthesizer.synthesize_and_catch_exception( synth_log_path, self.environ) # Save changes into the sub branch. i_has_changes = has_changes() git.commit_all_changes(self.versions[index].version.get_comment()) if 0 == index: # Record version zero info so other sources can reuse. self.version_zero.branch_name = self.sub_branch(0) self.version_zero.has_changes = i_has_changes # Cache the outcome. self.versions[index].branch_has_changes = i_has_changes return i_has_changes finally: executor.check_call(["git", "reset", "--hard", "HEAD"]) executor.check_call(["git", "checkout", self.branch])
def patch_merge_version(self, index: int, comment=None) -> bool:
    """Merges the given version into the current branch using a patch merge.

    Arguments:
        index {int} -- index into self.versions
        comment -- optional commit message; when omitted, the version's own
            comment is used.

    Returns:
        bool -- True if a merge commit was created; False when HEAD and the
            version's sub branch were already identical.
    """
    source_branch = self.sub_branch(index)
    # Nothing to do when the sub branch holds no changes relative to HEAD.
    if not self.git_branches_differ("HEAD", source_branch):
        return False
    patch_path = os.path.join(self._temp_dir, f"{source_branch}.patch")
    git.patch_merge(source_branch, patch_path)
    commit_message = comment or self.versions[index].version.get_comment()
    git.commit_all_changes(commit_message)
    self.commit_count += 1
    self.versions[index].merged = True
    return True
def test_synthesize_loop_track_obsolete_files(
    synthesize_loop_fixture: SynthesizeLoopFixture,
):
    """The loop pushes a change when generatedFiles goes from empty to populated."""
    # Seed the repo with a synth.metadata that tracks no generated files.
    empty_metadata = {"generatedFiles": []}
    with open("synth.metadata", "wt") as metadata_file:
        metadata_file.write(json.dumps(empty_metadata))
    git.commit_all_changes("Added synth.metadata with empty generatedFiles.")
    # A generated change that populates synth.metadata's generatedFiles.
    populated_metadata = {"generatedFiles": ["a.txt"]}
    tracked_write = WriteFile("synth.metadata", json.dumps(populated_metadata))
    # Run the synthesize loop over a single source with two versions.
    histories = [[NoChange(), tracked_write]]
    versions = compile_histories(histories, synthesize_loop_fixture.synthesizer)
    synthesize_loop_fixture.synthesize_loop(versions)
    # Confirm the synth loop pushed exactly this change.
    pusher_calls = synthesize_loop_fixture.change_pusher.mock_calls
    expected = call.push_changes(1, "test", "chore: start tracking obsolete files")
    assert expected in pusher_calls
def _inner_main(temp_dir: str) -> int:
    """Parses arguments, clones the target repo, and runs synthesis.

    Chooses between a single-shot synthesis and the multi-commit/multi-PR
    synthesize loop based on flags parsed from synth.py and environment
    variables.  May terminate the process via sys.exit(EXIT_CODE_SKIPPED)
    when there is nothing to push.

    Arguments:
        temp_dir {str} -- scratch directory used while enumerating source
            versions and by the synthesize-loop toolbox.

    Returns:
        int -- Number of commits committed to the repo.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--github-user", default=os.environ.get("GITHUB_USER"))
    parser.add_argument("--github-email", default=os.environ.get("GITHUB_EMAIL"))
    parser.add_argument("--github-token", default=os.environ.get("GITHUB_TOKEN"))
    # Repository is the only required argument; everything else has a
    # default or falls back to an environment variable.
    parser.add_argument("--repository", default=os.environ.get("REPOSITORY"), required=True)
    parser.add_argument("--synth-path", default=os.environ.get("SYNTH_PATH"))
    parser.add_argument("--metadata-path", default=os.environ.get("METADATA_PATH"))
    parser.add_argument(
        "--deprecated-execution",
        default=False,
        action="store_true",
        help=
        "If specified, execute synth.py directly instead of synthtool. This behavior is deprecated.",
    )
    parser.add_argument("--branch-suffix", default=os.environ.get("BRANCH_SUFFIX", None))
    parser.add_argument("--pr-title", default="")
    # Anything after the known flags is passed through to the synthesizer.
    parser.add_argument("extra_args", nargs=argparse.REMAINDER)
    args = parser.parse_args()
    gh = github.GitHub(args.github_token)
    # Branch name is "autosynth" optionally suffixed, e.g. "autosynth-foo".
    branch = "-".join(filter(None, ["autosynth", args.branch_suffix]))
    pr_title = args.pr_title or (
        f"[CHANGE ME] Re-generated {args.synth_path or ''} to pick up changes in "
        f"the API or client library generator.")
    change_pusher: AbstractChangePusher = ChangePusher(args.repository, gh, branch)
    # capture logs for later
    base_synth_log_path = pathlib.Path(
        os.path.realpath("./logs")) / args.repository
    if args.synth_path:
        base_synth_log_path /= args.synth_path
    logger.info(f"logs will be written to: {base_synth_log_path}")
    working_repo_path = synthtool_git.clone(
        f"https://github.com/{args.repository}.git")
    try:
        # All subsequent git/synth work happens inside the cloned repo
        # (or its synth-path subdirectory for mono repos).
        os.chdir(working_repo_path)
        git.configure_git(args.github_user, args.github_email)
        git.setup_branch(branch)
        if args.synth_path:
            os.chdir(args.synth_path)
        metadata_path = os.path.join(args.metadata_path or "", "synth.metadata")
        flags = autosynth.flags.parse_flags()
        # Override flags specified in synth.py with flags specified in
        # environment vars.
        for key in flags.keys():
            env_value = os.environ.get(key, "")
            if env_value:
                # The literal string "false" (any case) disables a flag;
                # any other non-empty value is used verbatim.
                flags[key] = False if env_value.lower(
                ) == "false" else env_value
        metadata = load_metadata(metadata_path)
        multiple_commits = flags[autosynth.flags.AUTOSYNTH_MULTIPLE_COMMITS]
        multiple_prs = flags[autosynth.flags.AUTOSYNTH_MULTIPLE_PRS]
        if (not multiple_commits and not multiple_prs) or not metadata:
            # Simple path: one synthesis run, one commit, one PR.
            if change_pusher.check_if_pr_already_exists(branch):
                return 0
            synth_log = Synthesizer(
                metadata_path,
                args.extra_args,
                deprecated_execution=args.deprecated_execution,
            ).synthesize(base_synth_log_path)
            if not has_changes():
                logger.info("No changes. :)")
                sys.exit(EXIT_CODE_SKIPPED)
            git.commit_all_changes(pr_title)
            change_pusher.push_changes(1, branch, pr_title, synth_log)
            return 1
        else:
            if not multiple_prs and change_pusher.check_if_pr_already_exists(
                    branch):
                return 0  # There's already an existing PR
            # Enumerate the versions to loop over.
            sources = metadata.get("sources", [])
            source_versions = [
                git_source.enumerate_versions_for_working_repo(
                    metadata_path, sources)
            ]
            # Add supported source version types below:
            source_versions.extend(
                git_source.enumerate_versions(sources, pathlib.Path(temp_dir)))
            # Prepare to call synthesize loop.
            synthesizer = Synthesizer(
                metadata_path,
                args.extra_args,
                args.deprecated_execution,
                "synth.py",
            )
            x = SynthesizeLoopToolbox(
                source_versions,
                branch,
                temp_dir,
                metadata_path,
                args.synth_path,
                base_synth_log_path,
            )
            if not multiple_commits:
                # Collapse the loop's commits into a single pushed change.
                change_pusher = SquashingChangePusher(change_pusher)
            # Call the loop.
            commit_count = synthesize_loop(x, multiple_prs, change_pusher,
                                           synthesizer)
            if commit_count == 0:
                logger.info("No changes. :)")
                sys.exit(EXIT_CODE_SKIPPED)
            return commit_count
    finally:
        if args.synth_path:
            # We're generating code in a mono repo.  The state left behind will
            # probably be useful for generating the next API.
            pass
        else:
            # We're generating a single API in a single repo, and using a different
            # repo to generate the next API.  So the next synth will not be able to
            # use any of this state.  Clean it up to avoid running out of disk space.
            executor.run(["git", "clean", "-fdx"], cwd=working_repo_path)