async def checkout_repo(config, task, repo_path):
    """Perform a clone via robustcheckout, at ${directory}/src.

    This function will perform a clone via robustcheckout, using hg's share
    extension for a cache at 'config['hg_share_base_dir']' that robustcheckout
    will populate if necessary.

    Robustcheckout will retry network operations at most 3 times
    (robustcheckout's default) before giving up; the failure surfaces as
    ``CheckoutError`` because that is the exception class passed to
    ``run_hg_command`` below.

    Args:
        config (dict): the running config.
        task (dict): the running task.
        repo_path (str): The directory to place the resulting clone.

    Raises:
        CheckoutError: if the clone attempt doesn't succeed.

    """
    share_base = config["hg_share_base_dir"]
    upstream_repo = config["upstream_repo"]
    source_repo = get_source_repo(task)
    # branch default is used to pull tip of the repo at checkout time
    branch = get_branch(task, "default")
    await run_hg_command(
        config,
        "robustcheckout",
        source_repo,
        repo_path,
        "--sharebase",
        share_base,
        "--upstream",
        upstream_repo,
        "--branch",
        branch,
        exception=CheckoutError,
    )
async def log_outgoing(config, task, repo_path):
    """Log current changes that will be pushed (or would have been, if dry-run).

    Writes the diff against the local branch to
    ``$artifact_dir/public/logs/outgoing.diff`` when there is one.

    Args:
        config (dict): the running config
        task (dict): the running task
        repo_path (str): the source repo path

    Returns:
        int: the number of outgoing changesets

    """
    log.info("Outgoing changesets...")
    local_repo = Repo(repo_path)
    local_branch = get_branch(task, "master")
    # Revision range between the upstream branch tip and the local branch tip.
    rev_range = "{}..{}".format(_get_upstream_branch_name(local_branch), local_branch)
    log.debug("Checking the number of changesets between these 2 references: {}".format(rev_range))
    # Count commits without materializing the whole list.
    num_changesets = sum(1 for _ in local_repo.iter_commits(rev_range))
    diff = local_repo.git.diff(local_branch)
    if diff:
        diff_path = os.path.join(config["artifact_dir"], "public", "logs", "outgoing.diff")
        makedirs(os.path.dirname(diff_path))
        with open(diff_path, "w") as diff_file:
            diff_file.write(diff)
    log.info("Found {} new changesets".format(num_changesets))
    return num_changesets
async def push(config, task, repo_path, target_repo):
    """Run `git push` against the current source repo.

    Pushes the task's branch to `target_repo` over ssh, using a per-user ssh
    key from config when one is configured. On a failed push, the outgoing
    changesets are stripped before the error is re-raised.

    Args:
        config (dict): the running config
        task (dict): the running task
        repo_path (str): the source repo path
        target_repo (str): Destination repository url

    Raises:
        PushError: on failure

    """
    ssh_config = config.get("git_ssh_config", {}).get(get_ssh_user(task), {})
    ssh_key = ssh_config.get("keyfile")
    # Fall back to plain ssh when no keyfile is configured for this user.
    git_ssh_cmd = "ssh -i {}".format(ssh_key) if ssh_key else "ssh"
    repo = Repo(repo_path)
    target_repo_ssh = extract_github_repo_ssh_url(target_repo)
    repo.remote().set_url(target_repo_ssh, push=True)
    log.debug("Push using ssh command: {}".format(git_ssh_cmd))
    branch = get_branch(task, "master")
    with repo.git.custom_environment(GIT_SSH_COMMAND=git_ssh_cmd):
        log.info("Pushing local changes to {}".format(target_repo_ssh))
        push_results = repo.remote().push(branch, verbose=True, set_upstream=True)
        try:
            _check_if_push_successful(push_results)
        except PushError:
            # Don't leave unpushed changesets behind after a failed push.
            await strip_outgoing(config, task, repo_path)
            raise
    log.info("Push done successfully!")
async def checkout_repo(config, task, repo_path):
    """Perform a git clone at ${directory}/src.

    This function will perform a git clone. It will also checkout to the right
    branch, if provided in the task definition. An existing clone at
    `repo_path` is reused instead of re-cloning.

    Args:
        config (dict): the running config.
        task (dict): the running task.
        repo_path (str): The directory to place the resulting clone.

    Raises:
        TaskVerificationError: if the branch does not exist upstream.

    """
    source_repo = get_source_repo(task)
    if os.path.exists(repo_path):
        log.info("Reusing existing repo_path: {}".format(repo_path))
        repo = Repo(repo_path)
    else:
        log.info('Cloning source_repo "{}" to repo_path: {}'.format(source_repo, repo_path))
        repo = Repo.clone_from(source_repo, repo_path)

    branch = get_branch(task)
    if branch:
        # GitPython cannot simply `git checkout` to right upstream branch. We
        # have to manually create a new branch and manually set the upstream
        # branch.
        log.info('Checking out branch "{}"'.format(branch))
        remote_branches = repo.remotes.origin.fetch()
        remote_branches_names = [fetch_info.name for fetch_info in remote_branches]
        remote_branch = get_single_item_from_sequence(
            remote_branches_names,
            condition=lambda remote_branch_name: remote_branch_name == _get_upstream_branch_name(branch),
            ErrorClass=TaskVerificationError,
            no_item_error_message="Branch does not exist on remote repo",
            too_many_item_error_message="Too many branches with that name",
        )
        repo.create_head(branch, remote_branch)
        repo.branches[branch].checkout()
    else:
        # logging's `warn` is a deprecated alias for `warning`.
        log.warning("No branch provided in the task payload. Staying on the default one")
async def strip_outgoing(config, task, repo_path):
    """Strip all unpushed outgoing revisions and purge the changes.

    This is something we should do on failed pushes.

    Args:
        config (dict): the running config
        task (dict): the running task
        repo_path (str): the path to the repo

    """
    local_repo = Repo(repo_path)
    local_branch = get_branch(task, "master")
    log.info("Resetting repo state to match upstream's...")
    upstream_ref = _get_upstream_branch_name(local_branch)
    # Hard-reset HEAD and the working tree to the upstream branch tip.
    local_repo.head.reset(commit=upstream_ref, working_tree=True)
    # Remove untracked files/directories, including ignored ones.
    local_repo.git.clean("-fdx")
    log.info("Repo state reset.")
def test_get_branch(task_defn, branch):
    """get_branch returns the branch from the payload when one was set."""
    if branch:
        task_defn["payload"]["branch"] = branch
    actual = ttask.get_branch(task_defn)
    assert actual == branch
def test_get_branch(task_defn, branch, expected_result):
    """get_branch returns the expected value for a given payload branch."""
    if branch:
        task_defn["payload"]["branch"] = branch
    actual = ttask.get_branch(task_defn)
    assert actual == expected_result