async def _ensure_summary_on_head_sha(ctxt: context.Context) -> None:
    """Restore the Mergify summary check on the current head sha if missing.

    If a summary check-run carrying a stored conclusions line already exists,
    nothing is done. Otherwise the summary attached to the previously cached
    head sha (kept in Redis) is copied onto the current head sha. Warnings are
    logged when recovery is impossible for a pull request that is not freshly
    opened.
    """
    # A summary with embedded conclusions already exists: nothing to restore.
    existing_checks = await ctxt.pull_engine_check_runs
    if any(
        check["name"] == ctxt.SUMMARY_NAME
        and actions_runner.load_conclusions_line(ctxt, check)
        for check in existing_checks
    ):
        return

    just_opened = ctxt.has_been_opened()

    previous_sha = await ctxt.get_cached_last_summary_head_sha()
    if previous_sha is None:
        # No cached sha to recover from; only worth warning about when the PR
        # isn't brand new (a fresh PR legitimately has no summary yet).
        if not just_opened:
            ctxt.log.warning(
                "the pull request doesn't have the last summary head sha stored in redis"
            )
        return

    stored_summary = await _get_summary_from_sha(ctxt, previous_sha)
    if not stored_summary:
        if not just_opened:
            ctxt.log.warning("the pull request doesn't have a summary")
        return

    # Re-post the recovered summary onto the current head sha.
    await ctxt.set_summary_check(
        check_api.Result(
            check_api.Conclusion(stored_summary["conclusion"]),
            title=stored_summary["output"]["title"],
            summary=stored_summary["output"]["summary"],
        )
    )
async def _ensure_summary_on_head_sha(ctxt: context.Context) -> None:
    """Restore the Mergify summary check on the current head sha if missing.

    If a summary check-run carrying a stored conclusions line already exists,
    nothing is done. Otherwise the summary is recovered from the previously
    cached head sha (kept in Redis), falling back to the last "synchronize"
    event when the cache has expired. Warnings are logged when recovery is
    impossible for a pull request that is not freshly opened.
    """
    # A summary with embedded conclusions already exists: nothing to restore.
    existing_checks = await ctxt.pull_engine_check_runs
    if any(
        check["name"] == ctxt.SUMMARY_NAME
        and actions_runner.load_conclusions_line(ctxt, check)
        for check in existing_checks
    ):
        return

    just_opened = ctxt.has_been_opened()

    previous_sha = await ctxt.get_cached_last_summary_head_sha()
    if previous_sha is None:
        # No cached sha to recover from; only worth warning about when the PR
        # isn't brand new (a fresh PR legitimately has no summary yet).
        if not just_opened:
            ctxt.log.warning(
                "the pull request doesn't have the last summary head sha stored in redis"
            )
        return

    recovered = await _get_summary_from_sha(ctxt, previous_sha)
    if recovered is None:
        # NOTE(sileht): If the cached summary sha expires and the next event we
        # get for a pull request is "synchronize", the summary would be lost.
        # Most of the time that's not a big deal, but if the pull request is
        # queued for merge it may get stuck — so try the synchronize event as
        # a fallback source.
        recovered = await _get_summary_from_synchronize_event(ctxt)

    if recovered:
        # Re-post the recovered summary onto the current head sha.
        await ctxt.set_summary_check(
            check_api.Result(
                check_api.Conclusion(recovered["conclusion"]),
                title=recovered["output"]["title"],
                summary=recovered["output"]["summary"],
            )
        )
    elif not just_opened:
        ctxt.log.warning("the pull request doesn't have a summary")
def load_conclusions(
    ctxt: context.Context,
    summary_check: typing.Optional[github_types.GitHubCheckRun],
) -> typing.Dict[str, check_api.Conclusion]:
    """Decode the per-rule conclusions embedded in the summary check-run.

    The conclusions are stored as a base64-encoded YAML mapping wrapped in a
    marker line inside the summary body. Returns an empty dict when no such
    line is present, logging a warning unless the pull request was just
    opened (in which case a missing summary is expected).
    """
    marker_line = load_conclusions_line(ctxt, summary_check)
    if not marker_line:
        if not ctxt.has_been_opened():
            ctxt.log.warning(
                "previous conclusion not found in summary",
                summary_check=summary_check,
            )
        return {}

    # Strip the marker's prefix (5 chars) and suffix (4 chars) around the
    # base64 payload, then decode it into a YAML mapping of rule -> conclusion.
    decoded_payload = base64.b64decode(marker_line[5:-4].encode()).decode()
    return {
        rule_name: check_api.Conclusion(raw_conclusion)
        for rule_name, raw_conclusion in yaml.safe_load(decoded_payload).items()
    }
async def _ensure_summary_on_head_sha(ctxt: context.Context) -> None:
    """Copy the Mergify summary onto the current head sha after a push.

    Skips freshly-opened pull requests (no summary exists yet). Recovers the
    previous summary either from the cached last-summary head sha (Redis) or,
    failing that, from the last "synchronize" event, and re-posts it — but
    only when the summary's ``external_id`` matches this pull request, to
    avoid clobbering a summary that belongs to another PR sharing the sha.
    """
    if ctxt.has_been_opened():
        return

    sha = await ctxt.get_cached_last_summary_head_sha()
    if sha is not None:
        if sha == ctxt.pull["head"]["sha"]:
            # Summary is already attached to the current head sha.
            ctxt.log.debug("head sha didn't changed, no need to copy summary")
            return
        else:
            ctxt.log.debug(
                "head sha changed need to copy summary",
                gh_pull_previous_head_sha=sha,
            )

    previous_summary = None

    # First recovery source: the summary stored under the cached head sha.
    if sha is not None:
        ctxt.log.debug("checking summary from redis")
        previous_summary = await _get_summary_from_sha(ctxt, sha)
        if previous_summary is not None:
            ctxt.log.debug("got summary from redis")

    if previous_summary is None:
        # NOTE(sileht): If the cached summary sha expires and the next event we got for
        # a pull request is "synchronize" we will lose the summary. Most of the times
        # it's not a big deal, but if the pull request is queued for merge, it may
        # be stuck.
        previous_summary = await _get_summary_from_synchronize_event(ctxt)

    # Sync only if the external_id is the expected one (empty/None external_id
    # predates the collision guard and is treated as belonging to this PR).
    if previous_summary and (
        previous_summary["external_id"] is None
        or previous_summary["external_id"] == ""
        or previous_summary["external_id"] == str(ctxt.pull["number"])
    ):
        await ctxt.set_summary_check(
            check_api.Result(
                check_api.Conclusion(previous_summary["conclusion"]),
                title=previous_summary["output"]["title"],
                summary=previous_summary["output"]["summary"],
            )
        )
    elif previous_summary:
        # The recovered summary belongs to a different pull request sharing
        # the same head sha — don't copy it.
        ctxt.log.info(
            "got a previous summary, but collision detected with another pull request",
            other_pull=previous_summary["external_id"],
        )
async def run(
    ctxt: context.Context,
    sources: typing.List[context.T_PayloadEventSource],
) -> typing.Optional[check_api.Result]:
    """Main engine entry point: process queued events for one pull request.

    Validates GitHub App permissions and the subscription plan, loads and
    validates the Mergify configuration, runs pending/incoming commands,
    restores the summary check if needed, then dispatches to the queue or
    pull-request-rules runner. Returns a ``check_api.Result`` to post as the
    summary, or ``None`` when nothing should be reported.
    """
    LOG.debug("engine get context")
    ctxt.log.debug("engine start processing context")

    # Split incoming events: issue comments are handled as commands later;
    # everything else feeds the rule-evaluation context.
    issue_comment_sources: typing.List[T_PayloadEventIssueCommentSource] = []
    for source in sources:
        if source["event_type"] == "issue_comment":
            issue_comment_sources.append(
                typing.cast(T_PayloadEventIssueCommentSource, source)
            )
        else:
            ctxt.sources.append(source)

    # The GitHub App installation must be on the current permission set.
    permissions_need_to_be_updated = github_app.permissions_need_to_be_updated(
        ctxt.repository.installation.installation
    )
    if permissions_need_to_be_updated:
        return check_api.Result(
            check_api.Conclusion.FAILURE,
            title="Required GitHub permissions are missing.",
            summary="You can accept them at https://dashboard.mergify.com/",
        )

    # Subscription gating: private and public repositories are separate
    # plan features.
    if ctxt.pull["base"]["repo"]["private"]:
        if not ctxt.subscription.has_feature(
            subscription.Features.PRIVATE_REPOSITORY
        ):
            ctxt.log.info(
                "mergify disabled: private repository",
                reason=ctxt.subscription.reason,
            )
            return check_api.Result(
                check_api.Conclusion.FAILURE,
                title="Mergify is disabled",
                summary=ctxt.subscription.reason,
            )
    else:
        if not ctxt.subscription.has_feature(
            subscription.Features.PUBLIC_REPOSITORY
        ):
            ctxt.log.info(
                "mergify disabled: public repository",
                reason=ctxt.subscription.reason,
            )
            return check_api.Result(
                check_api.Conclusion.FAILURE,
                title="Mergify is disabled",
                summary=ctxt.subscription.reason,
            )

    config_file = await ctxt.repository.get_mergify_config_file()

    try:
        ctxt.configuration_changed = await _check_configuration_changes(
            ctxt, config_file
        )
    except MultipleConfigurationFileFound as e:
        files = "\n * " + "\n * ".join(f["path"] for f in e.files)
        # NOTE(sileht): This replaces the summary, so we may lose the state of
        # the queue/comment actions. But since we can't choose which config
        # file to use... we can't do much.
        return check_api.Result(
            check_api.Conclusion.FAILURE,
            title=constants.CONFIGURATION_MUTIPLE_FOUND_SUMMARY_TITLE,
            summary=
            f"You must keep only one of these configuration files in the repository: {files}",
        )

    # BRANCH CONFIGURATION CHECKING
    try:
        mergify_config = await ctxt.repository.get_mergify_config()
    except rules.InvalidRules as e:  # pragma: no cover
        ctxt.log.info(
            "The Mergify configuration is invalid",
            summary=str(e),
            annotations=e.get_annotations(e.filename),
        )
        # Not configured, post status check with the error message — but only
        # for events where posting a check makes sense (open/push).
        for s in ctxt.sources:
            if s["event_type"] == "pull_request":
                event = typing.cast(github_types.GitHubEventPullRequest, s["data"])
                if event["action"] in ("opened", "synchronize"):
                    return check_api.Result(
                        check_api.Conclusion.FAILURE,
                        title="The current Mergify configuration is invalid",
                        summary=str(e),
                        annotations=e.get_annotations(e.filename),
                    )
        return None

    ctxt.log.debug("engine run pending commands")
    await commands_runner.run_pending_commands_tasks(ctxt, mergify_config)

    # Handle the issue-comment commands gathered above.
    if issue_comment_sources:
        ctxt.log.debug("engine handle commands")
        for ic_source in issue_comment_sources:
            await commands_runner.handle(
                ctxt,
                mergify_config,
                ic_source["data"]["comment"]["body"],
                ic_source["data"]["comment"]["user"],
            )

    await _ensure_summary_on_head_sha(ctxt)

    # Collision guard: a summary whose external_id points at another (still
    # open) pull request means two PRs share the same head sha — bail out.
    summary = await ctxt.get_engine_check_run(constants.SUMMARY_NAME)
    if (
        summary
        and summary["external_id"] is not None
        and summary["external_id"] != ""
        and summary["external_id"] != str(ctxt.pull["number"])
    ):
        other_ctxt = await ctxt.repository.get_pull_request_context(
            github_types.GitHubPullRequestNumber(int(summary["external_id"]))
        )
        # NOTE(sileht): allow to override the summary of another pull request
        # only if this one is closed, but this can still confuse users as the
        # check-runs created by merge/queue action will not be cleaned.
        # TODO(sileht): maybe cancel all other mergify engine check-runs in this case?
        if not other_ctxt.closed:
            # TODO(sileht): try to report that without check-runs/statuses to the user
            # and without spamming him with comment
            ctxt.log.info(
                "sha collision detected between pull requests",
                other_pull=summary["external_id"],
            )
            return None

    if not ctxt.has_been_opened() and summary is None:
        ctxt.log.warning(
            "the pull request doesn't have a summary",
            head_sha=ctxt.pull["head"]["sha"],
        )

    # Dispatch: merge-queue PRs are driven by queue rules, normal PRs by
    # pull-request rules.
    ctxt.log.debug("engine handle actions")
    if ctxt.is_merge_queue_pr():
        return await queue_runner.handle(mergify_config["queue_rules"], ctxt)
    else:
        return await actions_runner.handle(
            mergify_config["pull_request_rules"], ctxt
        )