Example #1
    async def _should_be_queued(self, ctxt: context.Context, q: queue.QueueT) -> bool:
        check = await ctxt.get_engine_check_run(constants.MERGE_QUEUE_SUMMARY_NAME)
        return not check or check_api.Conclusion(check["conclusion"]) in [
            check_api.Conclusion.SUCCESS,
            check_api.Conclusion.PENDING,
            check_api.Conclusion.NEUTRAL,
        ]
Example #2
async def _ensure_summary_on_head_sha(ctxt: context.Context) -> None:
    for check in await ctxt.pull_engine_check_runs:
        if check["name"] == ctxt.SUMMARY_NAME and actions_runner.load_conclusions_line(
                ctxt, check):
            return

    opened = ctxt.has_been_opened()
    sha = await ctxt.get_cached_last_summary_head_sha()
    if sha is None:
        if not opened:
            ctxt.log.warning(
                "the pull request doesn't have the last summary head sha stored in redis"
            )
        return

    previous_summary = await _get_summary_from_sha(ctxt, sha)

    if previous_summary is None:
    # NOTE(sileht): If the cached summary sha expires and the next event we get for
    # a pull request is "synchronize", we will lose the summary. Most of the time
    # it's not a big deal, but if the pull request is queued for merge, it may
    # be stuck.
        previous_summary = await _get_summary_from_synchronize_event(ctxt)

    if previous_summary:
        await ctxt.set_summary_check(
            check_api.Result(
                check_api.Conclusion(previous_summary["conclusion"]),
                title=previous_summary["output"]["title"],
                summary=previous_summary["output"]["summary"],
            ))
    elif not opened:
        ctxt.log.warning("the pull request doesn't have a summary")
Example #3
async def _ensure_summary_on_head_sha(ctxt: context.Context) -> None:
    for check in await ctxt.pull_engine_check_runs:
        if check["name"] == ctxt.SUMMARY_NAME and actions_runner.load_conclusions_line(
                ctxt, check):
            return

    opened = ctxt.has_been_opened()
    sha = await ctxt.get_cached_last_summary_head_sha()
    if sha is None:
        if not opened:
            ctxt.log.warning(
                "the pull request doesn't have the last summary head sha stored in redis"
            )
        return

    previous_summary = await _get_summary_from_sha(ctxt, sha)
    if previous_summary:
        await ctxt.set_summary_check(
            check_api.Result(
                check_api.Conclusion(previous_summary["conclusion"]),
                title=previous_summary["output"]["title"],
                summary=previous_summary["output"]["summary"],
            ))
    elif not opened:
        ctxt.log.warning("the pull request doesn't have a summary")
Example #4
def get_previous_conclusion(previous_conclusions, name, checks):
    if name in previous_conclusions:
        return previous_conclusions[name]
    # TODO(sileht): Remove usage of legacy checks after 15/02/2020 and once the
    # synchronization event issue is fixed
    elif name in checks:
        return check_api.Conclusion(checks[name]["conclusion"])
    return check_api.Conclusion.NEUTRAL
Example #5
    async def run(self, ctxt: context.Context,
                  rule: rules.EvaluatedRule) -> check_api.Result:
        check = await ctxt.get_engine_check_run(
            constants.MERGE_QUEUE_SUMMARY_NAME)
        if not check:
            return check_api.Result(
                check_api.Conclusion.FAILURE,
                title="This pull request head commit has not been previously disembarked from queue.",
                summary="",
            )

        if check_api.Conclusion(check["conclusion"]) in [
                check_api.Conclusion.SUCCESS,
                check_api.Conclusion.NEUTRAL,
                check_api.Conclusion.PENDING,
        ]:
            return check_api.Result(
                check_api.Conclusion.NEUTRAL,
                title="This pull request is already queued",
                summary="",
            )

        await check_api.set_check_run(
            ctxt,
            constants.MERGE_QUEUE_SUMMARY_NAME,
            check_api.Result(
                check_api.Conclusion.NEUTRAL,
                "This pull request can be re-embarked automatically",
                "",
            ),
        )

        # NOTE(sileht): refresh it to maybe retrigger the queue action.
        await utils.send_pull_refresh(
            ctxt.redis.stream,
            ctxt.pull["base"]["repo"],
            pull_request_number=ctxt.pull["number"],
            action="user",
            source="action/command/requeue",
        )

        await signals.send(
            ctxt.repository,
            ctxt.pull["number"],
            "action.requeue",
            signals.EventNoMetadata(),
        )

        return check_api.Result(
            check_api.Conclusion.SUCCESS,
            title="The queue state of this pull request has been cleaned. It can be re-embarked automatically",
            summary="",
        )
Example #6
def get_previous_conclusion(
    previous_conclusions: typing.Dict[str, check_api.Conclusion],
    name: str,
    checks: typing.Dict[str, github_types.CachedGitHubCheckRun],
) -> check_api.Conclusion:
    if name in previous_conclusions:
        return previous_conclusions[name]
    # NOTE(sileht): fall back on the posted check-run in case we lose the Summary
    # somehow
    elif name in checks:
        return check_api.Conclusion(checks[name]["conclusion"])
    return check_api.Conclusion.NEUTRAL
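For context, a small usage sketch of the fallback order above; the check names and the simplified check-run dicts are illustrative, not real data:

# Illustrative usage only: real `checks` values are CachedGitHubCheckRun dicts,
# but get_previous_conclusion only reads their "conclusion" key.
previous = {"ci/test": check_api.Conclusion.SUCCESS}
posted = {"ci/lint": {"conclusion": "failure"}}

assert get_previous_conclusion(previous, "ci/test", posted) == check_api.Conclusion.SUCCESS
assert get_previous_conclusion(previous, "ci/lint", posted) == check_api.Conclusion.FAILURE
assert get_previous_conclusion(previous, "ci/doc", posted) == check_api.Conclusion.NEUTRAL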
Example #7
def load_conclusions(ctxt, summary_check):
    line = load_conclusions_line(summary_check)
    if line:
        return dict(
            (name, check_api.Conclusion(conclusion))
            for name, conclusion in yaml.safe_load(
                base64.b64decode(line[5:-4].encode()).decode()).items())

    ctxt.log.warning(
        "previous conclusion not found in summary",
        summary_check=summary_check,
    )
    return {}
Example #8
def load_conclusions(ctxt, summary_check):
    if summary_check and summary_check["output"]["summary"]:
        line = summary_check["output"]["summary"].splitlines()[-1]
        if line.startswith("<!-- ") and line.endswith(" -->"):
            return dict(
                (name, check_api.Conclusion(conclusion))
                for name, conclusion in yaml.safe_load(
                    base64.b64decode(line[5:-4].encode()).decode()).items())

    ctxt.log.warning(
        "previous conclusion not found in summary",
        summary_check=summary_check,
    )
    return {}
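The line[5:-4] slice in the examples above strips the "<!-- " prefix (5 characters) and the " -->" suffix (4 characters) of the hidden marker line stored at the end of the summary. A minimal round-trip sketch, assuming a hypothetical dump_conclusions_line helper that mirrors the decoding:

import base64

import yaml


def dump_conclusions_line(conclusions):
    # Hypothetical counterpart of load_conclusions(): serialize the per-check
    # conclusions as YAML, base64-encode the result, and hide it inside an
    # HTML comment appended as the last line of the check-run summary.
    payload = yaml.safe_dump(conclusions)
    return "<!-- " + base64.b64encode(payload.encode()).decode() + " -->"


line = dump_conclusions_line({"ci/test": "success", "ci/lint": "failure"})
# "<!-- " is 5 characters and " -->" is 4, hence the [5:-4] slice.
assert yaml.safe_load(base64.b64decode(line[5:-4].encode()).decode()) == {
    "ci/test": "success",
    "ci/lint": "failure",
}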
Example #9
    async def run(self, ctxt: context.Context,
                  rule: "rules.EvaluatedRule") -> check_api.Result:
        if not ctxt.subscription.has_feature(
                subscription.Features.QUEUE_ACTION):
            return check_api.Result(
                check_api.Conclusion.ACTION_REQUIRED,
                "Queue action is disabled",
                ctxt.subscription.missing_feature_reason(
                    ctxt.pull["base"]["repo"]["owner"]["login"]),
            )

        q = await merge_train.Train.from_context(ctxt)
        car = q.get_car(ctxt)
        if car and car.state == "updated":
            # NOTE(sileht): This car doesn't have a tmp pull, so we handle the
            # MERGE_QUEUE_SUMMARY and the train reset here
            need_reset = ctxt.have_been_synchronized() or await ctxt.is_behind
            if need_reset:
                status = check_api.Conclusion.PENDING
                ctxt.log.info("train will be reset")
                await q.reset()
            else:
                queue_rule_evaluated = await self.queue_rule.get_pull_request_rule(
                    ctxt)
                status = await merge_train.get_queue_rule_checks_status(
                    ctxt, queue_rule_evaluated)
            await car.update_summaries(status, will_be_reset=need_reset)

        if ctxt.user_refresh_requested() or ctxt.admin_refresh_requested():
            # NOTE(sileht): the user asked for a refresh, we just remove the previous state
            # of this check and the method _should_be_queued will return true again :)
            check = await ctxt.get_engine_check_run(
                constants.MERGE_QUEUE_SUMMARY_NAME)
            if check and check_api.Conclusion(check["conclusion"]) not in [
                    check_api.Conclusion.SUCCESS,
                    check_api.Conclusion.PENDING,
            ]:
                await check_api.set_check_run(
                    ctxt,
                    constants.MERGE_QUEUE_SUMMARY_NAME,
                    check_api.Result(
                        check_api.Conclusion.PENDING,
                        "The pull request has been refreshed and is going to be re-embarked soon",
                        "",
                    ),
                )

        return await super().run(ctxt, rule)
Example #10
    async def has_previous_car_status_succeed(self) -> bool:
        previous_car = self._get_previous_car()
        if previous_car is None:
            return True

        previous_car_ctxt = await previous_car.get_context_to_evaluate()
        if previous_car_ctxt is None:
            return False

        previous_car_check = await previous_car_ctxt.get_engine_check_run(
            constants.MERGE_QUEUE_SUMMARY_NAME)
        if previous_car_check is None:
            return False

        return (check_api.Conclusion(
            previous_car_check["conclusion"]) == check_api.Conclusion.SUCCESS)
Example #11
    async def _should_be_merged(self, ctxt: context.Context, q: queue.QueueT) -> bool:
        if not await q.is_first_pull(ctxt):
            return False

        if not await ctxt.is_behind:
            queue_rule_evaluated = await self.queue_rule.get_pull_request_rule(ctxt)
            if not queue_rule_evaluated.missing_conditions:
                return True

        check = await ctxt.get_engine_check_run(constants.MERGE_QUEUE_SUMMARY_NAME)
        if check:
            return (
                check_api.Conclusion(check["conclusion"])
                == check_api.Conclusion.SUCCESS
            )
        return False
Example #12
    async def get_unqueue_status(self, ctxt: context.Context,
                                 q: queue.QueueT) -> check_api.Result:
        check = await ctxt.get_engine_check_run(
            constants.MERGE_QUEUE_SUMMARY_NAME)
        manually_unqueued = (check and check_api.Conclusion(
            check["conclusion"]) == check_api.Conclusion.CANCELLED)
        if manually_unqueued:
            reason = "The pull request has been manually removed from the queue by an `unqueue` command."
        else:
            reason = (
                "The queue conditions cannot be satisfied due to failing checks or checks timeout. "
                f"{self.UNQUEUE_DOCUMENTATION}")
        return check_api.Result(
            check_api.Conclusion.CANCELLED,
            "The pull request has been removed from the queue",
            reason,
        )
Example #13
def load_conclusions(
    ctxt: context.Context,
    summary_check: typing.Optional[github_types.GitHubCheckRun]
) -> typing.Dict[str, check_api.Conclusion]:
    line = load_conclusions_line(ctxt, summary_check)
    if line:
        return {
            name: check_api.Conclusion(conclusion)
            for name, conclusion in yaml.safe_load(
                base64.b64decode(line[5:-4].encode()).decode()).items()
        }

    if not ctxt.has_been_opened():
        ctxt.log.warning(
            "previous conclusion not found in summary",
            summary_check=summary_check,
        )
    return {}
Example #14
async def _ensure_summary_on_head_sha(ctxt: context.Context) -> None:
    if ctxt.has_been_opened():
        return

    sha = await ctxt.get_cached_last_summary_head_sha()
    if sha is not None:
        if sha == ctxt.pull["head"]["sha"]:
            ctxt.log.debug("head sha didn't changed, no need to copy summary")
            return
        else:
            ctxt.log.debug("head sha changed need to copy summary",
                           gh_pull_previous_head_sha=sha)

    previous_summary = None

    if sha is not None:
        ctxt.log.debug("checking summary from redis")
        previous_summary = await _get_summary_from_sha(ctxt, sha)
        if previous_summary is not None:
            ctxt.log.debug("got summary from redis")

    if previous_summary is None:
        # NOTE(sileht): If the cached summary sha expires and the next event we get for
        # a pull request is "synchronize", we will lose the summary. Most of the time
        # it's not a big deal, but if the pull request is queued for merge, it may
        # be stuck.
        previous_summary = await _get_summary_from_synchronize_event(ctxt)

    # Sync only if the external_id is the expected one
    if previous_summary and (previous_summary["external_id"] is None
                             or previous_summary["external_id"] == ""
                             or previous_summary["external_id"] == str(
                                 ctxt.pull["number"])):
        await ctxt.set_summary_check(
            check_api.Result(
                check_api.Conclusion(previous_summary["conclusion"]),
                title=previous_summary["output"]["title"],
                summary=previous_summary["output"]["summary"],
            ))
    elif previous_summary:
        ctxt.log.info(
            "got a previous summary, but collision detected with another pull request",
            other_pull=previous_summary["external_id"],
        )
Example #15
    async def get_unqueue_status(
        self, ctxt: context.Context, q: queue.QueueT
    ) -> check_api.Result:
        check = await ctxt.get_engine_check_run(constants.MERGE_QUEUE_SUMMARY_NAME)
        if (
            check
            and check_api.Conclusion(check["conclusion"])
            == check_api.Conclusion.CANCELLED
        ):
            # NOTE(sileht): already cancelled, keep the already reported reason
            reason = check["output"]["summary"]
        else:
            reason = (
                "The queue conditions cannot be satisfied due to failing checks or checks timeout. "
                f"{self.UNQUEUE_DOCUMENTATION}"
            )
        return check_api.Result(
            check_api.Conclusion.CANCELLED,
            "The pull request has been removed from the queue",
            reason,
        )
Example #16
def ensure_summary_on_head_sha(ctxt):
    for check in ctxt.pull_engine_check_runs:
        if check["name"] == ctxt.SUMMARY_NAME:
            return

    sha = ctxt.get_cached_last_summary_head_sha()
    if sha:
        previous_summary = get_summary_from_sha(ctxt, sha)
    else:
        previous_summary = None
        ctxt.log.warning(
            "the pull request doesn't have the last summary head sha stored in redis"
        )

    if previous_summary:
        ctxt.set_summary_check(
            check_api.Result(
                check_api.Conclusion(previous_summary["conclusion"]),
                title=previous_summary["output"]["title"],
                summary=previous_summary["output"]["summary"],
            )
        )
    else:
        ctxt.log.warning("the pull request doesn't have a summary")
Example #17
def test_conclusion_str() -> None:
    assert str(check_api.Conclusion(None)) == "🟠 pending"
    assert str(check_api.Conclusion("success")) == "✅ success"
Example #18
    async def run(self, ctxt: context.Context,
                  rule: "rules.EvaluatedRule") -> check_api.Result:
        subscription_status = await self._subscription_status(ctxt)
        if subscription_status:
            return subscription_status

        if self.config["method"] == "fast-forward":
            if self.config["update_method"] != "rebase":
                return check_api.Result(
                    check_api.Conclusion.FAILURE,
                    f"`update_method: {self.config['update_method']}` is not compatible with fast-forward merge method",
                    "`update_method` must be set to `rebase`.",
                )
            elif self.config["commit_message_template"] is not None:
                return check_api.Result(
                    check_api.Conclusion.FAILURE,
                    "Commit message can't be changed with fast-forward merge method",
                    "`commit_message_template` must not be set if `method: fast-forward` is set.",
                )
            elif self.queue_rule.config["batch_size"] > 1:
                return check_api.Result(
                    check_api.Conclusion.FAILURE,
                    "batch_size > 1 is not compatible with fast-forward merge method",
                    "The merge `method` or the queue configuration must be updated.",
                )
            elif self.queue_rule.config["speculative_checks"] > 1:
                return check_api.Result(
                    check_api.Conclusion.FAILURE,
                    "speculative_checks > 1 is not compatible with fast-forward merge method",
                    "The merge `method` or the queue configuration must be updated.",
                )
            elif not self.queue_rule.config["allow_inplace_checks"]:
                return check_api.Result(
                    check_api.Conclusion.FAILURE,
                    "allow_inplace_checks=False is not compatible with fast-forward merge method",
                    "The merge `method` or the queue configuration must be updated.",
                )

        protection = await ctxt.repository.get_branch_protection(
            ctxt.pull["base"]["ref"])
        if (protection and "required_status_checks" in protection
                and "strict" in protection["required_status_checks"]
                and protection["required_status_checks"]["strict"]):
            if self.queue_rule.config["batch_size"] > 1:
                return check_api.Result(
                    check_api.Conclusion.FAILURE,
                    "batch_size > 1 is not compatible with branch protection setting",
                    "The branch protection setting `Require branches to be up to date before merging` must be unset.",
                )
            elif self.queue_rule.config["speculative_checks"] > 1:
                return check_api.Result(
                    check_api.Conclusion.FAILURE,
                    "speculative_checks > 1 is not compatible with branch protection setting",
                    "The branch protection setting `Require branches to be up to date before merging` must be unset.",
                )

        # FIXME(sileht): we should use the computed update_bot_account in TrainCar.update_pull(),
        # not the original one
        try:
            await action_utils.render_bot_account(
                ctxt,
                self.config["update_bot_account"],
                option_name="update_bot_account",
                required_feature=subscription.Features.MERGE_BOT_ACCOUNT,
                missing_feature_message="Queue with `update_bot_account` set is unavailable",
            )
        except action_utils.RenderBotAccountFailure as e:
            return check_api.Result(e.status, e.title, e.reason)

        try:
            merge_bot_account = await action_utils.render_bot_account(
                ctxt,
                self.config["merge_bot_account"],
                option_name="merge_bot_account",
                required_feature=subscription.Features.MERGE_BOT_ACCOUNT,
                missing_feature_message="Queue with `merge_bot_account` set is unavailable",
                # NOTE(sileht): we don't allow admin, because if branch protections are
                # enabled but not enforced on admins, we may bypass them
                required_permissions=["write", "maintain"],
            )
        except action_utils.RenderBotAccountFailure as e:
            return check_api.Result(e.status, e.title, e.reason)

        q = await merge_train.Train.from_context(ctxt)
        car = q.get_car(ctxt)
        await self._update_merge_queue_summary(ctxt, rule, q, car)

        if ctxt.user_refresh_requested() or ctxt.admin_refresh_requested():
            # NOTE(sileht): the user asked for a refresh, we just remove the previous state
            # of this check and the method _should_be_queued will return true again :)
            check = await ctxt.get_engine_check_run(
                constants.MERGE_QUEUE_SUMMARY_NAME)
            if check and check_api.Conclusion(check["conclusion"]) not in [
                    check_api.Conclusion.SUCCESS,
                    check_api.Conclusion.PENDING,
                    check_api.Conclusion.NEUTRAL,
            ]:
                await check_api.set_check_run(
                    ctxt,
                    constants.MERGE_QUEUE_SUMMARY_NAME,
                    check_api.Result(
                        check_api.Conclusion.PENDING,
                        "The pull request has been refreshed and is going to be re-embarked soon",
                        "",
                    ),
                )

        self._set_effective_priority(ctxt)

        result = await self.merge_report(ctxt)
        if result is None:
            if await self._should_be_queued(ctxt, q):
                await q.add_pull(
                    ctxt, typing.cast(queue.PullQueueConfig, self.config))
                try:
                    qf = await freeze.QueueFreeze.get(ctxt.repository,
                                                      self.config["name"])
                    if await self._should_be_merged(ctxt, q, qf):
                        result = await self._merge(ctxt, rule, q,
                                                   merge_bot_account)
                    else:
                        result = await self.get_queue_status(ctxt, rule, q, qf)

                except Exception:
                    await q.remove_pull(ctxt)
                    raise
            else:
                result = await self.get_unqueue_status(ctxt, q)

        if result.conclusion is not check_api.Conclusion.PENDING:
            await q.remove_pull(ctxt)

        # NOTE(sileht): Only refresh if the car still exists and is the same as
        # before we ran the action
        new_car = q.get_car(ctxt)
        if (car and car.queue_pull_request_number is not None and new_car
                and new_car.creation_state == "created"
                and new_car.queue_pull_request_number is not None
                and new_car.queue_pull_request_number
                == car.queue_pull_request_number
                and self.need_draft_pull_request_refresh()
                and not ctxt.has_been_only_refreshed()):
            # NOTE(sileht): The event is not only a refresh, so we need to
            # update the associated transient pull request.
            # Filtering out refreshes is mandatory to avoid a loop
            # of refreshes between this PR and the transient one.
            await utils.send_pull_refresh(
                ctxt.repository.installation.redis.stream,
                ctxt.pull["base"]["repo"],
                pull_request_number=new_car.queue_pull_request_number,
                action="internal",
                source="forward from queue action (run)",
            )
        return result
Example #19
async def run_pending_commands_tasks(
        ctxt: context.Context, mergify_config: rules.MergifyConfig) -> None:
    if ctxt.is_merge_queue_pr():
        # We don't allow any command yet
        return

    pendings = set()
    async for comment in ctxt.client.items(
            f"{ctxt.base_url}/issues/{ctxt.pull['number']}/comments",
            resource_name="comments",
            page_limit=20,
    ):
        if comment["user"]["id"] != config.BOT_USER_ID:
            continue

        # Old format
        match = COMMAND_RESULT_MATCHER_OLD.search(comment["body"])
        if match:
            command = match[1]
            state = match[2]
            if state == "pending":
                pendings.add(command)
            elif command in pendings:
                pendings.remove(command)

            continue

        # New format
        match = COMMAND_RESULT_MATCHER.search(comment["body"])

        if match is None:
            continue

        try:
            payload = json.loads(match[1])
        except Exception:
            LOG.warning("Unable to load command payload: %s", match[1])
            continue

        command = payload.get("command")
        if not command:
            continue

        conclusion_str = payload.get("conclusion")

        try:
            conclusion = check_api.Conclusion(conclusion_str)
        except ValueError:
            LOG.error("Unable to load conclusions %s", conclusion_str)
            continue

        if conclusion == check_api.Conclusion.PENDING:
            pendings.add(command)
        elif command in pendings:
            try:
                pendings.remove(command)
            except KeyError:
                LOG.error("Unable to remove command: %s", command)

    for pending in pendings:
        await handle(ctxt,
                     mergify_config,
                     f"@Mergifyio {pending}",
                     None,
                     rerun=True)