def test_disabled():
    action = request_reviews.RequestReviewsAction.get_schema()(
        {
            "random_count": 2,
            "teams": {
                "foobar": 2,
                "foobaz": 1,
            },
            "users": {
                "jd": 2,
                "sileht": 1,
            },
        },
    )
    client = mock.MagicMock()
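    # the installation["id"] lookup is mocked to 123 so it matches the dashboard URL asserted below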
    client.auth.installation.__getitem__.return_value = 123
    sub = subscription.Subscription(
        123,
        False,
        "No sub",
        {},
        frozenset({}),
    )
    ctxt = context.Context(
        client,
        {
            "number": 123,
            "state": None,
            "mergeable_state": "ok",
            "merged_by": None,
            "merged": None,
            "merged_at": None,
        },
        sub,
    )
    assert action.run(ctxt, None, None) == (
        "action_required",
        "Random request reviews are disabled",
        "⚠  The [subscription](https://dashboard.mergify.io/installation/123/subscription) needed to be updated to enable them.",
    )
    def _test_dismiss_reviews_fail(self, msg):
        rules = {
            "pull_request_rules": [{
                "name":
                "dismiss reviews",
                "conditions": [f"base={self.master_branch_name}"],
                "actions": {
                    "dismiss_reviews": {
                        "message": msg,
                        "approved": True,
                        "changes_requested": ["mergify-test1"],
                    }
                },
            }]
        }

        self.setup_repo(yaml.dump(rules))
        p, commits = self.create_pr()
        branch = self.get_full_branch_name("fork/pr%d" % self.pr_counter)
        self.create_review(p, commits[-1], "APPROVE")

        self.assertEqual(
            [("APPROVED", "mergify-test1")],
            [(r.state, r.user.login) for r in p.get_reviews()],
        )

        self._push_for_synchronize(branch)

        self.wait_for("pull_request", {"action": "synchronize"})

        ctxt = context.Context(self.cli_integration,
                               {"number": p.raw_data["number"]}, {})

        assert len(ctxt.pull_engine_check_runs) == 1
        check = ctxt.pull_engine_check_runs[0]
        assert "failure" == check["conclusion"]
        assert "The Mergify configuration is invalid" == check["output"][
            "title"]
        return check
Example #3
    async def test_create_pull_basic(self):
        await self.setup_repo(yaml.dump({}))

        p1, _ = await self.create_pr()
        p2, _ = await self.create_pr()

        ctxt = context.Context(self.repository_ctxt, p1.raw_data)
        q = await merge_train.Train.from_context(ctxt)
        head_sha = await q.get_head_sha()

        config = queue.QueueConfig(
            name="foo",
            strict_method="merge",
            priority=0,
            effective_priority=0,
            bot_account=None,
            update_bot_account=None,
        )

        car = merge_train.TrainCar(
            q,
            p2.number,
            [p1.number],
            config,
            head_sha,
            head_sha,
        )
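        # creating the car's pull request should add a temporary merge-queue PR alongside p1 and p2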
        await car.create_pull()
        assert car.queue_pull_request_number is not None
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 3

        await car.delete_pull()

        # NOTE(sileht): When the branch is deleted, the associated pull request is deleted
        # asynchronously on the GitHub side.
        time.sleep(1)
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 2
def test_pull_behind(commits_tree_generator):
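    # the fixture provides the expected is_behind value together with the fake commit list served by the mocked client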
    expected, commits = commits_tree_generator
    client = mock.Mock()
    client.items.return_value = commits  # /pulls/X/commits
    client.item.return_value = {"commit": {"sha": "base"}}  # /branch/#foo

    ctxt = context.Context(
        client,
        {
            "number": 1,
            "mergeable_state": "clean",
            "state": "open",
            "merged": False,
            "merged_at": None,
            "merged_by": None,
            "base": {
                "ref": "#foo"
            },
        },
        {},
    )

    assert expected == ctxt.is_behind
def test_get_commits_to_cherry_pick_merge(commits):
    c1 = {"sha": "c1f", "parents": [], "commit": {"message": "foobar"}}
    c2 = {"sha": "c2", "parents": [c1], "commit": {"message": "foobar"}}
    commits.return_value = [c1, c2]

    client = mock.Mock()
    client.auth.get_access_token.return_value = "<token>"

    ctxt = context.Context(
        client,
        {
            "number": 6,
            "merged": True,
            "state": "closed",
            "html_url": "<html_url>",
            "base": {
                "sha": "sha",
                "ref": "ref",
                "user": {"login": "******"},
                "repo": {"full_name": "user/ref", "name": "name", "private": False},
            },
            "head": {
                "ref": "fork",
                "repo": {"full_name": "fork/other", "name": "other", "private": False},
            },
            "user": {"login": "******"},
            "merged_at": None,
            "merged_by": None,
            "mergeable_state": "clean",
        },
        {},
    )

    base_branch = {"sha": "base_branch", "parents": []}
    merge_commit = {"sha": "merge_commit", "parents": [base_branch, c2]}

    assert duplicate_pull._get_commits_to_cherrypick(ctxt, merge_commit) == [c1, c2]
Example #6
    async def test_queue_no_tmp_pull_request(self):
        rules = {
            "queue_rules": [
                {
                    "name": "default",
                    "conditions": [
                        "status-success=continuous-integration/fake-ci",
                    ],
                },
            ],
            "pull_request_rules": [
                {
                    "name": "Merge train",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=queue",
                    ],
                    "actions": {"queue": {"name": "default"}},
                },
            ],
        }
        await self.setup_repo(yaml.dump(rules))

        p1, _ = await self.create_pr()
        await self.create_status(p1)
        await self.add_label(p1, "queue")
        await self.run_engine()

        ctxt_p1 = context.Context(self.repository_ctxt, p1.raw_data)
        q = await merge_train.Train.from_context(ctxt_p1)
        pulls_in_queue = await q.get_pulls()
        assert pulls_in_queue == []

        # the pull request was merged without needing a train car
        p1.update()
        assert p1.merged
    def _test_comment_template_error(self, msg):
        rules = {
            "pull_request_rules": [
                {
                    "name": "comment",
                    "conditions": [f"base={self.master_branch_name}"],
                    "actions": {"comment": {"message": msg}},
                }
            ]
        }

        self.setup_repo(yaml.dump(rules))

        p, _ = self.create_pr()

        p.update()

        ctxt = context.Context(self.cli_integration, p.raw_data, {})

        assert len(ctxt.pull_engine_check_runs) == 1
        check = ctxt.pull_engine_check_runs[0]
        assert "failure" == check["conclusion"]
        assert "The Mergify configuration is invalid" == check["output"]["title"]
        return check
Example #8
def run(client, pull, sub, sources):
    LOG.debug("engine get context")
    ctxt = context.Context(client, pull, sub)
    ctxt.log.debug("engine start processing context")

    issue_comment_sources = []
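    # comment events are routed to the command runner below; every other event feeds the rules engine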

    for source in sources:
        if source["event_type"] == "issue_comment":
            issue_comment_sources.append(source)
        else:
            ctxt.sources.append(source)

    ctxt.log.debug("engine run pending commands")
    commands_runner.run_pending_commands_tasks(ctxt)

    if issue_comment_sources:
        ctxt.log.debug("engine handle commands")
        for source in issue_comment_sources:
            commands_runner.handle(
                ctxt,
                source["data"]["comment"]["body"],
                source["data"]["comment"]["user"],
            )

    if not ctxt.sources:
        return

    if ctxt.client.auth.permissions_need_to_be_updated:
        ctxt.set_summary_check(
            check_api.Result(
                check_api.Conclusion.FAILURE,
                title="Required GitHub permissions are missing.",
                summary="You can accept them at https://dashboard.mergify.io/",
            )
        )
        return

    ctxt.log.debug("engine check configuration change")
    if check_configuration_changes(ctxt):
        ctxt.log.info("Configuration changed, ignoring")
        return

    ctxt.log.debug("engine get configuration")
    # BRANCH CONFIGURATION CHECKING
    try:
        filename, mergify_config = rules.get_mergify_config(
            ctxt.client, ctxt.pull["base"]["repo"]["name"]
        )
    except rules.NoRules:  # pragma: no cover
        ctxt.log.info("No need to proceed queue (.mergify.yml is missing)")
        return
    except rules.InvalidRules as e:  # pragma: no cover
        ctxt.log.info(
            "The Mergify configuration is invalid",
            summary=str(e),
            annotations=e.get_annotations(e.filename),
        )
        # Not configured, post status check with the error message
        if any(
            (
                s["event_type"] == "pull_request"
                and s["data"]["action"] in ["opened", "synchronize"]
                for s in ctxt.sources
            )
        ):
            ctxt.set_summary_check(
                check_api.Result(
                    check_api.Conclusion.FAILURE,
                    title="The Mergify configuration is invalid",
                    summary=str(e),
                    annotations=e.get_annotations(e.filename),
                )
            )
        return

    # Add global and mandatory rules
    mergify_config["pull_request_rules"].rules.extend(DEFAULT_PULL_REQUEST_RULES.rules)

    if ctxt.pull["base"]["repo"]["private"] and not ctxt.subscription.has_feature(
        subscription.Features.PRIVATE_REPOSITORY
    ):
        ctxt.log.info("mergify disabled: private repository")
        ctxt.set_summary_check(
            check_api.Result(
                check_api.Conclusion.FAILURE,
                title="Mergify is disabled",
                summary=sub.reason,
            )
        )
        return

    ensure_summary_on_head_sha(ctxt)

    # NOTE(jd): that's fine for now, but I wonder if we wouldn't need a higher abstraction
    # to have such things run properly. Like hooks based on events that you could
    # register. It feels hackish otherwise.
    if any(
        s["event_type"] == "pull_request" and s["data"]["action"] == "closed"
        for s in ctxt.sources
    ):
        ctxt.clear_cached_last_summary_head_sha()

    ctxt.log.debug("engine handle actions")
    actions_runner.handle(mergify_config["pull_request_rules"], ctxt)
Example #9
    async def test_queue_ci_failure(self):
        rules = {
            "queue_rules": [
                {
                    "name": "default",
                    "conditions": [
                        "status-success=continuous-integration/fake-ci",
                    ],
                }
            ],
            "pull_request_rules": [
                {
                    "name": "Merge priority high",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=queue",
                    ],
                    "actions": {"queue": {"name": "default", "priority": "high"}},
                },
            ],
        }
        await self.setup_repo(yaml.dump(rules))

        p1, _ = await self.create_pr()
        p2, _ = await self.create_pr()

        # To force others to be rebased
        p, _ = await self.create_pr()
        p.merge()
        await self.wait_for("pull_request", {"action": "closed"})
        await self.run_engine()
        p.update()

        await self.add_label(p1, "queue")
        await self.add_label(p2, "queue")
        await self.run_engine()

        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 3

        tmp_mq_p2 = pulls[0]
        assert tmp_mq_p2.number not in [p1.number, p2.number]

        ctxt = context.Context(self.repository_ctxt, p.raw_data)
        q = await merge_train.Train.from_context(ctxt)
        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p1.number,
                    [],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "updated",
                    None,
                ),
                TrainCarMatcher(
                    p2.number,
                    [p1.number],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "created",
                    tmp_mq_p2.number,
                ),
            ],
        )

        # the tmp merge-queue pull for p2 fails
        await self.create_status(tmp_mq_p2, state="failure")
        await self.run_engine()

        # then p1 fails too
        p1.update()
        await self.create_status(p1, state="failure")
        await self.run_engine()

        # TODO(sileht): Add some assertion on check-runs content

        # the tmp merge-queue pull for p2 has been closed and p2 has been updated/rebased
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 2
        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p2.number,
                    [],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "updated",
                    None,
                ),
            ],
        )

        # Merge p2
        p2.update()
        await self.create_status(p2)
        await self.run_engine()
        await self.wait_for("push", {"ref": f"refs/heads/{self.master_branch_name}"})
        await self.run_engine()

        # Only p1 is still there and the queue is empty
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 1
        assert pulls[0].number == p1.number
        await self._assert_cars_contents(q, [])
    def test_draft(self):
        rules = {
            "pull_request_rules": [{
                "name": "no-draft",
                "conditions": ["draft"],
                "actions": {
                    "comment": {
                        "message": "draft pr"
                    }
                },
            }]
        }
        self.setup_repo(yaml.dump(rules))

        pr, _ = self.create_pr()
        ctxt = context.Context(self.cli_integration, {"number": pr.number}, {})
        assert not ctxt.pull_request.draft

        pr, _ = self.create_pr(draft=True)

        self.wait_for("issue_comment", {"action": "created"})

        ctxt = context.Context(self.cli_integration, {"number": pr.number}, {})
        assert ctxt.pull_request.draft

        pr.update()
        comments = list(pr.get_issue_comments())
        self.assertEqual("draft pr", comments[-1].body)

        # Test underscore/dash attributes
        assert ctxt.pull_request.review_requested == []

        with pytest.raises(AttributeError):
            assert ctxt.pull_request.foobar

        # Test items
        assert list(ctxt.pull_request) == list(context.PullRequest.ATTRIBUTES)
        assert dict(ctxt.pull_request.items()) == {
            "number": pr.number,
            "closed": False,
            "locked": False,
            "assignee": [],
            "approved-reviews-by": [],
            "files": ["test2"],
            "status-neutral": [],
            "commented-reviews-by": [],
            "milestone": "",
            "label": [],
            "body": "Pull request n2 from fork",
            "base": self.master_branch_name,
            "review-requested": [],
            "status-success": ["Summary"],
            "changes-requested-reviews-by": [],
            "merged": False,
            "head": self.get_full_branch_name("fork/pr2"),
            "author": "mergify-test2",
            "dismissed-reviews-by": [],
            "merged-by": "",
            "status-failure": [],
            "title": "Pull request n2 from fork",
            "conflict": False,
        }
Example #11
    async def test_unqueue(self) -> None:
        rules = {
            "queue_rules": [{
                "name":
                "default",
                "conditions": [
                    "status-success=continuous-integration/fake-ci",
                ],
            }],
            "pull_request_rules": [
                {
                    "name": "Queue",
                    "conditions": [f"base={self.main_branch_name}"],
                    "actions": {
                        "queue": {
                            "name": "default"
                        }
                    },
                },
            ],
        }
        await self.setup_repo(yaml.dump(rules))

        p1 = await self.create_pr()
        await self.run_engine()
        ctxt = context.Context(self.repository_ctxt, p1)
        q = await merge_train.Train.from_context(ctxt)
        base_sha = await q.get_base_sha()
        await test_queue.TestQueueAction._assert_cars_contents(
            q,
            base_sha,
            [
                test_queue.TrainCarMatcher(
                    [p1["number"]],
                    [],
                    base_sha,
                    "updated",
                    p1["number"],
                ),
            ],
        )

        check = first(
            await context.Context(self.repository_ctxt,
                                  p1).pull_engine_check_runs,
            key=lambda c: c["name"] == "Rule: Queue (queue)",
        )
        assert check is not None
        assert (check["output"]["title"] ==
                "The pull request is the 1st in the queue to be merged")

        await self.create_comment_as_admin(p1["number"], "@mergifyio requeue")
        await self.run_engine()
        await self.wait_for("issue_comment", {"action": "created"})

        comments = await self.get_issue_comments(p1["number"])
        assert (comments[-1]["body"] == """> requeue

#### ☑️ This pull request is already queued



<!--
DO NOT EDIT
-*- Mergify Payload -*-
{"command": "requeue", "conclusion": "neutral"}
-*- Mergify Payload End -*-
-->
""")

        check = first(
            await context.Context(self.repository_ctxt,
                                  p1).pull_engine_check_runs,
            key=lambda c: c["name"] == "Rule: Queue (queue)",
        )
        assert check is not None
        assert (check["output"]["title"] ==
                "The pull request is the 1st in the queue to be merged")

        await self.create_comment_as_admin(p1["number"], "@mergifyio unqueue")
        await self.run_engine()
        await self.wait_for("issue_comment", {"action": "created"})

        await test_queue.TestQueueAction._assert_cars_contents(q, None, [])

        check = first(
            await context.Context(self.repository_ctxt,
                                  p1).pull_engine_check_runs,
            key=lambda c: c["name"] == "Rule: Queue (queue)",
        )
        assert check is not None
        assert check["conclusion"] == "cancelled"
        assert (check["output"]["title"] ==
                "The pull request has been removed from the queue")
        assert (
            check["output"]["summary"] ==
            "The pull request has been manually removed from the queue by an `unqueue` command."
        )

        check = first(
            await context.Context(self.repository_ctxt,
                                  p1).pull_engine_check_runs,
            key=lambda c: c["name"] == constants.MERGE_QUEUE_SUMMARY_NAME,
        )
        assert check is not None
        assert check["conclusion"] == "cancelled"
        assert (
            check["output"]["title"] ==
            "The pull request has been removed from the queue by an `unqueue` command"
        )

        await self.create_comment_as_admin(p1["number"], "@mergifyio requeue")
        await self.run_engine()
        await self.wait_for("issue_comment", {"action": "created"})

        check = first(
            await context.Context(self.repository_ctxt,
                                  p1).pull_engine_check_runs,
            key=lambda c: c["name"] == constants.MERGE_QUEUE_SUMMARY_NAME,
        )
        assert check is not None
        assert check["conclusion"] is None
        assert check["output"]["title"].startswith(
            "The pull request is embarked with")

        check = first(
            await context.Context(self.repository_ctxt,
                                  p1).pull_engine_check_runs,
            key=lambda c: c["name"] == "Rule: Queue (queue)",
        )
        assert check is not None
        assert (check["output"]["title"] ==
                "The pull request is the 1st in the queue to be merged")
        await self.create_status(p1)
        await self.run_engine()

        p1 = await self.get_pull(p1["number"])
        assert p1["merged"]
Example #12
def report(
    url: str,
) -> typing.Union[context.Context, github.GithubInstallationClient, None]:
    path = url.replace("https://github.com/", "")
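    # the path may identify an account, a repository, or a single pull request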

    pull_number: typing.Optional[str]
    repo: typing.Optional[str]

    try:
        owner, repo, _, pull_number = path.split("/")
    except ValueError:
        pull_number = None
        try:
            owner, repo = path.split("/")
        except ValueError:
            owner = path
            repo = None

    try:
        client = github.get_client(owner)
    except exceptions.MergifyNotInstalled:
        print(f"* Mergify is not installed on account {owner}")
        return None

    print("* INSTALLATION ID: %s" % client.auth.installation["id"])

    cached_sub, db_sub = utils.async_run(
        subscription.Subscription.get_subscription(client.auth.owner_id),
        subscription.Subscription._retrieve_subscription_from_db(
            client.auth.owner_id),
    )

    if repo is None:
        slug = None
    else:
        slug = owner + "/" + repo

    print("* SUBSCRIBED (cache/db): %s / %s" %
          (cached_sub.active, db_sub.active))
    print("* Features (cache):")
    for f in cached_sub.features:
        print(f"  - {f.value}")
    report_sub(client.auth.installation["id"], cached_sub, "ENGINE-CACHE",
               slug)
    report_sub(client.auth.installation["id"], db_sub, "DASHBOARD", slug)

    utils.async_run(report_worker_status(client.auth.owner))

    if repo is not None:

        repo_info = client.item(f"/repos/{owner}/{repo}")
        print(
            f"* REPOSITORY IS {'PRIVATE' if repo_info['private'] else 'PUBLIC'}"
        )

        print("* CONFIGURATION:")
        mergify_config = None
        try:
            filename, mergify_config_content = rules.get_mergify_config_content(
                client, repo)
        except rules.NoRules:  # pragma: no cover
            print(".mergify.yml is missing")
        else:
            print(f"Config filename: {filename}")
            print(mergify_config_content.decode())
            try:
                mergify_config = rules.UserConfigurationSchema(
                    mergify_config_content)
            except rules.InvalidRules as e:  # pragma: no cover
                print("configuration is invalid %s" % str(e))
            else:
                mergify_config["pull_request_rules"].rules.extend(
                    engine.DEFAULT_PULL_REQUEST_RULES.rules)

        if pull_number is None:
            for branch in client.items(f"/repos/{owner}/{repo}/branches"):
                q = queue.Queue(
                    utils.get_redis_for_cache(),
                    client.auth.installation["id"],
                    client.auth.owner,
                    repo,
                    branch["name"],
                )
                pulls = q.get_pulls()
                if not pulls:
                    continue

                print(f"* QUEUES {branch['name']}:")

                for priority, grouped_pulls in itertools.groupby(
                        pulls, key=lambda v: q.get_config(v)["priority"]):
                    try:
                        fancy_priority = helpers.PriorityAliases(priority).name
                    except ValueError:
                        fancy_priority = priority
                    formatted_pulls = ", ".join(
                        (f"#{p}" for p in grouped_pulls))
                    print(f"** {formatted_pulls} (priority: {fancy_priority})")
        else:
            pull_raw = client.item(
                f"/repos/{owner}/{repo}/pulls/{pull_number}")
            ctxt = context.Context(
                client,
                pull_raw,
                cached_sub,
                [{
                    "event_type": "mergify-debugger",
                    "data": {}
                }],
            )

            # FIXME queues could also be printed if no pull number given
            q = queue.Queue.from_context(ctxt)
            print("* QUEUES: %s" % ", ".join([f"#{p}" for p in q.get_pulls()]))
            print("* PULL REQUEST:")
            pr_data = dict(ctxt.pull_request.items())
            pprint.pprint(pr_data, width=160)

            print("is_behind: %s" % ctxt.is_behind)

            print("mergeable_state: %s" % ctxt.pull["mergeable_state"])

            print("* MERGIFY LAST CHECKS:")
            for c in ctxt.pull_engine_check_runs:
                print("[%s]: %s | %s" %
                      (c["name"], c["conclusion"], c["output"].get("title")))
                print("> " +
                      "\n> ".join(c["output"].get("summary").split("\n")))

            if mergify_config is not None:
                print("* MERGIFY LIVE MATCHES:")
                match = mergify_config[
                    "pull_request_rules"].get_pull_request_rule(ctxt)
                summary_title, summary = actions_runner.gen_summary(
                    ctxt, match)
                print("> %s" % summary_title)
                print(summary)

            return ctxt

    return client
Example #13
    def test_merge_priority(self):
        rules = {
            "pull_request_rules": [
                {
                    "name": "Merge priority high",
                    "conditions":
                    [f"base={self.master_branch_name}", "label=high"],
                    "actions": {
                        "merge": {
                            "strict": "smart+ordered",
                            "priority": "high"
                        }
                    },
                },
                {
                    "name":
                    "Merge priority default",
                    "conditions":
                    [f"base={self.master_branch_name}", "label=medium"],
                    "actions": {
                        "merge": {
                            "strict": "smart+ordered"
                        }
                    },
                },
                {
                    "name": "Merge priority low",
                    "conditions":
                    [f"base={self.master_branch_name}", "label=low"],
                    "actions": {
                        "merge": {
                            "strict": "smart+ordered",
                            "priority": 1
                        }
                    },
                },
            ]
        }

        self.setup_repo(yaml.dump(rules))

        p_high, _ = self.create_pr()
        p_medium, _ = self.create_pr()
        p_low, _ = self.create_pr()

        # To force others to be rebased
        p, _ = self.create_pr()
        p.merge()
        self.wait_for("pull_request", {"action": "closed"}),

        # Label them in reverse priority order to ensure they get reordered
        self.add_label(p_low, "low")
        self.add_label(p_medium, "medium")
        self.add_label(p_high, "high")

        ctxt = context.Context(self.cli_integration, p.raw_data, {})
        q = queue.Queue.from_context(ctxt)
        pulls_in_queue = q.get_pulls()
        assert pulls_in_queue == [p_high.number, p_medium.number, p_low.number]

        queue.Queue.process_queues()
        self.wait_for("pull_request", {"action": "closed"})

        queue.Queue.process_queues()
        self.wait_for("pull_request", {"action": "closed"})

        queue.Queue.process_queues()
        self.wait_for("pull_request", {"action": "closed"})

        p_low.update()
        p_medium.update()
        p_high.update()
        self.assertEqual(True, p_low.merged)
        self.assertEqual(True, p_high.merged)
        self.assertEqual(True, p_medium.merged)
        assert p_low.merged_at > p_medium.merged_at > p_high.merged_at
Example #14
def test_get_pull_request_rule():

    client = mock.Mock()

    get_reviews = [{
        "user": {
            "login": "******",
            "type": "User"
        },
        "state": "APPROVED",
        "author_association": "MEMBER",
    }]
    get_files = [{"filename": "README.rst"}, {"filename": "setup.py"}]
    get_team_members = [{"login": "******"}, {"login": "******"}]

    get_checks = []
    get_statuses = [{
        "context": "continuous-integration/fake-ci",
        "state": "success"
    }]
    client.item.return_value = {"permission": "write"}  # get review user perm

    def client_items(url, *args, **kwargs):
        if url == "/repos/another-jd/name/pulls/1/reviews":
            return get_reviews
        elif url == "/repos/another-jd/name/pulls/1/files":
            return get_files
        elif url == "/repos/another-jd/name/commits/<sha>/check-runs":
            return get_checks
        elif url == "/repos/another-jd/name/commits/<sha>/status":
            return get_statuses
        elif url == "/orgs/orgs/teams/my-reviewers/members":
            return get_team_members
        else:
            raise RuntimeError(f"not handled url {url}")

    client.items.side_effect = client_items

    ctxt = context.Context(
        client,
        {
            "number": 1,
            "html_url": "<html_url>",
            "state": "closed",
            "merged_by": None,
            "merged_at": None,
            "merged": False,
            "draft": False,
            "milestone": None,
            "mergeable_state": "unstable",
            "assignees": [],
            "labels": [],
            "base": {
                "ref": "master",
                "repo": {
                    "name": "name",
                    "private": False
                },
                "user": {
                    "login": "******"
                },
                "sha": "mew",
            },
            "head": {
                "ref": "myfeature",
                "sha": "<sha>"
            },
            "locked": False,
            "requested_reviewers": [],
            "requested_teams": [],
            "title": "My awesome job",
            "body": "I rock",
            "user": {
                "login": "******"
            },
        },
        {},
    )

    # Empty conditions
    pull_request_rules = rules.PullRequestRules(
        [rules.Rule(name="default", conditions=[], actions={})])

    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert [rules.EvaluatedRule.from_rule(r, [])
            for r in match.rules] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    pull_request_rules = pull_request_rule_from_list([{
        "name":
        "hello",
        "conditions": ["base:master"],
        "actions": {}
    }])

    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["hello"]
    assert [r.name for r in match.matching_rules] == ["hello"]
    assert [rules.EvaluatedRule.from_rule(r, [])
            for r in match.rules] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    pull_request_rules = pull_request_rule_from_list([
        {
            "name": "hello",
            "conditions": ["base:master"],
            "actions": {}
        },
        {
            "name": "backport",
            "conditions": ["base:master"],
            "actions": {}
        },
    ])

    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["hello", "backport"]
    assert [r.name for r in match.matching_rules] == ["hello", "backport"]
    assert [rules.EvaluatedRule.from_rule(r, [])
            for r in match.rules] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    pull_request_rules = pull_request_rule_from_list([
        {
            "name": "hello",
            "conditions": ["author:foobar"],
            "actions": {}
        },
        {
            "name": "backport",
            "conditions": ["base:master"],
            "actions": {}
        },
    ])

    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["hello", "backport"]
    assert [r.name for r in match.matching_rules] == ["backport"]
    for rule in match.rules:
        assert rule.actions == {}

    pull_request_rules = pull_request_rule_from_list([
        {
            "name": "hello",
            "conditions": ["author:another-jd"],
            "actions": {}
        },
        {
            "name": "backport",
            "conditions": ["base:master"],
            "actions": {}
        },
    ])

    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["hello", "backport"]
    assert [r.name for r in match.matching_rules] == ["hello", "backport"]
    assert [rules.EvaluatedRule.from_rule(r, [])
            for r in match.rules] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    # No match
    pull_request_rules = pull_request_rule_from_list([{
        "name":
        "merge",
        "conditions": [
            "base=xyz",
            "check-success=continuous-integration/fake-ci",
            "#approved-reviews-by>=1",
        ],
        "actions": {},
    }])

    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["merge"]
    assert [r.name for r in match.matching_rules] == []

    pull_request_rules = pull_request_rule_from_list([{
        "name":
        "merge",
        "conditions": [
            "base=master",
            "check-success=continuous-integration/fake-ci",
            "#approved-reviews-by>=1",
        ],
        "actions": {},
    }])

    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["merge"]
    assert [r.name for r in match.matching_rules] == ["merge"]
    assert [rules.EvaluatedRule.from_rule(r, [])
            for r in match.rules] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    pull_request_rules = pull_request_rule_from_list([
        {
            "name":
            "merge",
            "conditions": [
                "base=master",
                "check-success=continuous-integration/fake-ci",
                "#approved-reviews-by>=2",
            ],
            "actions": {},
        },
        {
            "name":
            "fast merge",
            "conditions": [
                "base=master",
                "label=fast-track",
                "check-success=continuous-integration/fake-ci",
                "#approved-reviews-by>=1",
            ],
            "actions": {},
        },
        {
            "name":
            "fast merge with alternate ci",
            "conditions": [
                "base=master",
                "label=fast-track",
                "check-success=continuous-integration/fake-ci-bis",
                "#approved-reviews-by>=1",
            ],
            "actions": {},
        },
        {
            "name":
            "fast merge from a bot",
            "conditions": [
                "base=master",
                "author=mybot",
                "check-success=continuous-integration/fake-ci",
            ],
            "actions": {},
        },
    ])
    match = pull_request_rules.get_pull_request_rule(ctxt)

    assert [r.name for r in match.rules] == [
        "merge",
        "fast merge",
        "fast merge with alternate ci",
        "fast merge from a bot",
    ]
    assert [r.name for r in match.matching_rules] == [
        "merge",
        "fast merge",
        "fast merge with alternate ci",
    ]
    for rule in match.rules:
        assert rule.actions == {}

    assert match.matching_rules[0].name == "merge"
    assert len(match.matching_rules[0].missing_conditions) == 1
    assert (str(match.matching_rules[0].missing_conditions[0]) ==
            "#approved-reviews-by>=2")

    assert match.matching_rules[1].name == "fast merge"
    assert len(match.matching_rules[1].missing_conditions) == 1
    assert str(
        match.matching_rules[1].missing_conditions[0]) == "label=fast-track"

    assert match.matching_rules[2].name == "fast merge with alternate ci"
    assert len(match.matching_rules[2].missing_conditions) == 2
    assert str(
        match.matching_rules[2].missing_conditions[0]) == "label=fast-track"
    assert (str(match.matching_rules[2].missing_conditions[1]) ==
            "check-success=continuous-integration/fake-ci-bis")

    # Team conditions with one review missing
    pull_request_rules = pull_request_rule_from_list([{
        "name":
        "default",
        "conditions": [
            "approved-reviews-by=@orgs/my-reviewers",
            "#approved-reviews-by>=2",
        ],
        "actions": {},
    }])

    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]

    assert match.matching_rules[0].name == "default"
    assert len(match.matching_rules[0].missing_conditions) == 1
    assert (str(match.matching_rules[0].missing_conditions[0]) ==
            "#approved-reviews-by>=2")

    get_reviews.append({
        "user": {
            "login": "******",
            "type": "User"
        },
        "state": "APPROVED",
        "author_association": "MEMBER",
    })

    del ctxt.__dict__["reviews"]
    del ctxt.__dict__["consolidated_reviews"]

    # Team conditions with no review missing
    pull_request_rules = pull_request_rule_from_list([{
        "name":
        "default",
        "conditions": [
            "approved-reviews-by=@orgs/my-reviewers",
            "#approved-reviews-by>=2",
        ],
        "actions": {},
    }])

    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]

    assert match.matching_rules[0].name == "default"
    assert len(match.matching_rules[0].missing_conditions) == 0

    # Forbidden labels, when no label is set
    pull_request_rules = pull_request_rule_from_list([{
        "name":
        "default",
        "conditions": ["-label~=^(status/wip|status/blocked|review/need2)$"],
        "actions": {},
    }])

    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert match.matching_rules[0].name == "default"
    assert len(match.matching_rules[0].missing_conditions) == 0

    # Forbidden labels, when a forbidden label is set
    ctxt.pull["labels"] = [{"name": "status/wip"}]

    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert match.matching_rules[0].name == "default"
    assert len(match.matching_rules[0].missing_conditions) == 1
    assert str(match.matching_rules[0].missing_conditions[0]) == (
        "-label~=^(status/wip|status/blocked|review/need2)$")

    # Forbidden labels, when another label is set
    ctxt.pull["labels"] = [{"name": "allowed"}]

    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert match.matching_rules[0].name == "default"
    assert len(match.matching_rules[0].missing_conditions) == 0

    # Test team expander
    pull_request_rules = pull_request_rule_from_list([{
        "name":
        "default",
        "conditions": ["author~=^(user1|user2|another-jd)$"],
        "actions": {},
    }])
    match = pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert match.matching_rules[0].name == "default"
    assert len(match.matching_rules[0].missing_conditions) == 0
Example #15
    def test_merge_rule_switch(self):
        rules = {
            "pull_request_rules": [
                {
                    "name":
                    "Merge priority high",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=high",
                        "status-success=continuous-integration/fake-ci",
                    ],
                    "actions": {
                        "merge": {
                            "strict": "smart+ordered",
                            "priority": "high"
                        }
                    },
                },
                {
                    "name":
                    "Merge priority medium",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=medium",
                        "status-success=continuous-integration/fake-ci",
                    ],
                    "actions": {
                        "merge": {
                            "strict": "smart+ordered"
                        }
                    },
                },
                {
                    "name":
                    "Merge priority low",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=low",
                        "status-success=continuous-integration/fake-ci",
                    ],
                    "actions": {
                        "merge": {
                            "strict": "smart+ordered",
                            "priority": 1
                        }
                    },
                },
            ]
        }

        self.setup_repo(yaml.dump(rules))

        p1, _ = self.create_pr()
        p2, _ = self.create_pr()

        # To force others to be rebased
        p, _ = self.create_pr()
        p.merge()
        self.wait_for("pull_request", {"action": "closed"}),

        # Label them in reverse priority order to ensure they get reordered
        self.add_label(p1, "medium")
        self.add_label(p2, "low")
        self.create_status(p1)
        self.create_status(p2)
        self.run_engine()

        ctxt = context.Context(self.cli_integration, p.raw_data, {})
        q = queue.Queue.from_context(ctxt)
        pulls_in_queue = q.get_pulls()
        assert pulls_in_queue == [p1.number, p2.number]

        p2.remove_from_labels("low")
        self.add_label(p2, "high")
        self.run_engine()
        pulls_in_queue = q.get_pulls()
        assert pulls_in_queue == [p2.number, p1.number]
Example #16
    def test_merge_priority(self):
        rules = {
            "pull_request_rules": [
                {
                    "name":
                    "Merge priority high",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=high",
                        "status-success=continuous-integration/fake-ci",
                    ],
                    "actions": {
                        "merge": {
                            "strict": "smart+ordered",
                            "priority": "high"
                        }
                    },
                },
                {
                    "name":
                    "Merge priority default",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=medium",
                        "status-success=continuous-integration/fake-ci",
                    ],
                    "actions": {
                        "merge": {
                            "strict": "smart+ordered"
                        }
                    },
                },
                {
                    "name":
                    "Merge priority low",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=low",
                        "status-success=continuous-integration/fake-ci",
                    ],
                    "actions": {
                        "merge": {
                            "strict": "smart+ordered",
                            "priority": 1
                        }
                    },
                },
            ]
        }

        self.setup_repo(yaml.dump(rules))

        p_high, _ = self.create_pr()
        p_medium, _ = self.create_pr()
        p_low, _ = self.create_pr()

        # To force others to be rebased
        p, _ = self.create_pr()
        p.merge()
        self.wait_for("pull_request", {"action": "closed"}),
        self.run_engine()

        # Label them in reverse priority order to ensure they get reordered
        self.add_label(p_low, "low")
        self.create_status(p_low)
        self.add_label(p_medium, "medium")
        self.create_status(p_medium)
        self.add_label(p_high, "high")
        self.create_status(p_high)
        self.run_engine()

        ctxt = context.Context(self.cli_integration, p.raw_data, {})
        q = queue.Queue.from_context(ctxt)
        pulls_in_queue = q.get_pulls()
        assert pulls_in_queue == [p_high.number, p_medium.number, p_low.number]

        # Each PR can be rebased because we inserted them in reverse order, but they are
        # still all in the queue
        self.wait_for("pull_request", {"action": "synchronize"})
        self.wait_for("pull_request", {"action": "synchronize"})
        self.wait_for("pull_request", {"action": "synchronize"})

        self.run_engine()
        p_high.update()
        self.create_status(p_high)
        self.run_engine()  # PR merged, refresh emitted on next PR
        self.wait_for("pull_request", {"action": "closed"})
        self.run_engine()  # exec the refresh

        self.wait_for("pull_request", {"action": "synchronize"})
        self.run_engine()
        p_medium.update()
        self.create_status(p_medium)
        self.run_engine()  # PR merged, refresh emitted on next PR
        self.wait_for("pull_request", {"action": "closed"})
        self.run_engine()  # exec the refresh

        self.wait_for("pull_request", {"action": "synchronize"})
        self.run_engine()
        p_low.update()
        self.create_status(p_low)
        self.run_engine()  # PR merged, refresh emitted on next PR
        self.wait_for("pull_request", {"action": "closed"})

        p_low = p_low.base.repo.get_pull(p_low.number)
        p_medium = p_medium.base.repo.get_pull(p_medium.number)
        p_high = p_high.base.repo.get_pull(p_high.number)
        self.assertEqual(True, p_low.merged)
        self.assertEqual(True, p_medium.merged)
        self.assertEqual(True, p_high.merged)

        assert p_low.merged_at > p_medium.merged_at > p_high.merged_at
Example #17
    async def test_ongoing_train_second_pr_ready_first(self):
        rules = {
            "queue_rules": [
                {
                    "name": "default",
                    "conditions": [
                        "status-success=continuous-integration/fake-ci",
                    ],
                }
            ],
            "pull_request_rules": [
                {
                    "name": "Merge priority high",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=queue",
                    ],
                    "actions": {"queue": {"name": "default", "priority": "high"}},
                },
            ],
        }
        await self.setup_repo(yaml.dump(rules))

        p1, _ = await self.create_pr()
        p2, _ = await self.create_pr()

        # To force others to be rebased
        p, _ = await self.create_pr()
        p.merge()
        await self.wait_for("pull_request", {"action": "closed"})
        await self.run_engine()
        p.update()

        # Queue two pulls
        await self.add_label(p1, "queue")
        await self.add_label(p2, "queue")
        await self.run_engine()

        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 3

        tmp_mq_p2 = pulls[0]
        assert tmp_mq_p2.number not in [p1.number, p2.number]

        ctxt = context.Context(self.repository_ctxt, p.raw_data)
        q = await merge_train.Train.from_context(ctxt)
        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p1.number,
                    [],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "updated",
                    None,
                ),
                TrainCarMatcher(
                    p2.number,
                    [p1.number],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "created",
                    tmp_mq_p2.number,
                ),
            ],
        )

        # p2 is ready first, ensure it's not merged
        await self.create_status(tmp_mq_p2)
        await self.run_engine()
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 2

        # Nothing changes
        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p1.number,
                    [],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "updated",
                    None,
                ),
                TrainCarMatcher(
                    p2.number,
                    [p1.number],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "created",
                    tmp_mq_p2.number,
                ),
            ],
        )
        # TODO(sileht): look state of p2 merge queue check-run

        # p1 is ready, check both are merged in a row
        p1.update()
        await self.create_status(p1)
        await self.run_engine()

        await self.wait_for("push", {"ref": f"refs/heads/{self.master_branch_name}"})
        await self.run_engine(3)

        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 0
        await self._assert_cars_contents(q, [])
        p1.update()
        assert p1.merged
        p2.update()
        assert p2.merged
Example #18
    async def test_ongoing_train_basic(self):
        rules = {
            "queue_rules": [
                {
                    "name": "default",
                    "conditions": [
                        "status-success=continuous-integration/fake-ci",
                    ],
                }
            ],
            "pull_request_rules": [
                {
                    "name": "Merge priority high",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=queue",
                    ],
                    "actions": {"queue": {"name": "default", "priority": "high"}},
                },
            ],
        }
        await self.setup_repo(yaml.dump(rules))

        p1, _ = await self.create_pr()
        p2, _ = await self.create_pr()
        p3, _ = await self.create_pr()

        # To force others to be rebased
        p, _ = await self.create_pr()
        p.merge()
        await self.wait_for("pull_request", {"action": "closed"})
        await self.run_engine()
        p.update()

        # Queue PRs
        await self.add_label(p1, "queue")
        await self.run_engine()
        await self.add_label(p2, "queue")
        await self.run_engine()

        # Check Queue
        pulls = list(self.r_o_admin.get_pulls())
        # 1 queued and rebased PR + 1 queued PR with its tmp PR + 1 PR not queued
        assert len(pulls) == 4

        tmp_mq_p2 = pulls[0]
        assert tmp_mq_p2.number not in [p1.number, p2.number, p3.number]

        ctxt = context.Context(self.repository_ctxt, p.raw_data)
        q = await merge_train.Train.from_context(ctxt)

        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p1.number,
                    [],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "updated",
                    None,
                ),
                TrainCarMatcher(
                    p2.number,
                    [p1.number],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "created",
                    tmp_mq_p2.number,
                ),
            ],
        )

        # ensure it has been rebased
        head_sha = p1.head.sha
        p1.update()
        assert p1.head.sha != head_sha

        # Merge p1
        await self.create_status(p1)
        await self.run_engine()
        await self.wait_for("push", {"ref": f"refs/heads/{self.master_branch_name}"})
        await self.run_engine(3)
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 3
        p1.update()
        assert p1.merged

        # ensure the base is p and it's tested with p1, but current_base_sha has changed since
        # we created the tmp pull request
        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p2.number,
                    [p1.number],
                    p.merge_commit_sha,
                    p1.merge_commit_sha,
                    "created",
                    tmp_mq_p2.number,
                ),
            ],
        )

        # Queue p3
        await self.add_label(p3, "queue")
        await self.run_engine()

        # Check train state
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 4

        tmp_mq_p3 = pulls[0]
        assert tmp_mq_p3.number not in [
            p1.number,
            p2.number,
            p3.number,
            tmp_mq_p2.number,
        ]

        q = await merge_train.Train.from_context(ctxt)

        await self._assert_cars_contents(
            q,
            [
                # Ensure p2 car is still the same
                TrainCarMatcher(
                    p2.number,
                    [p1.number],
                    p.merge_commit_sha,
                    p1.merge_commit_sha,
                    "created",
                    tmp_mq_p2.number,
                ),
                # Ensure base is p1 and only p2 is tested with p3
                TrainCarMatcher(
                    p3.number,
                    [p2.number],
                    p1.merge_commit_sha,
                    p1.merge_commit_sha,
                    "created",
                    tmp_mq_p3.number,
                ),
            ],
        )
Example #19
async def test_summary_synchronization_cache() -> None:
    client = mock.MagicMock()
    client.auth.get_access_token.return_value = "<token>"

    ctxt = context.Context(
        client,
        {
            "id": github_types.GitHubPullRequestId(github_types.GitHubIssueId(0)),
            "maintainer_can_modify": False,
            "rebaseable": False,
            "draft": False,
            "merge_commit_sha": None,
            "labels": [],
            "number": github_types.GitHubPullRequestNumber(
                github_types.GitHubIssueNumber(6)
            ),
            "merged": True,
            "state": "closed",
            "html_url": "<html_url>",
            "base": {
                "label": "",
                "sha": github_types.SHAType("sha"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                },
                "ref": github_types.GitHubRefType("ref"),
                "label": "",
                "repo": {
                    "archived": False,
                    "url": "",
                    "default_branch": github_types.GitHubRefType(""),
                    "id": github_types.GitHubRepositoryIdType(456),
                    "full_name": "user/ref",
                    "name": "name",
                    "private": False,
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                    },
                },
            },
            "head": {
                "label": "",
                "sha": github_types.SHAType("old-sha-one"),
                "ref": github_types.GitHubRefType("fork"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                },
                "repo": {
                    "archived": False,
                    "url": "",
                    "default_branch": github_types.GitHubRefType(""),
                    "id": github_types.GitHubRepositoryIdType(123),
                    "full_name": "fork/other",
                    "name": "other",
                    "private": False,
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                    },
                },
            },
            "user": {
                "login": github_types.GitHubLogin("user"),
                "id": github_types.GitHubAccountIdType(0),
                "type": "User",
            },
            "merged_by": None,
            "merged_at": None,
            "mergeable_state": "clean",
        },
        subscription.Subscription(1, False, "", {}, frozenset()),
    )
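    # No summary has been posted yet, so no head sha is cached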
    assert await ctxt.get_cached_last_summary_head_sha() is None
    await ctxt.set_summary_check(
        check_api.Result(check_api.Conclusion.SUCCESS, "foo", "bar")
    )

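    # Posting the summary caches the pull request's current head sha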
    assert await ctxt.get_cached_last_summary_head_sha() == "old-sha-one"
    ctxt.clear_cached_last_summary_head_sha()

    assert await ctxt.get_cached_last_summary_head_sha() is None
Exemple #20
0
    async def test_queue_cant_create_tmp_pull_request(self):
        rules = {
            "queue_rules": [
                {
                    "name": "default",
                    "conditions": [
                        "status-success=continuous-integration/fake-ci",
                    ],
                }
            ],
            "pull_request_rules": [
                {
                    "name": "Merge priority high",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=queue",
                    ],
                    "actions": {"queue": {"name": "default", "priority": "high"}},
                },
            ],
        }
        await self.setup_repo(yaml.dump(rules))
        p1, _ = await self.create_pr(files={"conflicts": "well"})
        p2, _ = await self.create_pr(files={"conflicts": "boom"})
        p3, _ = await self.create_pr()

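        # p1 and p2 modify the same file with different content, so the speculative
        # merge-queue PR for p2 will hit a merge conflict once queued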
        await self.add_label(p1, "queue")
        await self.add_label(p2, "queue")
        await self.add_label(p3, "queue")
        await self.run_engine(3)

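        # p1, p2 and p3 are still open, plus the tmp merge-queue PR created for p3;
        # p2 could not get one because of the merge conflict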
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 4

        tmp_mq_p3 = pulls[0]
        assert tmp_mq_p3.number not in [p1.number, p2.number, p3.number]

        # Check only p1 and p3 are in the train
        ctxt_p1 = context.Context(self.repository_ctxt, p1.raw_data)
        q = await merge_train.Train.from_context(ctxt_p1)
        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p1.number,
                    [],
                    p1.base.sha,
                    p1.base.sha,
                    "updated",
                    None,
                ),
                TrainCarMatcher(
                    p3.number,
                    [p1.number],
                    p1.base.sha,
                    p1.base.sha,
                    "created",
                    tmp_mq_p3.number,
                ),
            ],
        )

        # Ensure p2 status is updated with the failure
        p2.update()
        ctxt_p2 = context.Context(self.repository_ctxt, p2.raw_data)
        check = first(
            await ctxt_p2.pull_engine_check_runs,
            key=lambda c: c["name"] == constants.MERGE_QUEUE_SUMMARY_NAME,
        )
        assert (
            check["output"]["title"] == "This pull request cannot be embarked for merge"
        )
        assert (
            check["output"]["summary"]
            == "The merge-queue pull request can't be created\nDetails: `Merge conflict`"
        )

        # Merge the train
        await self.create_status(p1)
        await self.run_engine()
        await self.wait_for("push", {"ref": f"refs/heads/{self.master_branch_name}"})
        await self.create_status(tmp_mq_p3)
        await self.run_engine()
        await self.wait_for("push", {"ref": f"refs/heads/{self.master_branch_name}"})

        # Only p2 remains open, and it is not in the train
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 1
        assert pulls[0].number == p2.number
        await self._assert_cars_contents(q, [])
Exemple #21
0
    async def test_queue_cancel_and_refresh(self):
        rules = {
            "queue_rules": [
                {
                    "name": "default",
                    "conditions": [
                        "status-success=continuous-integration/fake-ci",
                    ],
                }
            ],
            "pull_request_rules": [
                {
                    "name": "Tchou tchou",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=queue",
                    ],
                    "actions": {"queue": {"name": "default"}},
                },
            ],
        }
        await self.setup_repo(yaml.dump(rules))

        p1, _ = await self.create_pr()
        p2, _ = await self.create_pr()
        p3, _ = await self.create_pr()

        # Queue PRs
        await self.add_label(p1, "queue")
        await self.add_label(p2, "queue")
        await self.add_label(p3, "queue")
        await self.run_engine()

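        # p1, p2 and p3 are still open, plus the two tmp merge-queue PRs for p2 and p3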
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 5

        tmp_mq_p3 = pulls[0]
        tmp_mq_p2 = pulls[1]
        assert tmp_mq_p3.number not in [p1.number, p2.number, p3.number]
        assert tmp_mq_p2.number not in [p1.number, p2.number, p3.number]

        ctxt_p_merged = context.Context(self.repository_ctxt, p1.raw_data)
        q = await merge_train.Train.from_context(ctxt_p_merged)
        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p1.number,
                    [],
                    p1.base.sha,
                    p1.base.sha,
                    "updated",
                    None,
                ),
                TrainCarMatcher(
                    p2.number,
                    [p1.number],
                    p1.base.sha,
                    p1.base.sha,
                    "created",
                    tmp_mq_p2.number,
                ),
                TrainCarMatcher(
                    p3.number,
                    [p1.number, p2.number],
                    p1.base.sha,
                    p1.base.sha,
                    "created",
                    tmp_mq_p3.number,
                ),
            ],
        )

        await self.create_status(p1)
        await self.run_engine()

        # Ensure p1 is removed and current_head_sha has been updated on p2 and p3
        p1.update()
        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p2.number,
                    [p1.number],
                    p1.base.sha,
                    p1.merge_commit_sha,
                    "created",
                    tmp_mq_p2.number,
                ),
                TrainCarMatcher(
                    p3.number,
                    [p1.number, p2.number],
                    p1.base.sha,
                    p1.merge_commit_sha,
                    "created",
                    tmp_mq_p3.number,
                ),
            ],
        )

        # CI fails on the tmp merge-queue PR of p2
        await self.create_status(tmp_mq_p2, state="failure")
        await self.run_engine()

        # The tmp merge-queue PRs of p2 and p3 have been closed; only p2 and p3 remain open
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 2

        # p3 is now rebased instead of having a tmp merge-queue PR
        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p3.number,
                    [],
                    p1.merge_commit_sha,
                    p1.merge_commit_sha,
                    "updated",
                    None,
                ),
            ],
        )

        # refresh p2 to add it back to the queue
        ctxt = await self.repository_ctxt.get_pull_request_context(
            p2.number, p2.raw_data
        )
        check = first(
            await ctxt.pull_engine_check_runs,
            key=lambda c: c["name"] == constants.MERGE_QUEUE_SUMMARY_NAME,
        )
        check_suite_id = check["check_suite"]["id"]

        # click on the refresh button (re-request the check suite)
        await self.installation_ctxt.client.post(
            f"{self.repository_ctxt.base_url}/check-suites/{check_suite_id}/rerequest"
        )
        await self.wait_for("check_suite", {"action": "rerequested"})
        await self.run_engine()

        # Check the pull request is back in the queue and a tmp merge-queue PR has been recreated

        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 3

        tmp_mq_p2_bis = pulls[0]
        assert tmp_mq_p2_bis.number not in [p1.number, p2.number, p3.number]

        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p3.number,
                    [],
                    p1.merge_commit_sha,
                    p1.merge_commit_sha,
                    "updated",
                    None,
                ),
                TrainCarMatcher(
                    p2.number,
                    [p3.number],
                    p1.merge_commit_sha,
                    p1.merge_commit_sha,
                    "created",
                    tmp_mq_p2_bis.number,
                ),
            ],
        )
Exemple #22
0
    async def test_basic_queue(self):
        rules = {
            "queue_rules": [
                {
                    "name": "default",
                    "conditions": [
                        "status-success=continuous-integration/fake-ci",
                    ],
                }
            ],
            "pull_request_rules": [
                {
                    "name": "Merge priority high",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=queue",
                    ],
                    "actions": {"queue": {"name": "default", "priority": "high"}},
                },
            ],
        }
        await self.setup_repo(yaml.dump(rules))

        p1, _ = await self.create_pr()
        p2, _ = await self.create_pr(two_commits=True)

        # To force others to be rebased
        p, _ = await self.create_pr()
        p.merge()
        await self.wait_for("pull_request", {"action": "closed"})
        await self.run_engine()
        p.update()

        await self.add_label(p1, "queue")
        await self.add_label(p2, "queue")
        await self.run_engine()

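        # p1, p2 and the tmp merge-queue PR used to test p2 on top of p1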
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 3

        tmp_pull = pulls[0]
        assert tmp_pull.number not in [p1.number, p2.number]

        ctxt = context.Context(self.repository_ctxt, p.raw_data)
        q = await merge_train.Train.from_context(ctxt)

        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p1.number,
                    [],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "updated",
                    None,
                ),
                TrainCarMatcher(
                    p2.number,
                    [p1.number],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "created",
                    tmp_pull.number,
                ),
            ],
        )

        # TODO(sileht): Add some assertion on check-runs content

        assert tmp_pull.commits == 5
        await self.create_status(tmp_pull)

        head_sha = p1.head.sha
        p1.update()
        assert p1.head.sha != head_sha  # ensure it has been rebased
        await self.create_status(p1)

        await self.run_engine()

        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 0

        await self._assert_cars_contents(q, [])
Exemple #23
0
def run(client, pull, subscription, sources):
    LOG.debug("engine get context")
    ctxt = context.Context(client, pull, subscription)
    ctxt.log.debug("engine start processing context")

    issue_comment_sources = []

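    # issue_comment events only drive commands; the remaining sources feed the rule
    # engine below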
    for source in sources:
        if source["event_type"] == "issue_comment":
            issue_comment_sources.append(source)
        else:
            ctxt.sources.append(source)

    ctxt.log.debug("engine run pending commands")
    commands_runner.run_pending_commands_tasks(ctxt)

    if issue_comment_sources:
        ctxt.log.debug("engine handle commands")
        for source in issue_comment_sources:
            commands_runner.handle(
                ctxt,
                source["data"]["comment"]["body"],
                source["data"]["comment"]["user"],
            )

    if not ctxt.sources:
        return

    if ctxt.client.auth.permissions_need_to_be_updated:
        check_api.set_check_run(
            ctxt,
            "Summary",
            "completed",
            "failure",
            output={
                "title": "Required GitHub permissions are missing.",
                "summary":
                "You can accept them at https://dashboard.mergify.io/",
            },
        )
        return

    ctxt.log.debug("engine check configuration change")
    if check_configuration_changes(ctxt):
        ctxt.log.info("Configuration changed, ignoring")
        return

    ctxt.log.debug("engine get configuration")
    # BRANCH CONFIGURATION CHECKING
    try:
        filename, mergify_config = rules.get_mergify_config(ctxt)
    except rules.NoRules:  # pragma: no cover
        ctxt.log.info("No need to proceed queue (.mergify.yml is missing)")
        return
    except rules.InvalidRules as e:  # pragma: no cover
        # Not configured, post status check with the error message
        if any((s["event_type"] == "pull_request"
                and s["data"]["action"] in ["opened", "synchronize"]
                for s in ctxt.sources)):
            check_api.set_check_run(
                ctxt,
                actions_runner.SUMMARY_NAME,
                "completed",
                "failure",
                output={
                    "title": "The Mergify configuration is invalid",
                    "summary": str(e),
                    "annotations": e.get_annotations(e.filename),
                },
            )
        return

    # Add global and mandatory rules
    mergify_config["pull_request_rules"].rules.extend(
        rules.PullRequestRules.from_list(MERGIFY_RULE["rules"]).rules)

    if ctxt.pull["base"]["repo"][
            "private"] and not subscription["subscription_active"]:
        check_api.set_check_run(
            ctxt,
            actions_runner.SUMMARY_NAME,
            "completed",
            "failure",
            output={
                "title": "Mergify is disabled",
                "summary": subscription["subscription_reason"],
            },
        )
        return

    # Check runs are attached to the head sha, so when a user adds commits or
    # force-pushes we can't directly get the previous Mergify Summary. We copy it
    # here so anything that looks for it in later engine runs will find it.

    synchronize_events = {
        s["data"]["after"]: s["data"]
        for s in ctxt.sources
        if s["event_type"] == "pull_request"
        and s["data"]["action"] == "synchronize"
    }
    if synchronize_events:
        ctxt.log.debug("engine synchronize summary")

        # NOTE(sileht): We sometimes get many synchronize events in a row; the last
        # one does not always have the Summary, so we also look at older ones if
        # necessary.
        after_sha = ctxt.pull["head"]["sha"]
        while synchronize_events:
            sync_event = synchronize_events.pop(after_sha, None)
            if sync_event:
                if copy_summary_from_previous_head_sha(ctxt,
                                                       sync_event["before"]):
                    break
                else:
                    after_sha = sync_event["before"]
            else:
                ctxt.log.warning(
                    "Got synchronize event but didn't find Summary on previous head sha",
                )
                break

    ctxt.log.debug("engine handle actions")
    actions_runner.handle(mergify_config["pull_request_rules"], ctxt)
Exemple #24
0
    async def test_queue_manual_merge(self):
        rules = {
            "queue_rules": [
                {
                    "name": "default",
                    "conditions": [
                        "status-success=continuous-integration/fake-ci",
                    ],
                }
            ],
            "pull_request_rules": [
                {
                    "name": "Merge priority high",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=queue",
                    ],
                    "actions": {"queue": {"name": "default", "priority": "high"}},
                },
            ],
        }
        await self.setup_repo(yaml.dump(rules))

        p1, _ = await self.create_pr()
        p2, _ = await self.create_pr()

        # To force others to be rebased
        p, _ = await self.create_pr()
        p.merge()
        await self.wait_for("pull_request", {"action": "closed"})
        await self.run_engine()
        p.update()

        # Queue PRs
        await self.add_label(p1, "queue")
        await self.add_label(p2, "queue")

        await self.run_engine()

        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 3

        tmp_mq_p2 = pulls[0]
        assert tmp_mq_p2.number not in [p1.number, p2.number]

        ctxt_p_merged = context.Context(self.repository_ctxt, p.raw_data)
        q = await merge_train.Train.from_context(ctxt_p_merged)
        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p1.number,
                    [],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "updated",
                    None,
                ),
                TrainCarMatcher(
                    p2.number,
                    [p1.number],
                    p.merge_commit_sha,
                    p.merge_commit_sha,
                    "created",
                    tmp_mq_p2.number,
                ),
            ],
        )

        # Ensure p1 has been rebased
        head_sha = p1.head.sha
        p1.update()
        assert p1.head.sha != head_sha

        # Merge a not queued PR manually
        p_merged_in_meantime, _ = await self.create_pr()
        p_merged_in_meantime.merge()
        await self.wait_for("pull_request", {"action": "closed"})
        await self.wait_for("push", {"ref": f"refs/heads/{self.master_branch_name}"})
        p_merged_in_meantime.update()

        await self.run_engine(3)

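        # The train has been recreated on top of the manually merged PR:
        # p1, p2 and a new tmp merge-queue PR remain open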
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 3

        tmp_mq_p2_bis = pulls[0]
        assert tmp_mq_p2_bis.number not in [p1.number, p2.number]

        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p1.number,
                    [],
                    p_merged_in_meantime.merge_commit_sha,
                    p_merged_in_meantime.merge_commit_sha,
                    "updated",
                    None,
                ),
                TrainCarMatcher(
                    p2.number,
                    [p1.number],
                    p_merged_in_meantime.merge_commit_sha,
                    p_merged_in_meantime.merge_commit_sha,
                    "created",
                    tmp_mq_p2_bis.number,
                ),
            ],
        )

        # Check the train has been reset on top of the new base sha
        # Ensure p1 has been rebased again and p2's tmp PR has been recreated with more commits
        head_sha = p1.head.sha
        p1.update()
        assert p1.head.sha != head_sha
        assert tmp_mq_p2_bis.commits == 5

        # Merge the train
        await self.create_status(p1)
        await self.create_status(tmp_mq_p2_bis)
        await self.run_engine()

        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 0

        await self._assert_cars_contents(q, [])
Exemple #25
0
    def test_merge_rule_switch(self):
        rules = {
            "pull_request_rules": [
                {
                    "name": "Merge priority high",
                    "conditions": [f"base={self.master_branch_name}", "label=high"],
                    "actions": {
                        "merge": {"strict": "smart+ordered", "priority": "high"}
                    },
                },
                {
                    "name": "Merge priority medium",
                    "conditions": [f"base={self.master_branch_name}", "label=medium"],
                    "actions": {"merge": {"strict": "smart+ordered"}},
                },
                {
                    "name": "Merge priority low",
                    "conditions": [f"base={self.master_branch_name}", "label=low"],
                    "actions": {
                        "merge": {"strict": "smart+ordered", "priority": 1}
                    },
                },
            ]
        }

        self.setup_repo(yaml.dump(rules))

        p1, _ = self.create_pr()
        p2, _ = self.create_pr()

        # To force others to be rebased
        p, _ = self.create_pr()
        p.merge()
        self.wait_for("pull_request", {"action": "closed"}),

        # Merge them in reverse priority order to ensure they are reordered
        self.add_label(p1, "medium")
        self.add_label(p2, "low")

        ctxt = context.Context(self.cli_integration, p.raw_data, {})
        q = queue.Queue.from_context(ctxt)
        pulls_in_queue = q.get_pulls()
        assert pulls_in_queue == [p1.number, p2.number]

        # NOTE(sileht): The removal and the add must be part of the same batch to make the
        # test useful
        p2.remove_from_labels("low")
        self.add_label(p2, "high")
        pulls_in_queue = q.get_pulls()
        assert pulls_in_queue == [p2.number, p1.number]
Exemple #26
0
    async def test_queue_priority(self):
        rules = {
            "queue_rules": [
                {
                    "name": "urgent",
                    "conditions": [
                        "status-success=continuous-integration/fast-ci",
                    ],
                },
                {
                    "name": "default",
                    "conditions": [
                        "status-success=continuous-integration/slow-ci",
                    ],
                },
            ],
            "pull_request_rules": [
                {
                    "name": "Merge priority high",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=queue-urgent",
                    ],
                    "actions": {"queue": {"name": "urgent"}},
                },
                {
                    "name": "Merge priority high",
                    "conditions": [
                        f"base={self.master_branch_name}",
                        "label=queue",
                    ],
                    "actions": {"queue": {"name": "default"}},
                },
            ],
        }
        await self.setup_repo(yaml.dump(rules))

        p1, _ = await self.create_pr()
        p2, _ = await self.create_pr()

        # To force others to be rebased
        p_merged, _ = await self.create_pr()
        p_merged.merge()
        await self.wait_for("pull_request", {"action": "closed"})
        await self.run_engine()
        p_merged.update()

        # Put first PR in queue
        await self.add_label(p1, "queue")
        await self.run_engine()

        ctxt_p_merged = context.Context(self.repository_ctxt, p_merged.raw_data)
        q = await merge_train.Train.from_context(ctxt_p_merged)

        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 2

        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p1.number,
                    [],
                    p_merged.merge_commit_sha,
                    p_merged.merge_commit_sha,
                    "updated",
                    None,
                ),
            ],
        )

        # ensure it has been rebased
        head_sha = p1.head.sha
        p1.update()
        assert p1.head.sha != head_sha
        assert p1.commits == 2

        # Put the second PR at the beginning of the queue via queue priority
        await self.add_label(p2, "queue-urgent")
        await self.run_engine()

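        # p2 jumps ahead of p1 thanks to the urgent queue; a tmp merge-queue PR now
        # tests p1 on top of p2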
        pulls = list(self.r_o_admin.get_pulls())
        assert len(pulls) == 3

        tmp_mq_p1 = pulls[0]
        assert tmp_mq_p1.number not in [p1.number, p2.number]

        # p2 is inserted at the beginning
        await self._assert_cars_contents(
            q,
            [
                TrainCarMatcher(
                    p2.number,
                    [],
                    p_merged.merge_commit_sha,
                    p_merged.merge_commit_sha,
                    "updated",
                    None,
                ),
                TrainCarMatcher(
                    p1.number,
                    [p2.number],
                    p_merged.merge_commit_sha,
                    p_merged.merge_commit_sha,
                    "created",
                    tmp_mq_p1.number,
                ),
            ],
        )

        # ensure it has been rebased and the tmp merge-queue PR of p1 has all the commits
        head_sha = p2.head.sha
        p2.update()
        assert p2.head.sha != head_sha
        assert tmp_mq_p1.commits == 5
Exemple #27
0
def report(url):
    redis = utils.get_redis_for_cache()
    path = url.replace("https://github.com/", "")
    try:
        owner, repo, _, pull_number = path.split("/")
    except ValueError:
        print(f"Wrong URL: {url}")
        return
    slug = owner + "/" + repo

    try:
        installation = github.get_installation(owner, repo)
    except exceptions.MergifyNotInstalled:
        print("* Mergify is not installed there")
        return

    client = github.get_client(owner, repo, installation)

    print("* INSTALLATION ID: %s" % client.installation["id"])

    cached_sub = sub_utils.get_subscription(redis, client.installation["id"])
    db_sub = sub_utils._retrieve_subscription_from_db(
        client.installation["id"])
    print("* SUBSCRIBED (cache/db): %s / %s" %
          (cached_sub["subscription_active"], db_sub["subscription_active"]))
    report_sub(client.installation["id"], slug, cached_sub, "ENGINE-CACHE")
    report_sub(client.installation["id"], slug, db_sub, "DASHBOARD")

    pull_raw = client.item(f"pulls/{pull_number}")
    ctxt = context.Context(client, pull_raw, [{
        "event_type": "mergify-debugger",
        "data": {}
    }])

    print("* REPOSITORY IS %s" %
          "PRIVATE" if ctxt.pull["base"]["repo"]["private"] else "PUBLIC")

    print("* CONFIGURATION:")
    try:
        filename, mergify_config_content = rules.get_mergify_config_content(
            ctxt)
    except rules.NoRules:  # pragma: no cover
        print(".mergify.yml is missing")
        pull_request_rules = None
    else:
        print(f"Config filename: {filename}")
        print(mergify_config_content.decode())
        try:
            mergify_config = rules.UserConfigurationSchema(
                mergify_config_content)
        except rules.InvalidRules as e:  # pragma: no cover
            print("configuration is invalid %s" % str(e))
        else:
            pull_request_rules_raw = mergify_config["pull_request_rules"].as_dict()
            pull_request_rules = rules.PullRequestRules.from_list(
                pull_request_rules_raw["rules"] + engine.MERGIFY_RULE["rules"])

    print("* PULL REQUEST:")
    pr_data = dict(ctxt.pull_request.items())
    pprint.pprint(pr_data, width=160)

    print("is_behind: %s" % ctxt.is_behind)

    print("mergeable_state: %s" % ctxt.pull["mergeable_state"])

    print("* MERGIFY LAST CHECKS:")
    for c in ctxt.pull_engine_check_runs:
        print("[%s]: %s | %s" %
              (c["name"], c["conclusion"], c["output"].get("title")))
        print("> " + "\n> ".join(c["output"].get("summary").split("\n")))

    if pull_request_rules is not None:
        print("* MERGIFY LIVE MATCHES:")
        match = pull_request_rules.get_pull_request_rule(ctxt)
        summary_title, summary = actions_runner.gen_summary(ctxt, match)
        print("> %s" % summary_title)
        print(summary)

    return ctxt
Exemple #28
0
def test_user_permission_cache() -> None:
    class FakeClient(github.GithubInstallationClient):
        called: int

        def __init__(self, owner, repo):
            super().__init__(auth=None)
            self.owner = owner
            self.repo = repo
            self.called = 0

        def item(self, url, *args, **kwargs):
            self.called += 1
            if self.repo == "test":
                if (url ==
                        f"/repos/{self.owner}/{self.repo}/collaborators/foo/permission"
                    ):
                    return {"permission": "admin"}
                elif url.startswith(
                        f"/repos/{self.owner}/{self.repo}/collaborators/"):
                    return {"permission": "loser"}
            elif self.repo == "test2":
                if (url ==
                        f"/repos/{self.owner}/{self.repo}/collaborators/bar/permission"
                    ):
                    return {"permission": "admin"}
                elif url.startswith(
                        f"/repos/{self.owner}/{self.repo}/collaborators/"):
                    return {"permission": "loser"}
            raise ValueError(f"Unknown test URL `{url}` for repo {self.repo}")

    owner = github_types.GitHubAccount({
        "id": github_types.GitHubAccountIdType(123),
        "login": github_types.GitHubLogin("jd"),
        "type": "User",
    })

    repo = github_types.GitHubRepository({
        "id": github_types.GitHubRepositoryIdType(0),
        "owner": owner,
        "full_name": "",
        "archived": False,
        "url": "",
        "default_branch": github_types.GitHubRefType(""),
        "name": "test",
        "private": False,
    })

    def make_pr(
        repo: github_types.GitHubRepository,
        owner: github_types.GitHubAccount,
    ) -> github_types.GitHubPullRequest:
        return github_types.GitHubPullRequest({
            "id": github_types.GitHubPullRequestId(github_types.GitHubIssueId(0)),
            "maintainer_can_modify": False,
            "head": {
                "user": owner,
                "label": "",
                "ref": github_types.GitHubRefType(""),
                "sha": github_types.SHAType(""),
                "repo": repo,
            },
            "user": owner,
            "number": github_types.GitHubPullRequestNumber(
                github_types.GitHubIssueNumber(0)
            ),
            "rebaseable": False,
            "draft": False,
            "merge_commit_sha": None,
            "html_url": "",
            "state": "closed",
            "mergeable_state": "unknown",
            "merged_by": None,
            "merged": False,
            "merged_at": None,
            "labels": [],
            "base": {
                "ref": github_types.GitHubRefType("main"),
                "sha": github_types.SHAType(""),
                "label": "",
                "repo": repo,
                "user": owner,
            },
        })

    user_1 = github_types.GitHubAccount({
        "id": github_types.GitHubAccountIdType(1),
        "login": github_types.GitHubLogin("foo"),
        "type": "User",
    })
    user_2 = github_types.GitHubAccount({
        "id": github_types.GitHubAccountIdType(2),
        "login": github_types.GitHubLogin("bar"),
        "type": "User",
    })
    user_3 = github_types.GitHubAccount({
        "id": github_types.GitHubAccountIdType(3),
        "login": github_types.GitHubLogin("baz"),
        "type": "User",
    })

    sub = subscription.Subscription(0, False, "", {}, frozenset())
    client = FakeClient(owner["login"], repo["name"])
    c = context.Context(client, make_pr(repo, owner), sub)
    assert client.called == 0
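    # The first permission lookup for each user hits the API once; repeated lookups
    # are served from the cache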
    assert c.has_write_permission(user_1)
    assert client.called == 1
    assert c.has_write_permission(user_1)
    assert client.called == 1
    assert not c.has_write_permission(user_2)
    assert client.called == 2
    assert not c.has_write_permission(user_2)
    assert client.called == 2
    assert not c.has_write_permission(user_3)
    assert client.called == 3

    repo = github_types.GitHubRepository({
        "id": github_types.GitHubRepositoryIdType(1),
        "owner": owner,
        "full_name": "",
        "archived": False,
        "url": "",
        "default_branch": github_types.GitHubRefType(""),
        "name": "test2",
        "private": False,
    })

    client = FakeClient(owner["login"], repo["name"])
    c = context.Context(client, make_pr(repo, owner), sub)
    assert client.called == 0
    assert c.has_write_permission(user_2)
    assert client.called == 1
    assert c.has_write_permission(user_2)
    assert client.called == 1
    assert not c.has_write_permission(user_1)
    assert client.called == 2
    context.Context.clear_user_permission_cache_for_repo(owner, repo)
    assert not c.has_write_permission(user_1)
    assert client.called == 3
    assert not c.has_write_permission(user_3)
    assert client.called == 4
    context.Context.clear_user_permission_cache_for_org(owner)
    assert not c.has_write_permission(user_3)
    assert client.called == 5
    assert c.has_write_permission(user_2)
    assert client.called == 6
    assert c.has_write_permission(user_2)
    assert client.called == 6
    context.Context.clear_user_permission_cache_for_user(owner, repo, user_2)
    assert c.has_write_permission(user_2)
    assert client.called == 7
Exemple #29
0
    def process(self):
        pull_numbers = self.get_pulls()

        self.log.info("%d pulls queued", len(pull_numbers), queue=list(pull_numbers))

        if not pull_numbers:
            return

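        # Only the pull request at the head of the queue is processed on each pass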
        pull_number = pull_numbers[0]

        with github.get_client(self.owner) as client:
            ctxt = None
            try:
                sub = asyncio.run(
                    subscription.Subscription.get_subscription(client.auth.owner_id)
                )
                data = client.item(
                    f"/repos/{self.owner}/{self.repo}/pulls/{pull_number}"
                )

                ctxt = context.Context(client, data, sub)
                if ctxt.pull["base"]["ref"] != self.ref:
                    ctxt.log.info(
                        "pull request base branch have changed",
                        old_branch=self.ref,
                        new_branch=ctxt.pull["base"]["ref"],
                    )
                    self.move_pull_to_new_base_branch(
                        ctxt.pull["number"],
                        self.get_queue(ctxt.pull["base"]["ref"]),
                    )
                elif ctxt.pull["state"] == "closed" or ctxt.is_behind:
                    # NOTE(sileht): Pick up this pull request and rebase it again
                    # or update its status and remove it from the queue
                    ctxt.log.info(
                        "pull request needs to be updated again or has been closed",
                    )
                    self.handle_first_pull_in_queue(ctxt)
                else:
                    # NOTE(sileht): The pull request has not been merged or cancelled
                    # yet, wait for the next loop
                    ctxt.log.info("pull request checks are still in progress")

            except Exception as exc:  # pragma: no cover
                log = self.log if ctxt is None else ctxt.log

                if exceptions.should_be_ignored(exc):
                    log.info(
                        "Fail to process merge queue, remove the pull request from the queue",
                        exc_info=True,
                    )
                    self.remove_pull(ctxt.pull["number"])

                elif exceptions.need_retry(exc):
                    log.info("Fail to process merge queue, need retry", exc_info=True)
                    if isinstance(exc, exceptions.MergeableStateUnknown):
                        # NOTE(sileht): We need GitHub to recompute the state here (by
                        # merging something else for example), so move it to the end
                        self._move_pull_at_end(pull_number)

                else:
                    log.error("Fail to process merge queue", exc_info=True)
                    self._move_pull_at_end(pull_number)
def test_review_permission_cache():
    class FakeClient(object):
        def __init__(self, owner, repo):
            self.owner = owner
            self.repo = repo

        def item(self, url, *args, **kwargs):
            if self.repo == "test":
                if (
                    url
                    == f"/repos/{self.owner}/{self.repo}/collaborators/foo/permission"
                ):
                    return {"permission": "admin"}
                elif url.startswith(f"/repos/{self.owner}/{self.repo}/collaborators/"):
                    return {"permission": "loser"}
            elif self.repo == "test2":
                if (
                    url
                    == f"/repos/{self.owner}/{self.repo}/collaborators/bar/permission"
                ):
                    return {"permission": "admin"}
                elif url.startswith(f"/repos/{self.owner}/{self.repo}/collaborators/"):
                    return {"permission": "loser"}
            raise ValueError(f"Unknown test URL `{url}` for repo {self.repo}")

    owner = "jd"
    repo = "test"

    def make_pr(repo, owner):
        return {
            "number": 123,
            "state": "closed",
            "mergeable_state": "hello",
            "merged_by": None,
            "merged": None,
            "merged_at": None,
            "base": {
                "ref": "main",
                "sha": "mew",
                "repo": {
                    "name": repo,
                    "private": False,
                },
                "user": {
                    "login": owner,
                },
            },
        }

    c = context.Context(FakeClient(owner, repo), make_pr(repo, owner), None)
    assert c._write_permission_cache.currsize == 0
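    # Each new user triggers a single API call; repeated checks reuse the cached
    # permission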
    assert c.has_write_permissions("foo")
    assert c._write_permission_cache.currsize == 1
    assert c.has_write_permissions("foo")
    assert c._write_permission_cache.currsize == 1
    assert not c.has_write_permissions("bar")
    assert c._write_permission_cache.currsize == 2
    assert not c.has_write_permissions("bar")
    assert c._write_permission_cache.currsize == 2
    assert not c.has_write_permissions("baz")
    assert c._write_permission_cache.currsize == 3
    assert not c.has_write_permissions("baz")
    assert c._write_permission_cache.currsize == 3

    repo = "test2"

    c = context.Context(FakeClient(owner, repo), make_pr(repo, owner), None)
    assert c._write_permission_cache.currsize == 0
    assert c.has_write_permissions("bar")
    assert c._write_permission_cache.currsize == 1
    assert c.has_write_permissions("bar")
    assert c._write_permission_cache.currsize == 1
    assert not c.has_write_permissions("foo")
    assert c._write_permission_cache.currsize == 2
    assert not c.has_write_permissions("foo")
    assert c._write_permission_cache.currsize == 2
    assert not c.has_write_permissions("baz")
    assert c._write_permission_cache.currsize == 3
    assert not c.has_write_permissions("baz")
    assert c._write_permission_cache.currsize == 3