Example #1
async def refresh_branch(
    owner: github_types.GitHubLogin,
    repo_name: github_types.GitHubRepositoryName,
    branch: str,
    redis_cache: utils.RedisCache = fastapi.Depends(  # noqa: B008
        redis.get_redis_cache
    ),
    redis_stream: utils.RedisStream = fastapi.Depends(  # noqa: B008
        redis.get_redis_stream
    ),
) -> responses.Response:
    async with github.aget_client(owner_name=owner) as client:
        try:
            repository = await client.item(f"/repos/{owner}/{repo_name}")
        except http.HTTPNotFound:
            return responses.JSONResponse(
                status_code=404, content="repository not found"
            )

    await github_events.send_refresh(
        redis_cache,
        redis_stream,
        repository,
        ref=github_types.GitHubRefType(f"refs/heads/{branch}"),
    )
    return responses.Response("Refresh queued", status_code=202)
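The pattern above recurs throughout these examples: dependencies resolved through fastapi.Depends, a 404 response when the repository lookup fails, and a 202 once the refresh is queued. Here is a minimal, self-contained sketch of that endpoint shape using only FastAPI; the store dependency and the route path are illustrative stand-ins, not Mergify's actual API.

import fastapi
from fastapi import responses

app = fastapi.FastAPI()

async def get_fake_store() -> dict:
    # Stand-in for the redis cache/stream dependencies used above.
    return {"known-org/known-repo": object()}

@app.post("/refresh/{owner}/{repo}/branch/{branch}")
async def refresh_branch(
    owner: str,
    repo: str,
    branch: str,
    store: dict = fastapi.Depends(get_fake_store),  # noqa: B008
) -> responses.Response:
    if f"{owner}/{repo}" not in store:
        return responses.JSONResponse(status_code=404, content="repository not found")
    # A real implementation would enqueue a branch-refresh event here.
    return responses.Response("Refresh queued", status_code=202)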
Example #2
async def test_run_command_with_rerun_and_without_user(redis_cache,
                                                       monkeypatch):

    client = github.aget_client(owner_name="Mergifyio", owner_id=123)

    ctxt = await _create_context(redis_cache, client)

    http_calls = []

    async def mock_post(*args, **kwargs):
        http_calls.append((args, kwargs))
        return

    monkeypatch.setattr(client, "post", mock_post)

    await handle(
        ctxt=ctxt,
        mergify_config={},
        comment="@mergifyio something",
        user=None,
        rerun=True,
    )

    assert ("Sorry but I didn't understand the command."
            in http_calls[0][1]["json"]["body"])
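The test above captures outgoing HTTP calls by monkeypatching the client's post method. The same capture pattern, reduced to a self-contained pytest sketch with a stand-in client instead of Mergify's:

import asyncio

class FakeClient:
    async def post(self, url, json=None):
        raise RuntimeError("network disabled in tests")

def test_capture_post(monkeypatch):  # pytest injects the monkeypatch fixture
    client = FakeClient()
    http_calls = []

    async def mock_post(*args, **kwargs):
        # Record every call instead of performing I/O.
        http_calls.append((args, kwargs))

    monkeypatch.setattr(client, "post", mock_post)
    asyncio.run(client.post("/comments", json={"body": "hi"}))
    assert http_calls[0][1]["json"]["body"] == "hi"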
Example #3
async def refresh_pull(
    owner_login: github_types.GitHubLogin,
    repo_name: github_types.GitHubRepositoryName,
    pull_request_number: github_types.GitHubPullRequestNumber,
    action: github_types.GitHubEventRefreshActionType = "user",
    redis_links: redis_utils.RedisLinks = fastapi.Depends(  # noqa: B008
        redis.get_redis_links),
) -> responses.Response:
    action = RefreshActionSchema(action)

    installation_json = await github.get_installation_from_login(owner_login)
    async with github.aget_client(installation_json) as client:
        try:
            repository = await client.item(f"/repos/{owner_login}/{repo_name}")
        except http.HTTPNotFound:
            return responses.JSONResponse(status_code=404,
                                          content="repository not found")

    await utils.send_pull_refresh(
        redis_links.stream,
        repository,
        action=action,
        pull_request_number=pull_request_number,
        source="API",
    )
    return responses.Response("Refresh queued", status_code=202)
Example #4
async def refresh_pull(
    owner: github_types.GitHubLogin,
    repo_name: github_types.GitHubRepositoryName,
    pull_request_number: github_types.GitHubPullRequestNumber,
    action: github_types.GitHubEventRefreshActionType = "user",
    redis_cache: utils.RedisCache = fastapi.Depends(  # noqa: B008
        redis.get_redis_cache),
    redis_stream: utils.RedisStream = fastapi.Depends(  # noqa: B008
        redis.get_redis_stream),
) -> responses.Response:
    action = RefreshActionSchema(action)
    async with github.aget_client(owner_name=owner) as client:
        try:
            repository = await client.item(f"/repos/{owner}/{repo_name}")
        except http.HTTPNotFound:
            return responses.JSONResponse(status_code=404,
                                          content="repository not found")

    await utils.send_refresh(
        redis_cache,
        redis_stream,
        repository,
        pull_request_number=pull_request_number,
        action=action,
    )
    return responses.Response("Refresh queued", status_code=202)
Example #5
async def _simulator(redis_cache, pull_request_rules, owner, repo, pull_number, token):
    try:
        if token:
            auth = github.GithubTokenAuth(token)
        else:
            auth = github.get_auth(owner)

        async with github.aget_client(auth=auth) as client:
            try:
                data = await client.item(f"/repos/{owner}/{repo}/pulls/{pull_number}")
            except http.HTTPNotFound:
                raise PullRequestUrlInvalid(
                    message=f"Pull request {owner}/{repo}/pulls/{pull_number} not found"
                )

            sub = await subscription.Subscription.get_subscription(
                redis_cache, data["base"]["user"]["id"]
            )

            installation = context.Installation(
                data["base"]["user"]["id"],
                owner,
                sub,
                client,
                redis_cache,
            )
            repository = context.Repository(installation, data["base"]["repo"])
            ctxt = await repository.get_pull_request_context(data["number"], data)
            ctxt.sources = [{"event_type": "mergify-simulator", "data": []}]
            match = await pull_request_rules.get_pull_request_rule(ctxt)
            return await actions_runner.gen_summary(ctxt, pull_request_rules, match)
    except exceptions.MergifyNotInstalled:
        raise PullRequestUrlInvalid(
            message=f"Mergify not installed on repository '{owner}/{repo}'"
        )
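The simulator converts the low-level http.HTTPNotFound into a user-facing PullRequestUrlInvalid. A generic sketch of that translation pattern with explicit exception chaining, so the root cause stays in the traceback (the classes and lookup here are stand-ins, not Mergify's real ones):

class PullRequestUrlInvalid(Exception):
    def __init__(self, message: str) -> None:
        super().__init__(message)
        self.message = message

def lookup_pull(pulls: dict, number: int) -> str:
    try:
        return pulls[number]
    except KeyError as e:
        # "from e" keeps the original KeyError attached to the traceback.
        raise PullRequestUrlInvalid(
            message=f"Pull request #{number} not found"
        ) from e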
Example #6
async def refresh_branch(
    owner_login: github_types.GitHubLogin,
    repo_name: github_types.GitHubRepositoryName,
    branch: str,
    redis_cache: utils.RedisCache = fastapi.Depends(  # noqa: B008
        redis.get_redis_cache),
    redis_stream: utils.RedisStream = fastapi.Depends(  # noqa: B008
        redis.get_redis_stream),
) -> responses.Response:
    installation_json = await github.get_installation_from_login(owner_login)
    async with github.aget_client(installation_json) as client:
        try:
            repository = await client.item(f"/repos/{owner_login}/{repo_name}")
        except http.HTTPNotFound:
            return responses.JSONResponse(status_code=404,
                                          content="repository not found")

    await utils.send_branch_refresh(
        redis_cache,
        redis_stream,
        repository,
        action="user",
        source="API",
        ref=github_types.GitHubRefType(f"refs/heads/{branch}"),
    )
    return responses.Response("Refresh queued", status_code=202)
Example #7
async def test_owner(
    installation: github_types.GitHubInstallation = fastapi.Depends(  # noqa: B008
        security.get_installation
    ),
) -> ResponseTest:
    async with github.aget_client(installation) as client:
        org = await client.item(f"/user/{installation['account']['id']}")
        return ResponseTest(org["login"])
Example #8
async def on_each_event(event: github_types.GitHubEventIssueComment) -> None:
    action = load_action(event["comment"]["body"])
    if action:
        owner = event["repository"]["owner"]["login"]
        repo = event["repository"]["name"]
        async with github.aget_client(owner) as client:
            await client.post(
                f"/repos/{owner}/{repo}/issues/comments/{event['comment']['id']}/reactions",
                json={"content": "+1"},
                api_version="squirrel-girl",
            )  # type: ignore[call-arg]
Example #9
async def on_each_event(event: github_types.GitHubEventIssueComment) -> None:
    action_classes = actions.get_commands()
    match = COMMAND_MATCHER.search(event["comment"]["body"])
    if match and match[1] in action_classes:
        owner = event["repository"]["owner"]["login"]
        repo = event["repository"]["name"]
        async with github.aget_client(owner) as client:
            await client.post(
                f"/repos/{owner}/{repo}/issues/comments/{event['comment']['id']}/reactions",
                json={"content": "+1"},
                api_version="squirrel-girl",
            )
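The COMMAND_MATCHER regex itself is not shown in these examples. A plausible, purely illustrative sketch of how such a matcher could pick a command word out of a comment body (the pattern and command names are assumptions):

import re

COMMAND_MATCHER = re.compile(r"@mergifyio\s+(\w+)")  # assumed shape
action_classes = {"refresh", "rebase", "update"}  # illustrative command names

match = COMMAND_MATCHER.search("@mergifyio rebase please")
assert match is not None and match[1] in action_classes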
Example #10
async def refresh_repo(
        owner: github_types.GitHubLogin,
        repo_name: github_types.GitHubRepositoryName) -> responses.Response:
    global _AREDIS_STREAM, _AREDIS_CACHE
    async with github.aget_client(owner_name=owner) as client:
        try:
            repository = await client.item(f"/repos/{owner}/{repo_name}")
        except http.HTTPNotFound:
            return responses.JSONResponse(status_code=404,
                                          content="repository not found")

    await github_events.send_refresh(_AREDIS_CACHE, _AREDIS_STREAM, repository)
    return responses.Response("Refresh queued", status_code=202)
Example #11
async def test_client_401_raise_ratelimit(
        httpserver: httpserver.HTTPServer) -> None:
    owner = github_types.GitHubLogin("owner")
    repo = "repo"

    httpserver.expect_request("/users/owner/installation").respond_with_json({
        "id":
        12345,
        "target_type":
        "User",
        "permissions": {
            "checks": "write",
            "contents": "write",
            "pull_requests": "write",
        },
        "account": {
            "login": "******",
            "id": 12345
        },
    })
    httpserver.expect_request(
        "/app/installations/12345/access_tokens").respond_with_json(
            {
                "token": "<token>",
                "expires_at": "2100-12-31T23:59:59Z"
            },
            headers={
                "X-RateLimit-Remaining": 5000,
                "X-RateLimit-Reset": 1234567890
            },
        )

    httpserver.expect_oneshot_request(
        "/repos/owner/repo/pull/1").respond_with_json(
            {"message": "quota !"},
            status=403,
            headers={
                "X-RateLimit-Remaining": 0,
                "X-RateLimit-Reset": 1234567890
            },
        )

    with mock.patch(
            "mergify_engine.config.GITHUB_API_URL",
            httpserver.url_for("/")[:-1],
    ):
        async with github.aget_client(owner) as client:
            with pytest.raises(exceptions.RateLimited):
                await client.item(f"/repos/{owner}/{repo}/pull/1")

    httpserver.check_assertions()
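Stripped of Mergify's client, the pytest-httpserver pattern above reduces to: register an expected request with a canned response, point any HTTP client at httpserver.url_for(...), then verify the stubbed interaction. A minimal sketch using plain httpx:

import httpx
from pytest_httpserver import HTTPServer

def test_rate_limit_stub(httpserver: HTTPServer) -> None:
    httpserver.expect_request("/repos/owner/repo/pull/1").respond_with_json(
        {"message": "quota !"},
        status=403,
        headers={"X-RateLimit-Remaining": "0"},
    )
    response = httpx.get(httpserver.url_for("/repos/owner/repo/pull/1"))
    assert response.status_code == 403
    assert response.headers["X-RateLimit-Remaining"] == "0"
    httpserver.check_assertions()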
Example #12
async def create_initial_summary(
        redis: utils.RedisCache,
        event: github_types.GitHubEventPullRequest) -> None:
    owner = event["repository"]["owner"]
    repo = event["pull_request"]["base"]["repo"]

    if not await redis.exists(
            context.Repository.get_config_location_cache_key(
                owner["login"],
                repo["name"],
            )):
        # Mergify is probably not activated on this repo
        return

    # NOTE(sileht): It's possible that a "push" event creates a summary before we
    # receive the pull_request/opened event, so we first check whether a summary
    # already exists to avoid posting it twice. Since this method can run in
    # parallel with the worker this is not a 100% reliable solution, but if we do
    # post a duplicate summary, check_api.set_check_run() handles this case and
    # updates both so users are not confused.
    summary_exists = await context.Context.summary_exists(
        redis, owner["id"], repo["id"], event["pull_request"])

    if summary_exists:
        return

    installation_json = await github.get_installation_from_account_id(
        owner["id"])
    async with github.aget_client(installation_json) as client:
        post_parameters = {
            "name": constants.SUMMARY_NAME,
            "head_sha": event["pull_request"]["head"]["sha"],
            "status": check_api.Status.IN_PROGRESS.value,
            "started_at": date.utcnow().isoformat(),
            "details_url": f"{event['pull_request']['html_url']}/checks",
            "output": {
                "title": "Your rules are under evaluation",
                "summary": "Be patient, the page will be updated soon.",
            },
            "external_id": str(event["pull_request"]["number"]),
        }
        try:
            await client.post(
                f"/repos/{event['pull_request']['base']['user']['login']}/{event['pull_request']['base']['repo']['name']}/check-runs",
                api_version="antiope",
                json=post_parameters,
            )
        except http.HTTPClientSideError as e:
            if e.status_code == 422 and "No commit found for SHA" in e.message:
                return
            raise
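The tail of create_initial_summary swallows exactly one expected failure (a 422 when the head commit has vanished) and re-raises everything else. The same narrow error-swallowing pattern as a self-contained sketch with stand-in classes:

class HTTPClientSideError(Exception):
    def __init__(self, status_code: int, message: str) -> None:
        self.status_code = status_code
        self.message = message

def post_check_run() -> None:
    raise HTTPClientSideError(422, "No commit found for SHA: deadbeef")

try:
    post_check_run()
except HTTPClientSideError as e:
    # Ignore only the one known, harmless failure; re-raise anything else.
    if not (e.status_code == 422 and "No commit found for SHA" in e.message):
        raise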
Example #13
    async def test_github_async_client(self):

        rules = {
            "pull_request_rules": [
                {
                    "name": "simulator",
                    "conditions": [f"base!={self.master_branch_name}"],
                    "actions": {"merge": {}},
                }
            ]
        }
        other_branch = self.get_full_branch_name("other")
        await self.setup_repo(yaml.dump(rules), test_branches=[other_branch])
        p1, _ = await self.create_pr()
        p2, _ = await self.create_pr()
        await self.create_pr(base=other_branch)

        client = github.aget_client("mergifyio-testing")

        url = f"/repos/mergifyio-testing/{self.REPO_NAME}/pulls"

        pulls = [p async for p in client.items(url)]
        self.assertEqual(3, len(pulls))

        pulls = [p async for p in client.items(url, per_page=1)]
        self.assertEqual(3, len(pulls))

        pulls = [p async for p in client.items(url, per_page=1, page=2)]
        self.assertEqual(2, len(pulls))

        pulls = [
            p async for p in client.items(url, base=other_branch, state="all")
        ]
        self.assertEqual(1, len(pulls))

        pulls = [p async for p in client.items(url, base="unknown")]
        self.assertEqual(0, len(pulls))

        pull = await client.item(f"{url}/{p1['number']}")
        self.assertEqual(p1["number"], pull["number"])

        pull = await client.item(f"{url}/{p2['number']}")
        self.assertEqual(p2["number"], pull["number"])

        with self.assertRaises(http.HTTPStatusError) as ctxt:
            await client.item(f"{url}/10000000000")

        self.assertEqual(404, ctxt.exception.response.status_code)
Example #14
async def test_client_401_raise_ratelimit(
        respx_mock: respx.MockRouter) -> None:
    owner_id = github_types.GitHubAccountIdType(12345)
    owner_login = github_types.GitHubLogin("owner")
    repo = "repo"

    respx_mock.get("/user/12345/installation").respond(
        200,
        json={
            "id": 12345,
            "target_type": "User",
            "permissions": {
                "checks": "write",
                "contents": "write",
                "pull_requests": "write",
            },
            "account": {
                "login": "******",
                "id": 12345
            },
        },
    )

    respx_mock.post("/app/installations/12345/access_tokens").respond(
        200,
        json={
            "token": "<token>",
            "expires_at": "2100-12-31T23:59:59Z"
        },
        headers={
            "X-RateLimit-Remaining": "5000",
            "X-RateLimit-Reset": "1234567890",
        },
    )

    respx_mock.get("/repos/owner/repo/pull/1").respond(
        403,
        json={"message": "quota !"},
        headers={
            "X-RateLimit-Remaining": "0",
            "X-RateLimit-Reset": "1234567890"
        },
    )

    installation_json = await github.get_installation_from_account_id(owner_id)
    async with github.aget_client(installation_json) as client:
        with pytest.raises(exceptions.RateLimited):
            await client.item(f"/repos/{owner_login}/{repo}/pull/1")
Example #15
    async def populate_with_collaborators_with_write_users_access(self) -> None:
        async with github.AsyncGithubClient(
            auth=github_app.GithubBearerAuth(),
        ) as app_client:
            async for installation in app_client.items(
                "/app/installations",
                resource_name="installations",
                page_limit=100,
            ):
                installation = typing.cast(
                    github_types.GitHubInstallation, installation
                )
                org = SeatAccount(
                    installation["account"]["id"],
                    installation["account"]["login"],
                )
                async with github.aget_client(installation) as client:
                    try:
                        async for repository in client.items(
                            "/installation/repositories",
                            list_items="repositories",
                            resource_name="repositories",
                            page_limit=100,
                        ):
                            repository = typing.cast(
                                github_types.GitHubRepository, repository
                            )
                            repo = SeatRepository(
                                repository["id"], repository["name"]
                            )
                            async for collaborator in client.items(
                                f"{repository['url']}/collaborators",
                                resource_name="collaborators",
                                page_limit=100,
                            ):
                                if collaborator["permissions"]["push"]:
                                    seat = SeatAccount(
                                        collaborator["id"],
                                        collaborator["login"],
                                    )
                                    repo_seats = self.seats[org][repo]
                                    if repo_seats["write_users"] is None:
                                        repo_seats["write_users"] = {seat}
                                    else:
                                        repo_seats["write_users"].add(seat)
                    except exceptions.MergifyNotInstalled:
                        LOG.warning(
                            "can't retrieve collaborators with write users access",
                            account_id=installation["account"]["id"],
                            account_login=installation["account"]["login"],
                            suspended_at=installation["suspended_at"],
                            suspended_by=installation["suspended_by"],
                        )
Example #16
async def test_run_command_with_user(user_id, permission, result, redis_cache,
                                     monkeypatch):
    client = github.aget_client(owner_name="Mergifyio", owner_id=123)

    ctxt = await _create_context(redis_cache, client)

    user = github_types.GitHubAccount(
        {
            "id": user_id,
            "login": "******",
            "type": "Bot",
            "avatar_url": "https://avatars.githubusercontent.com/u/583231?v=4",
        }, )

    class MockResponse:
        @staticmethod
        def json():
            return {
                "permission": permission,
                "user": {
                    "login": "******",
                },
            }

    async def mock_get(*args, **kwargs):
        return MockResponse()

    monkeypatch.setattr(client, "get", mock_get)

    http_calls = []

    async def mock_post(*args, **kwargs):
        http_calls.append((args, kwargs))
        return

    monkeypatch.setattr(client, "post", mock_post)

    await handle(ctxt=ctxt,
                 mergify_config={},
                 comment="@mergifyio something",
                 user=user)

    assert len(http_calls) == 1
    assert result in http_calls[0][1]["json"]["body"]
Example #17
async def create_initial_summary(
        redis: utils.RedisCache,
        event: github_types.GitHubEventPullRequest) -> None:
    owner = event["repository"]["owner"]["login"]

    if not await redis.exists(
            context.Repository.get_config_location_cache_key(
                event["pull_request"]["base"]["repo"]["owner"]["login"],
                event["pull_request"]["base"]["repo"]["name"],
            )):
        # Mergify is probably not activated on this repo
        return

    # NOTE(sileht): It's possible that a "push" event creates a summary before we
    # receive the pull_request/opened event, so we first check whether a summary
    # already exists to avoid posting it twice. Since this method can run in
    # parallel with the worker this is not a 100% reliable solution, but if we do
    # post a duplicate summary, check_api.set_check_run() handles this case and
    # updates both so users are not confused.
    sha = await context.Context.get_cached_last_summary_head_sha_from_pull(
        redis, event["pull_request"])

    if sha is not None:
        return

    async with github.aget_client(owner) as client:
        post_parameters = {
            "name": context.Context.SUMMARY_NAME,
            "head_sha": event["pull_request"]["head"]["sha"],
            "status": check_api.Status.IN_PROGRESS.value,
            "started_at": utils.utcnow().isoformat(),
            "details_url": f"{event['pull_request']['html_url']}/checks",
            "output": {
                "title": "Your rules are under evaluation",
                "summary": "Be patient, the page will be updated soon.",
            },
        }
        await client.post(
            f"/repos/{event['pull_request']['base']['user']['login']}/{event['pull_request']['base']['repo']['name']}/check-runs",
            api_version="antiope",  # type: ignore[call-arg]
            json=post_parameters,
        )
Example #18
    async def test_github_async_client_with_owner_id(self):

        rules = {
            "pull_request_rules": [
                {
                    "name": "fake PR",
                    "conditions": ["base=master"],
                    "actions": {"merge": {}},
                }
            ]
        }

        await self.setup_repo(yaml.dump(rules))
        p, _ = await self.create_pr()

        client = github.aget_client(owner_id=self.o_integration.id)

        url = f"/repos/{self.o_integration.login}/{self.r_o_integration.name}/pulls"

        pulls = [p async for p in client.items(url)]
        self.assertEqual(1, len(pulls))
Example #19
async def queues_by_owner_id(owner_id):
    global _AREDIS_CACHE
    queues = collections.defaultdict(dict)
    async for queue in _AREDIS_CACHE.scan_iter(
            match=f"merge-queue~{owner_id}~*"):
        _, _, repo_id, branch = queue.split("~")
        async with github.aget_client(owner_id=owner_id) as client:
            try:
                repo = await client.item(f"/repositories/{repo_id}")
            except exceptions.RateLimited:
                return responses.JSONResponse(
                    status_code=403,
                    content={
                        "message":
                        f"{client.auth.owner} account with {client.auth.owner_id} ID, rate limited by GitHub"
                    },
                )
            queues[client.auth.owner + "/" + repo["name"]][branch] = [
                int(pull) async for pull, _ in _AREDIS_CACHE.zscan_iter(queue)
            ]

    return responses.JSONResponse(status_code=200, content=queues)
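The aredis client used above is deprecated; the same scan pattern ports directly to redis.asyncio from redis-py. A sketch assuming a reachable local Redis and the same illustrative merge-queue~owner~repo~branch key layout:

import redis.asyncio as aioredis

async def queues_for_owner(owner_id: int) -> dict:
    client = aioredis.Redis()  # assumes Redis on localhost:6379
    queues: dict = {}
    async for key in client.scan_iter(match=f"merge-queue~{owner_id}~*"):
        _, _, repo_id, branch = key.decode().split("~")
        # zscan_iter yields (member, score) pairs; members are pull numbers.
        queues[f"{repo_id}/{branch}"] = [
            int(pull) async for pull, _ in client.zscan_iter(key)
        ]
    return queues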
Example #20
async def get_repository_context(
    owner: github_types.GitHubLogin = fastapi.Path(  # noqa: B008
        ..., description="The owner of the repository"),
    repository: github_types.GitHubRepositoryName = fastapi.Path(  # noqa: B008
        ..., description="The name of the repository"),
    redis_links: redis_utils.RedisLinks = fastapi.Depends(  # noqa: B008
        redis.get_redis_links),
    installation_json: github_types.GitHubInstallation = fastapi.Depends(  # noqa: B008
        get_installation),
) -> typing.AsyncGenerator[context.Repository, None]:
    async with github.aget_client(installation_json) as client:
        try:
            # Check this token has access to this repository
            repo = typing.cast(
                github_types.GitHubRepository,
                await client.item(f"/repos/{owner}/{repository}"),
            )
        except (http.HTTPNotFound, http.HTTPForbidden, http.HTTPUnauthorized):
            raise fastapi.HTTPException(status_code=404)

        sub = await subscription.Subscription.get_subscription(
            redis_links.cache, installation_json["account"]["id"])

        installation = context.Installation(installation_json, sub, client,
                                            redis_links)

        repository_ctxt = installation.get_repository_from_github_data(repo)

        # NOTE(sileht): Since this method is used as fastapi Depends only, it's safe to set this
        # for the ongoing http request
        sentry_sdk.set_user(
            {"username": repository_ctxt.installation.owner_login})
        sentry_sdk.set_tag("gh_owner",
                           repository_ctxt.installation.owner_login)
        sentry_sdk.set_tag("gh_repo", repository_ctxt.repo["name"])

        yield repository_ctxt
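get_repository_context is a yielding dependency: FastAPI runs the code before the yield when the request starts and the code after it when the request finishes, so the client stays open for the duration of the handler. A minimal sketch of that pattern with illustrative names:

import typing
import fastapi

app = fastapi.FastAPI()

async def get_repository_context() -> typing.AsyncGenerator[dict, None]:
    repository = {"name": "demo"}  # acquire resources here
    try:
        yield repository
    finally:
        pass  # release resources here (e.g. close the HTTP client)

@app.get("/repo")
async def read_repo(
    repository: dict = fastapi.Depends(get_repository_context),  # noqa: B008
) -> dict:
    return {"name": repository["name"]}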
Example #21
async def refresh_pull(
    owner: github_types.GitHubLogin,
    repo_name: github_types.GitHubRepositoryName,
    pull_request_number: github_types.GitHubPullRequestNumber,
    action: github_types.GitHubEventRefreshActionType = "user",
) -> responses.Response:
    action = RefreshActionSchema(action)
    async with github.aget_client(owner_name=owner) as client:
        try:
            repository = await client.item(f"/repos/{owner}/{repo_name}")
        except http.HTTPNotFound:
            return responses.JSONResponse(status_code=404,
                                          content="repository not found")

    global _AREDIS_STREAM, _AREDIS_CACHE
    await github_events.send_refresh(
        _AREDIS_CACHE,
        _AREDIS_STREAM,
        repository,
        pull_request_number=pull_request_number,
        action=action,
    )
    return responses.Response("Refresh queued", status_code=202)
Example #22
    async def test_github_async_client(self) -> None:

        rules = {
            "pull_request_rules": [
                {
                    "name": "simulator",
                    "conditions": [f"base!={self.main_branch_name}"],
                    "actions": {"merge": {}},
                }
            ]
        }
        other_branch = self.get_full_branch_name("other")
        await self.setup_repo(yaml.dump(rules), test_branches=[other_branch])
        p1 = await self.create_pr()
        p2 = await self.create_pr()
        await self.create_pr(base=other_branch)

        installation_json = await github.get_installation_from_login(
            github_types.GitHubLogin("mergifyio-testing"))
        client = github.aget_client(installation_json)

        url = f"/repos/mergifyio-testing/{self.RECORD_CONFIG['repository_name']}/pulls"

        pulls = [
            p async for p in client.items(
                url, resource_name="pull", page_limit=5)
        ]
        self.assertEqual(3, len(pulls))

        pulls = [
            p async for p in client.items(url,
                                          params={"per_page": "1"},
                                          resource_name="pull",
                                          page_limit=5)
        ]
        self.assertEqual(3, len(pulls))

        pulls = [
            p async for p in client.items(
                url,
                params={
                    "per_page": "1",
                    "page": "2"
                },
                resource_name="pull",
                page_limit=5,
            )
        ]
        self.assertEqual(2, len(pulls))

        pulls = [
            p async for p in client.items(
                url,
                params={
                    "base": other_branch,
                    "state": "all"
                },
                resource_name="pull",
                page_limit=5,
            )
        ]
        self.assertEqual(1, len(pulls))

        pulls = [
            p async for p in client.items(url,
                                          params={"base": "unknown"},
                                          resource_name="pull",
                                          page_limit=5)
        ]
        self.assertEqual(0, len(pulls))

        pull = await client.item(f"{url}/{p1['number']}")
        self.assertEqual(p1["number"], pull["number"])

        pull = await client.item(f"{url}/{p2['number']}")
        self.assertEqual(p2["number"], pull["number"])

        with self.assertRaises(http.HTTPStatusError) as ctxt:
            await client.item(f"{url}/10000000000")

        self.assertEqual(404, ctxt.exception.response.status_code)
Example #23
    async def asyncSetUp(self) -> None:
        super(FunctionalTestBase, self).setUp()

        # NOTE(sileht): don't preempt bucket consumption.
        # Otherwise preemption doesn't occur at the same moment during record
        # and replay, making some tests pass during record and fail during
        # replay.
        config.BUCKET_PROCESSING_MAX_SECONDS = 100000

        config.API_ENABLE = True

        self.existing_labels: typing.List[str] = []
        self.pr_counter: int = 0
        self.git_counter: int = 0

        mock.patch.object(branch_updater.gitter, "Gitter",
                          self.get_gitter).start()
        mock.patch.object(duplicate_pull.gitter, "Gitter",
                          self.get_gitter).start()

        # Web authentication always passes
        mock.patch("hmac.compare_digest", return_value=True).start()

        self.main_branch_name = self.get_full_branch_name("main")

        self.git = self.get_gitter(LOG)
        await self.git.init()
        self.addAsyncCleanup(self.git.cleanup)

        self.redis_links = redis_utils.RedisLinks(max_idle_time=0)
        await self.clear_redis()

        installation_json = await github.get_installation_from_account_id(
            config.TESTING_ORGANIZATION_ID)
        self.client_integration = github.aget_client(installation_json)
        self.client_admin = github.AsyncGithubInstallationClient(
            auth=github.GithubTokenAuth(token=config.ORG_ADMIN_PERSONAL_TOKEN,
                                        ))
        self.client_fork = github.AsyncGithubInstallationClient(
            auth=github.GithubTokenAuth(token=self.FORK_PERSONAL_TOKEN, ))
        self.addAsyncCleanup(self.client_integration.aclose)
        self.addAsyncCleanup(self.client_admin.aclose)
        self.addAsyncCleanup(self.client_fork.aclose)

        await self.client_admin.item("/user")
        await self.client_fork.item("/user")

        self.url_origin = (
            f"/repos/mergifyio-testing/{self.RECORD_CONFIG['repository_name']}"
        )
        self.url_fork = f"/repos/mergify-test2/{self.RECORD_CONFIG['repository_name']}"
        self.git_origin = f"{config.GITHUB_URL}/mergifyio-testing/{self.RECORD_CONFIG['repository_name']}"
        self.git_fork = (
            f"{config.GITHUB_URL}/mergify-test2/{self.RECORD_CONFIG['repository_name']}"
        )

        self.installation_ctxt = context.Installation(
            installation_json,
            self.subscription,
            self.client_integration,
            self.redis_links,
        )
        self.repository_ctxt = await self.installation_ctxt.get_repository_by_id(
            github_types.GitHubRepositoryIdType(
                self.RECORD_CONFIG["repository_id"]))

        # NOTE(sileht): We mock this method because when we replay tests, the
        # timing may not be the same as during recording, making the formatted
        # elapsed time different in the merge queue summary.
        def fake_pretty_datetime(dt: datetime.datetime) -> str:
            return "<fake_pretty_datetime()>"

        mock.patch(
            "mergify_engine.date.pretty_datetime",
            side_effect=fake_pretty_datetime,
        ).start()

        self._event_reader = EventReader(self.app,
                                         self.RECORD_CONFIG["repository_id"])
        await self._event_reader.drain()

        # Track concurrent worker activity
        real_consume_method = worker.StreamProcessor.consume

        self.worker_concurrency_works = 0

        async def tracked_consume(
            inner_self: worker.StreamProcessor,
            bucket_org_key: worker_lua.BucketOrgKeyType,
            owner_id: github_types.GitHubAccountIdType,
            owner_login_for_tracing: github_types.GitHubLoginForTracing,
        ) -> None:
            self.worker_concurrency_works += 1
            try:
                await real_consume_method(inner_self, bucket_org_key, owner_id,
                                          owner_login_for_tracing)
            finally:
                self.worker_concurrency_works -= 1

        worker.StreamProcessor.consume = tracked_consume  # type: ignore[assignment]

        def cleanup_consume() -> None:
            worker.StreamProcessor.consume = real_consume_method  # type: ignore[assignment]

        self.addCleanup(cleanup_consume)
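The tracked_consume wrapper above is a general wrap-and-restore instrumentation pattern: keep a reference to the real method, substitute a recording wrapper, and put the original back on cleanup. A self-contained sketch:

import asyncio

class StreamProcessor:
    async def consume(self, stream_name: str) -> str:
        return f"consumed {stream_name}"

real_consume = StreamProcessor.consume
consume_calls = []

async def tracked_consume(self, stream_name: str) -> str:
    consume_calls.append(stream_name)  # record, then delegate
    return await real_consume(self, stream_name)

StreamProcessor.consume = tracked_consume  # type: ignore[assignment]
try:
    assert asyncio.run(StreamProcessor().consume("s1")) == "consumed s1"
    assert consume_calls == ["s1"]
finally:
    StreamProcessor.consume = real_consume  # type: ignore[assignment]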
Example #24
async def report(
    url: str,
) -> typing.Union[context.Context, github.AsyncGithubInstallationClient, None]:
    redis_links = redis_utils.RedisLinks(max_idle_time=0)

    try:
        owner_login, repo, pull_number = _url_parser(url)
    except ValueError:
        print(f"{url} is not valid")
        return None

    try:
        installation_json = await github.get_installation_from_login(
            owner_login)
        client = github.aget_client(installation_json)
    except exceptions.MergifyNotInstalled:
        print(f"* Mergify is not installed on account {owner_login}")
        return None

    # Do a dumb request just to authenticate
    await client.get("/")

    print(f"* INSTALLATION ID: {installation_json['id']}")

    if repo is None:
        slug = None
    else:
        slug = owner_login + "/" + repo

    owner_id = installation_json["account"]["id"]
    cached_sub = await subscription.Subscription.get_subscription(
        redis_links.cache, owner_id)
    db_sub = await subscription.Subscription._retrieve_subscription_from_db(
        redis_links.cache, owner_id)

    cached_tokens = await user_tokens.UserTokens.get(redis_links.cache,
                                                     owner_id)
    if config.SAAS_MODE:
        db_tokens = typing.cast(
            user_tokens.UserTokens,
            (await user_tokens.UserTokensSaas._retrieve_from_db(
                redis_links.cache, owner_id)),
        )
    else:
        db_tokens = cached_tokens

    print("* Features (db):")
    for v in sorted(f.value for f in db_sub.features):
        print(f"  - {v}")
    print("* Features (cache):")
    for v in sorted(f.value for f in cached_sub.features):
        print(f"  - {v}")

    installation = context.Installation(installation_json, cached_sub, client,
                                        redis_links)

    await report_dashboard_synchro(installation.installation["id"], cached_sub,
                                   cached_tokens, "ENGINE-CACHE", slug)
    await report_dashboard_synchro(installation.installation["id"], db_sub,
                                   db_tokens, "DASHBOARD", slug)

    await report_worker_status(owner_login)

    if repo is not None:
        repository = await installation.get_repository_by_name(repo)

        print(
            f"* REPOSITORY IS {'PRIVATE' if repository.repo['private'] else 'PUBLIC'}"
        )

        print(f"* DEFAULT BRANCH: {repository.repo['default_branch']}")

        print("* CONFIGURATION:")
        mergify_config = None
        config_file = await repository.get_mergify_config_file()
        if not config_file:
            print(".mergify.yml is missing")
        else:
            print(f"Config filename: {config_file['path']}")
            print(config_file["decoded_content"])
            try:
                mergify_config = await repository.get_mergify_config()
            except rules.InvalidRules as e:  # pragma: no cover
                print(f"configuration is invalid {str(e)}")

        if pull_number is None:
            async for branch in typing.cast(
                    typing.AsyncGenerator[github_types.GitHubBranch, None],
                    client.items(
                        f"/repos/{owner_login}/{repo}/branches",
                        resource_name="branches",
                        page_limit=100,
                    ),
            ):
                q = merge_train.Train(repository, branch["name"])
                await q.load()
                await report_queue("TRAIN", q)

        else:
            repository = await installation.get_repository_by_name(
                github_types.GitHubRepositoryName(repo))
            try:
                ctxt = await repository.get_pull_request_context(
                    github_types.GitHubPullRequestNumber(int(pull_number)))
            except http.HTTPNotFound:
                print(f"Pull request `{url}` does not exist")
                return client

            # FIXME queues could also be printed if no pull number given
            # TODO(sileht): display train if any
            q = await merge_train.Train.from_context(ctxt)
            print(
                f"* TRAIN: {', '.join([f'#{p}' for p in await q.get_pulls()])}"
            )
            print("* PULL REQUEST:")
            pr_data = await ctxt.pull_request.items()
            pprint.pprint(pr_data, width=160)

            is_behind = await ctxt.is_behind
            print(f"is_behind: {is_behind}")

            print(f"mergeable_state: {ctxt.pull['mergeable_state']}")

            print("* MERGIFY LAST CHECKS:")
            for c in await ctxt.pull_engine_check_runs:
                print(
                    f"[{c['name']}]: {c['conclusion']} | {c['output'].get('title')} | {c['html_url']}"
                )
                print("> " + "\n> ".join(
                    ("No Summary", ) if c["output"]["summary"] is None else
                    c["output"]["summary"].split("\n")))

            if mergify_config is not None:
                print("* MERGIFY LIVE MATCHES:")
                pull_request_rules = mergify_config["pull_request_rules"]
                match = await pull_request_rules.get_pull_request_rule(ctxt)
                summary_title, summary = await actions_runner.gen_summary(
                    ctxt, pull_request_rules, match)
                print(f"[Summary]: success | {summary_title}")
                print("> " + "\n> ".join(summary.strip().split("\n")))

            return ctxt

    return client
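_url_parser is not shown in these examples. A hypothetical reconstruction, inferred only from how report() consumes its (owner, repo, pull_number) result; the accepted URL shapes are an assumption:

import re
import typing

def _url_parser(
    url: str,
) -> typing.Tuple[str, typing.Optional[str], typing.Optional[str]]:
    # Hypothetical: accepts https://github.com/<owner>[/<repo>[/pull/<number>]]
    m = re.match(
        r"https://github\.com/([^/]+)(?:/([^/]+))?(?:/pull/(\d+))?/?$", url
    )
    if m is None:
        raise ValueError(url)
    owner, repo, pull_number = m.groups()
    return owner, repo, pull_number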
Example #25
    async def asyncSetUp(self):
        super(FunctionalTestBase, self).setUp()
        self.existing_labels: typing.List[str] = []
        self.protected_branches: typing.Set[str] = set()
        self.pr_counter: int = 0
        self.git_counter: int = 0
        self.cassette_library_dir = os.path.join(CASSETTE_LIBRARY_DIR_BASE,
                                                 self.__class__.__name__,
                                                 self._testMethodName)

        # Recording setup
        if RECORD:
            if os.path.exists(self.cassette_library_dir):
                shutil.rmtree(self.cassette_library_dir)
            os.makedirs(self.cassette_library_dir)

        self.recorder = vcr.VCR(
            cassette_library_dir=self.cassette_library_dir,
            record_mode="all" if RECORD else "none",
            match_on=["method", "uri"],
            ignore_localhost=True,
            filter_headers=[
                ("Authorization", "<TOKEN>"),
                ("X-Hub-Signature", "<SIGNATURE>"),
                ("User-Agent", None),
                ("Accept-Encoding", None),
                ("Connection", None),
            ],
            before_record_response=self.response_filter,
        )

        if RECORD:
            github.CachedToken.STORAGE = {}
        else:
            # Never expire token during replay
            mock.patch.object(github_app,
                              "get_or_create_jwt",
                              return_value="<TOKEN>").start()
            mock.patch.object(
                github.GithubAppInstallationAuth,
                "get_access_token",
                return_value="<TOKEN>",
            ).start()

            # NOTE(sileht): the httpx vcrpy stubs do not replay auth_flow, as they
            # directly patch client.send(). So anything occurring during auth_flow
            # has to be mocked during replay.
            def get_auth(owner_name=None, owner_id=None, auth=None):
                if auth is None:
                    auth = github.get_auth(owner_name, owner_id)
                    auth.installation = {
                        "id": config.INSTALLATION_ID,
                    }
                    auth.permissions_need_to_be_updated = False
                    auth.owner_id = config.TESTING_ORGANIZATION_ID
                    auth.owner = config.TESTING_ORGANIZATION
                return auth

            def github_aclient(owner_name=None, owner_id=None, auth=None):
                return github.AsyncGithubInstallationClient(
                    get_auth(owner_name, owner_id, auth))

            mock.patch.object(github, "aget_client", github_aclient).start()

        mock.patch.object(branch_updater.gitter, "Gitter",
                          self.get_gitter).start()
        mock.patch.object(duplicate_pull.gitter, "Gitter",
                          self.get_gitter).start()

        if not RECORD:
            # NOTE(sileht): Don't wait exponentially during replay
            mock.patch.object(context.Context._ensure_complete.retry, "wait",
                              None).start()

        # Web authentication always passes
        mock.patch("hmac.compare_digest", return_value=True).start()

        branch_prefix_path = os.path.join(self.cassette_library_dir,
                                          "branch_prefix")

        if RECORD:
            self.BRANCH_PREFIX = datetime.datetime.utcnow().strftime(
                "%Y%m%d%H%M%S")
            with open(branch_prefix_path, "w") as f:
                f.write(self.BRANCH_PREFIX)
        else:
            with open(branch_prefix_path, "r") as f:
                self.BRANCH_PREFIX = f.read()

        self.master_branch_name = self.get_full_branch_name("master")

        self.git = self.get_gitter(LOG)
        await self.git.init()
        self.addAsyncCleanup(self.git.cleanup)

        await root.startup()
        self.app = httpx.AsyncClient(app=root.app, base_url="http://localhost")

        await self.clear_redis_cache()
        self.redis_cache = utils.create_aredis_for_cache(max_idle_time=0)
        self.subscription = subscription.Subscription(
            self.redis_cache,
            config.TESTING_ORGANIZATION_ID,
            self.SUBSCRIPTION_ACTIVE,
            "You're not nice",
            frozenset(
                getattr(subscription.Features, f)
                for f in subscription.Features.__members__)
            if self.SUBSCRIPTION_ACTIVE else frozenset(),
        )
        await self.subscription._save_subscription_to_cache()
        self.user_tokens = user_tokens.UserTokens(
            self.redis_cache,
            config.TESTING_ORGANIZATION_ID,
            {
                "mergify-test1": config.ORG_ADMIN_GITHUB_APP_OAUTH_TOKEN,
                "mergify-test3": config.ORG_USER_PERSONAL_TOKEN,
            },
        )
        await self.user_tokens.save_to_cache()

        # Let's start recording
        cassette = self.recorder.use_cassette("http.json")
        cassette.__enter__()
        self.addCleanup(cassette.__exit__)

        self.client_integration = github.aget_client(
            config.TESTING_ORGANIZATION, config.TESTING_ORGANIZATION_ID)
        self.client_admin = github.AsyncGithubInstallationClient(
            auth=github.GithubTokenAuth(token=config.ORG_ADMIN_PERSONAL_TOKEN))
        self.client_fork = github.AsyncGithubInstallationClient(
            auth=github.GithubTokenAuth(token=self.FORK_PERSONAL_TOKEN))
        self.addAsyncCleanup(self.client_integration.aclose)
        self.addAsyncCleanup(self.client_admin.aclose)
        self.addAsyncCleanup(self.client_fork.aclose)

        await self.client_admin.item("/user")
        await self.client_fork.item("/user")
        if RECORD:
            assert self.client_admin.auth.owner == "mergify-test1"
            assert self.client_fork.auth.owner == "mergify-test2"
        else:
            self.client_admin.auth.owner = "mergify-test1"
            self.client_fork.auth.owner = "mergify-test2"

        self.url_main = f"/repos/mergifyio-testing/{self.REPO_NAME}"
        self.url_fork = f"/repos/{self.client_fork.auth.owner}/{self.REPO_NAME}"
        self.git_main = f"{config.GITHUB_URL}/mergifyio-testing/{self.REPO_NAME}"
        self.git_fork = (
            f"{config.GITHUB_URL}/{self.client_fork.auth.owner}/{self.REPO_NAME}"
        )

        self.installation_ctxt = context.Installation(
            config.TESTING_ORGANIZATION_ID,
            config.TESTING_ORGANIZATION,
            self.subscription,
            self.client_integration,
            self.redis_cache,
        )
        self.repository_ctxt = context.Repository(self.installation_ctxt,
                                                  self.REPO_NAME, self.REPO_ID)

        real_get_subscription = subscription.Subscription.get_subscription

        async def fake_retrieve_subscription_from_db(redis_cache, owner_id):
            if owner_id == config.TESTING_ORGANIZATION_ID:
                return self.subscription
            return subscription.Subscription(
                redis_cache,
                owner_id,
                False,
                "We're just testing",
                set(),
            )

        async def fake_subscription(redis_cache, owner_id):
            if owner_id == config.TESTING_ORGANIZATION_ID:
                return await real_get_subscription(redis_cache, owner_id)
            return subscription.Subscription(
                redis_cache,
                owner_id,
                False,
                "We're just testing",
                set(),
            )

        mock.patch(
            "mergify_engine.subscription.Subscription._retrieve_subscription_from_db",
            side_effect=fake_retrieve_subscription_from_db,
        ).start()

        mock.patch(
            "mergify_engine.subscription.Subscription.get_subscription",
            side_effect=fake_subscription,
        ).start()

        async def fake_retrieve_user_tokens_from_db(redis_cache, owner_id):
            if owner_id == config.TESTING_ORGANIZATION_ID:
                return self.user_tokens
            return user_tokens.UserTokens(redis_cache, owner_id, {})

        real_get_user_tokens = user_tokens.UserTokens.get

        async def fake_user_tokens(redis_cache, owner_id):
            if owner_id == config.TESTING_ORGANIZATION_ID:
                return await real_get_user_tokens(redis_cache, owner_id)
            return user_tokens.UserTokens(redis_cache, owner_id, {})

        mock.patch(
            "mergify_engine.user_tokens.UserTokens._retrieve_from_db",
            side_effect=fake_retrieve_user_tokens_from_db,
        ).start()

        mock.patch(
            "mergify_engine.user_tokens.UserTokens.get",
            side_effect=fake_user_tokens,
        ).start()

        self._event_reader = EventReader(self.app)
        await self._event_reader.drain()

        # NOTE(sileht): Prepare a fresh redis
        await self.clear_redis_stream()
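The record/replay switch above is driven by vcrpy. A minimal sketch of the recorder configuration and cassette use, mirroring the options in asyncSetUp (paths and the request are illustrative):

import urllib.request
import vcr

recorder = vcr.VCR(
    cassette_library_dir="cassettes",
    record_mode="all",  # record everything; "none" replays without network
    match_on=["method", "uri"],
    filter_headers=[("Authorization", "<TOKEN>")],
    serializer="json",
)

with recorder.use_cassette("http.json"):
    # On record this hits the network and writes cassettes/http.json;
    # on replay the stored response is returned instead.
    urllib.request.urlopen("https://api.github.com/zen").read()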
Example #26
    async def consume(self, stream_name: StreamNameType) -> None:
        owner_login, owner_id = self._extract_owner(stream_name)
        LOG.debug("consoming stream", gh_owner=owner_login)

        try:
            async with self._translate_exception_to_retries(stream_name):
                sub = await subscription.Subscription.get_subscription(
                    self.redis_cache, owner_id
                )
            async with github.aget_client(owner_login) as client:
                installation = context.Installation(
                    owner_id, owner_login, sub, client, self.redis_cache
                )
                async with self._translate_exception_to_retries(stream_name):
                    pulls = await self._extract_pulls_from_stream(installation)
                if pulls:
                    client.set_requests_ratio(len(pulls))
                    await self._consume_pulls(installation, pulls)

                await self._refresh_merge_trains(installation)

        except StreamUnused:
            LOG.info("unused stream, dropping it", gh_owner=owner_login, exc_info=True)
            try:
                await self.redis_stream.delete(stream_name)
            except aredis.exceptions.ConnectionError:
                LOG.warning(
                    "failed to drop stream, it will be retried", stream_name=stream_name
                )
        except StreamRetry as e:
            log_method = (
                LOG.error
                if e.attempts >= STREAM_ATTEMPTS_LOGGING_THRESHOLD
                else LOG.info
            )
            log_method(
                "failed to process stream, retrying",
                attempts=e.attempts,
                retry_at=e.retry_at,
                gh_owner=owner_login,
                exc_info=True,
            )
            return
        except vcr_errors_CannotOverwriteExistingCassetteException:
            messages = await self.redis_stream.xrange(
                stream_name, count=config.STREAM_MAX_BATCH
            )
            for message_id, message in messages:
                LOG.info(msgpack.unpackb(message[b"event"], raw=False))
                await self.redis_stream.execute_command("XDEL", stream_name, message_id)

        except Exception:
            # Ignore it, it will be retried later
            LOG.error("failed to process stream", gh_owner=owner_login, exc_info=True)

        LOG.debug("cleanup stream start", stream_name=stream_name)
        try:
            await self.redis_stream.eval(
                self.ATOMIC_CLEAN_STREAM_SCRIPT, 1, stream_name.encode(), time.time()
            )
        except aredis.exceptions.ConnectionError:
            LOG.warning(
                "fail to cleanup stream, it maybe partially replayed",
                stream_name=stream_name,
            )
        LOG.debug("cleanup stream end", stream_name=stream_name)
Example #27
async def report(
    url: str,
) -> typing.Union[context.Context, github.AsyncGithubInstallationClient, None]:
    redis_cache = utils.create_aredis_for_cache(max_idle_time=0)

    try:
        owner, repo, pull_number = _url_parser(url)
    except ValueError:
        print(f"{url} is not valid")
        return None

    try:
        client = github.aget_client(owner)
    except exceptions.MergifyNotInstalled:
        print(f"* Mergify is not installed on account {owner}")
        return None

    # Do a dumb request just to authenticate
    await client.get("/")

    if client.auth.installation is None:
        print("No installation detected")
        return None

    print(f"* INSTALLATION ID: {client.auth.installation['id']}")

    if client.auth.owner_id is None:
        raise RuntimeError("Unable to get owner_id")

    if repo is None:
        slug = None
    else:
        slug = owner + "/" + repo

    cached_sub = await subscription.Subscription.get_subscription(
        redis_cache, client.auth.owner_id)
    db_sub = await subscription.Subscription._retrieve_subscription_from_db(
        redis_cache, client.auth.owner_id)

    cached_tokens = await user_tokens.UserTokens.get(redis_cache,
                                                     client.auth.owner_id)
    db_tokens = await user_tokens.UserTokens._retrieve_from_db(
        redis_cache, client.auth.owner_id)

    print(f"* SUBSCRIBED (cache/db): {cached_sub.active} / {db_sub.active}")
    print("* Features (cache):")
    for f in db_sub.features:
        print(f"  - {f.value}")
    print("* Features (db):")
    for f in cached_sub.features:
        print(f"  - {f.value}")

    await report_dashboard_synchro(client.auth.installation["id"], cached_sub,
                                   cached_tokens, "ENGINE-CACHE", slug)
    await report_dashboard_synchro(client.auth.installation["id"], db_sub,
                                   db_tokens, "DASHBOARD", slug)

    await report_worker_status(owner)

    installation = context.Installation(client.auth.owner_id, owner,
                                        cached_sub, client, redis_cache)

    if repo is not None:
        repo_info: github_types.GitHubRepository = await client.item(
            f"/repos/{owner}/{repo}")
        repository = context.Repository(installation, repo_info["name"],
                                        repo_info["id"])

        print(
            f"* REPOSITORY IS {'PRIVATE' if repo_info['private'] else 'PUBLIC'}"
        )

        print("* CONFIGURATION:")
        mergify_config = None
        config_file = await repository.get_mergify_config_file()
        if config_file is None:
            print(".mergify.yml is missing")
        else:
            print(f"Config filename: {config_file['path']}")
            print(config_file["decoded_content"].decode())
            try:
                mergify_config = rules.get_mergify_config(config_file)
            except rules.InvalidRules as e:  # pragma: no cover
                print(f"configuration is invalid {str(e)}")
            else:
                mergify_config["pull_request_rules"].rules.extend(
                    engine.MERGIFY_BUILTIN_CONFIG["pull_request_rules"].rules)

        if pull_number is None:
            async for branch in typing.cast(
                    typing.AsyncGenerator[github_types.GitHubBranch, None],
                    client.items(f"/repos/{owner}/{repo}/branches"),
            ):
                # TODO(sileht): Add some information about the train
                q: queue.QueueBase = naive.Queue(repository, branch["name"])
                await report_queue("QUEUES", q)

                q = merge_train.Train(repository, branch["name"])
                await q.load()
                await report_queue("TRAIN", q)

        else:
            repository = context.Repository(
                installation, github_types.GitHubRepositoryName(repo))
            ctxt = await repository.get_pull_request_context(
                github_types.GitHubPullRequestNumber(int(pull_number)))

            # FIXME queues could also be printed if no pull number given
            # TODO(sileht): display train if any
            q = await naive.Queue.from_context(ctxt)
            print(
                f"* QUEUES: {', '.join([f'#{p}' for p in await q.get_pulls()])}"
            )
            q = await merge_train.Train.from_context(ctxt)
            print(
                f"* TRAIN: {', '.join([f'#{p}' for p in await q.get_pulls()])}"
            )
            print("* PULL REQUEST:")
            pr_data = await ctxt.pull_request.items()
            pprint.pprint(pr_data, width=160)

            is_behind = await ctxt.is_behind
            print(f"is_behind: {is_behind}")

            print(f"mergeable_state: {ctxt.pull['mergeable_state']}")

            print("* MERGIFY LAST CHECKS:")
            for c in await ctxt.pull_engine_check_runs:
                print(
                    f"[{c['name']}]: {c['conclusion']} | {c['output'].get('title')}"
                )
                print("> " + "\n> ".join(
                    ("No Summary", ) if c["output"]["summary"] is None else
                    c["output"]["summary"].split("\n")))

            if mergify_config is not None:
                print("* MERGIFY LIVE MATCHES:")
                match = await mergify_config["pull_request_rules"
                                             ].get_pull_request_rule(ctxt)
                summary_title, summary = await actions_runner.gen_summary(
                    ctxt, match)
                print(f"[Summary]: success | {summary_title}")
                print("> " + "\n> ".join(summary.strip().split("\n")))

            return ctxt

    return client