async def test_get_mergify_config_location_from_cache(
    redis_cache: utils.RedisCache,
) -> None:
    """The repository must remember where the Mergify config file was found.

    First lookup probes the three candidate paths in priority order (two
    404s, then a hit on ``.github/mergify.yml``); a second lookup with a
    fresh in-memory cache object must go straight to the known-good path,
    presumably because the location is persisted elsewhere (e.g. Redis) —
    TODO confirm against Repository.get_mergify_config_file.
    """
    client = mock.AsyncMock()
    client.auth.owner = "foo"
    # Order matters: side_effect entries are consumed one per client.item()
    # call; the first two candidate locations are missing, the third exists.
    client.item.side_effect = [
        http.HTTPNotFound("Not Found", request=mock.Mock(), response=mock.Mock()),
        http.HTTPNotFound("Not Found", request=mock.Mock(), response=mock.Mock()),
        github_types.GitHubContentFile({
            "content": encodebytes("whatever".encode()).decode(),
            "type": "file",
            "path": ".github/mergify.yml",
            "sha": github_types.SHAType("zeazeaze"),
        }),
    ]
    installation = context.Installation(
        github_types.GitHubAccountIdType(0),
        github_types.GitHubLogin("foo"),
        subscription.Subscription(redis_cache, 0, False, "", frozenset()),
        client,
        redis_cache,
    )
    repository = context.Repository(
        installation, github_types.GitHubRepositoryName("bar")
    )
    await repository.get_mergify_config_file()
    # All three candidate paths were probed, in priority order.
    assert client.item.call_count == 3
    client.item.assert_has_calls([
        mock.call("/repos/foo/bar/contents/.mergify.yml"),
        mock.call("/repos/foo/bar/contents/.mergify/config.yml"),
        mock.call("/repos/foo/bar/contents/.github/mergify.yml"),
    ])
    client.item.reset_mock()
    client.item.side_effect = [
        github_types.GitHubContentFile({
            "content": encodebytes("whatever".encode()).decode(),
            "type": "file",
            "path": ".github/mergify.yml",
            "sha": github_types.SHAType("zeazeaze"),
        }),
    ]
    # Drop the per-repository in-memory cache: only the previously
    # discovered path should be fetched on the second lookup.
    repository._cache = context.RepositoryCache()
    await repository.get_mergify_config_file()
    assert client.item.call_count == 1
    client.item.assert_has_calls([
        mock.call("/repos/foo/bar/contents/.github/mergify.yml"),
    ])
async def test_get_commits_to_cherry_pick_merge(
    commits: mock.PropertyMock,
    context_getter: conftest.ContextGetterFixture,
) -> None:
    """For a PR merged with a merge commit, the commits to cherry-pick are
    the PR's own commits (here ``c1`` then ``c2``)."""
    c1 = github_types.CachedGitHubBranchCommit({
        "sha": github_types.SHAType("c1f"),
        "parents": [],
        "commit_message": "foobar",
        "commit_verification_verified": False,
    })
    # c2 is a child of c1 (parents reference c1's sha).
    c2 = github_types.CachedGitHubBranchCommit({
        "sha": github_types.SHAType("c2"),
        "parents": [c1["sha"]],
        "commit_message": "foobar",
        "commit_verification_verified": False,
    })

    async def fake_commits() -> typing.List[github_types.CachedGitHubBranchCommit]:
        return [c1, c2]

    # The patched `commits` property must return an awaitable, like the
    # real async property does.
    commits.return_value = fake_commits()
    client = mock.Mock()
    client.auth.get_access_token.return_value = "<token>"
    ctxt = await context_getter(github_types.GitHubPullRequestNumber(1))
    ctxt.repository.installation.client = client
    base_branch = github_types.CachedGitHubBranchCommit({
        "sha": github_types.SHAType("base_branch"),
        "parents": [],
        "commit_message": "foobar",
        "commit_verification_verified": False,
    })
    # A merge commit has two parents: the base branch head and the PR head.
    merge_commit = github_types.CachedGitHubBranchCommit({
        "sha": github_types.SHAType("merge_commit"),
        "parents": [base_branch["sha"], c2["sha"]],
        "commit_message": "foobar",
        "commit_verification_verified": False,
    })
    assert await duplicate_pull._get_commits_to_cherrypick(ctxt, merge_commit) == [
        c1,
        c2,
    ]
def make_pr(
    repo: github_types.GitHubRepository, owner: github_types.GitHubAccount
) -> github_types.GitHubPullRequest:
    """Build a minimal closed pull request payload for *owner* on *repo*.

    Both head and base point at *repo*; all free-form fields are left empty.
    """
    head_side = {
        "user": owner,
        "label": "",
        "ref": github_types.GitHubRefType(""),
        "sha": github_types.SHAType(""),
        "repo": repo,
    }
    base_side = {
        "ref": github_types.GitHubRefType("main"),
        "sha": github_types.SHAType(""),
        "label": "",
        "repo": repo,
        "user": owner,
    }
    payload = {
        "id": github_types.GitHubPullRequestId(github_types.GitHubIssueId(0)),
        "maintainer_can_modify": False,
        "head": head_side,
        "user": owner,
        "number": github_types.GitHubPullRequestNumber(
            github_types.GitHubIssueNumber(0)
        ),
        "rebaseable": False,
        "draft": False,
        "merge_commit_sha": None,
        "html_url": "",
        "state": "closed",
        "mergeable_state": "unknown",
        "merged_by": None,
        "merged": False,
        "merged_at": None,
        "labels": [],
        "base": base_side,
    }
    return github_types.GitHubPullRequest(payload)
async def test_get_already_merged_summary(
    merged_by: str,
    raw_config: str,
    result: str,
    context_getter: conftest.ContextGetterFixture,
) -> None:
    """The "already merged" summary must match *result* for a PR merged by
    *merged_by* under the configuration *raw_config* (parametrized)."""
    ctxt = await context_getter(
        github_types.GitHubPullRequestNumber(1),
        merged=True,
        merged_by=github_types.GitHubAccount({
            "id": github_types.GitHubAccountIdType(1),
            "login": github_types.GitHubLogin(merged_by),
            "type": "User",
            "avatar_url": "",
        }),
    )
    # Pre-populate the branch-protection cache (with "none") so the code
    # under test does not try to hit the API for it.
    ctxt.repository._caches.branch_protections[
        github_types.GitHubRefType("main")
    ] = None
    file = context.MergifyConfigFile(
        type="file",
        content="whatever",
        sha=github_types.SHAType("azertyuiop"),
        path="whatever",
        decoded_content=raw_config,
    )
    config = rules.get_mergify_config(file)
    match = await config["pull_request_rules"].get_pull_request_rule(ctxt)
    assert result == await actions_runner.get_already_merged_summary(ctxt, match)
async def config_validator(
    data: fastapi.UploadFile = fastapi.File(...),  # noqa: B008
) -> responses.PlainTextResponse:  # pragma: no cover
    """Validate an uploaded Mergify configuration file.

    Returns HTTP 200 with a success message when the file parses as a valid
    configuration, or HTTP 400 with the parse error text otherwise.
    """
    try:
        raw = await data.read()
        # UploadFile.read() may hand back str or bytes; normalize to bytes.
        payload = raw.encode() if isinstance(raw, str) else raw
        rules.get_mergify_config(
            context.MergifyConfigFile({
                "path": data.filename,
                "type": "file",
                "content": base64.b64encode(payload).decode(),
                "decoded_content": payload,
                "sha": github_types.SHAType(
                    hashlib.sha1(payload).hexdigest()  # nosec
                ),
            })
        )
    except Exception as e:
        # Any failure (YAML error, schema violation, ...) is reported as-is.
        return responses.PlainTextResponse(str(e), status_code=400)
    return responses.PlainTextResponse("The configuration is valid", status_code=200)
def get_config(self) -> rules.MergifyConfig:
    """Parse ``self.mergify_yml`` and return the resulting configuration.

    Raises a fastapi.HTTPException (422) whose detail lists every
    validation error when the configuration is invalid.
    """
    config_file = context.MergifyConfigFile(
        {
            "type": "file",
            "content": "whatever",
            "sha": github_types.SHAType("whatever"),
            "path": ".mergify.yml",
            "decoded_content": self.mergify_yml,
        }
    )
    try:
        return rules.get_mergify_config(config_file)
    except rules.InvalidRules as exc:
        # One detail entry per validation error, sorted for determinism.
        detail = [
            {
                "loc": ("body", "mergify_yml"),
                "msg": rules.InvalidRules.format_error(error),
                "type": "mergify_config_error",
            }
            for error in sorted(exc.errors, key=str)
        ]
        raise fastapi.HTTPException(status_code=422, detail=detail)
def test_command_loader_with_defaults() -> None:
    """``defaults.actions`` from the configuration must be merged into the
    action config of a command parsed by the command loader."""
    raw_config = """
defaults:
  actions:
    backport:
      branches:
        - branch-3.1
        - branch-3.2
      ignore_conflicts: false
"""
    file = context.MergifyConfigFile(
        type="file",
        content="whatever",
        sha=github_types.SHAType("azertyuiop"),
        path="whatever",
        decoded_content=raw_config,
    )
    config = rules.get_mergify_config(file)
    command = commands_runner.load_command(config, "@mergifyio backport")
    assert command.name == "backport"
    assert command.args == ""
    assert isinstance(command.action, BackportAction)
    # branches/ignore_conflicts come from `defaults`, the rest are the
    # action's built-in defaults.
    assert command.action.config == {
        "assignees": [],
        "branches": ["branch-3.1", "branch-3.2"],
        "bot_account": None,
        "regexes": [],
        "ignore_conflicts": False,
        "labels": [],
        "label_conflicts": "conflicts",
        "title": "{{ title }} (backport #{{ number }})",
        "body": "This is an automatic backport of pull request #{{number}} done by [Mergify](https://mergify.com).\n{{ cherry_pick_error }}",
    }
"id": github_types.GitHubAccountIdType(0), "login": github_types.GitHubLogin(""), "type": "User", "avatar_url": "", }, "labels": [], "base": { "user": { "id": github_types.GitHubAccountIdType(0), "login": github_types.GitHubLogin(""), "type": "User", "avatar_url": "", }, "label": "", "ref": github_types.GitHubRefType(""), "sha": github_types.SHAType(""), "repo": { "url": "", "default_branch": github_types.GitHubRefType(""), "full_name": "", "archived": False, "id": github_types.GitHubRepositoryIdType(0), "private": False, "name": github_types.GitHubRepositoryName(""), "owner": { "login": github_types.GitHubLogin(""), "id": github_types.GitHubAccountIdType(0), "type": "User", "avatar_url": "", }, },
async def test_configuration_check_not_needed_with_configuration_deleted(
    github_server: respx.MockRouter, redis_cache: utils.RedisCache
) -> None:
    """A "configuration deleted" check combined with an existing Summary
    must still report the configuration as changed."""
    # Installation lookup performed by the GitHub client on startup.
    github_server.get("/user/12345/installation").respond(
        200,
        json={
            "id": 12345,
            "permissions": {
                "checks": "write",
                "contents": "write",
                "pull_requests": "write",
            },
            "target_type": GH_OWNER["type"],
            "account": GH_OWNER,
        },
    )
    github_server.get(f"{BASE_URL}/pulls/1",).respond(
        200,
        json=typing.cast(typing.Dict[typing.Any, typing.Any], GH_PULL),
    )
    # The configuration file still exists on the default branch.
    github_server.get(f"{BASE_URL}/contents/.mergify.yml").respond(
        200,
        json=typing.cast(
            typing.Dict[typing.Any, typing.Any],
            github_types.GitHubContentFile(
                {
                    "type": "file",
                    "content": FAKE_MERGIFY_CONTENT,
                    "path": ".mergify.yml",
                    "sha": github_types.SHAType(
                        "739e5ec79e358bae7a150941a148b4131233ce2c"
                    ),
                }
            ),
        ),
    )
    # Summary is present, no need to redo the check
    github_server.get(
        f"{BASE_URL}/commits/{GH_PULL['head']['sha']}/check-runs"
    ).respond(
        200,
        json={"check_runs": [SUMMARY_CHECK, CONFIGURATION_DELETED_CHECK]},
    )
    installation_json = await github.get_installation_from_account_id(GH_OWNER["id"])
    async with github.AsyncGithubInstallationClient(
        github.GithubAppInstallationAuth(installation_json)
    ) as client:
        installation = context.Installation(
            installation_json,
            subscription.Subscription(
                redis_cache,
                0,
                "",
                frozenset([subscription.Features.PUBLIC_REPOSITORY]),
                0,
            ),
            client,
            redis_cache,
            mock.Mock(),
        )
        repository = context.Repository(installation, GH_REPO)
        ctxt = await repository.get_pull_request_context(
            github_types.GitHubPullRequestNumber(1)
        )
        main_config_file = await repository.get_mergify_config_file()
        changed = await engine._check_configuration_changes(ctxt, main_config_file)
        assert changed
async def test_configuration_initial(
    github_server: respx.MockRouter, redis_cache: utils.RedisCache
) -> None:
    """A configuration file introduced by the pull request itself (no config
    on the default branch yet) must be detected as a configuration change."""
    github_server.get("/user/12345/installation").respond(
        200,
        json={
            "id": 12345,
            "permissions": {
                "checks": "write",
                "contents": "write",
                "pull_requests": "write",
            },
            "target_type": GH_OWNER["type"],
            "account": GH_OWNER,
        },
    )
    github_server.get(f"{BASE_URL}/pulls/1",).respond(
        200,
        json=typing.cast(typing.Dict[typing.Any, typing.Any], GH_PULL),
    )
    # Requests WITHOUT `ref=<merge_commit_sha>` target the default branch:
    # none of the three candidate config locations exist there yet.
    github_server.route(
        respx.patterns.M(method="GET", path=f"{BASE_URL}/contents/.mergify.yml")
        & ~respx.patterns.M(params__contains={"ref": GH_PULL["merge_commit_sha"]})
    ).respond(404)
    github_server.route(
        respx.patterns.M(method="GET", path=f"{BASE_URL}/contents/.mergify/config.yml")
        & ~respx.patterns.M(params__contains={"ref": GH_PULL["merge_commit_sha"]})
    ).respond(404)
    github_server.route(
        respx.patterns.M(method="GET", path=f"{BASE_URL}/contents/.github/mergify.yml")
        & ~respx.patterns.M(params__contains={"ref": GH_PULL["merge_commit_sha"]})
    ).respond(404)
    # At the PR merge commit, `.mergify.yml` exists (added by the PR).
    github_server.route(
        respx.patterns.M(method="GET", path=f"{BASE_URL}/contents/.mergify.yml")
        & respx.patterns.M(params__contains={"ref": GH_PULL["merge_commit_sha"]})
    ).respond(
        200,
        json=typing.cast(
            typing.Dict[typing.Any, typing.Any],
            github_types.GitHubContentFile(
                {
                    "type": "file",
                    "content": FAKE_MERGIFY_CONTENT,
                    "path": ".mergify.yml",
                    "sha": github_types.SHAType(
                        "739e5ec79e358bae7a150941a148b4131233ce2c"
                    ),
                }
            ),
        ),
    )
    github_server.route(
        respx.patterns.M(method="GET", path=f"{BASE_URL}/contents/.github/mergify.yml")
        & respx.patterns.M(params__contains={"ref": GH_PULL["merge_commit_sha"]})
    ).respond(404)
    github_server.route(
        respx.patterns.M(method="GET", path=f"{BASE_URL}/contents/.mergify/config.yml")
        & respx.patterns.M(params__contains={"ref": GH_PULL["merge_commit_sha"]})
    ).respond(404)
    # No Summary check run exists yet, so the engine will create one.
    github_server.get(
        f"{BASE_URL}/commits/{GH_PULL['head']['sha']}/check-runs"
    ).respond(200, json={"check_runs": []})
    github_server.post(f"{BASE_URL}/check-runs").respond(
        200, json=typing.cast(typing.Dict[typing.Any, typing.Any], CHECK_RUN)
    )
    installation_json = await github.get_installation_from_account_id(GH_OWNER["id"])
    async with github.AsyncGithubInstallationClient(
        github.GithubAppInstallationAuth(installation_json)
    ) as client:
        installation = context.Installation(
            installation_json,
            subscription.Subscription(
                redis_cache,
                0,
                "",
                frozenset([subscription.Features.PUBLIC_REPOSITORY]),
                0,
            ),
            client,
            redis_cache,
            mock.Mock(),
        )
        repository = context.Repository(installation, GH_REPO)
        ctxt = await repository.get_pull_request_context(
            github_types.GitHubPullRequestNumber(1)
        )
        main_config_file = await repository.get_mergify_config_file()
        # Nothing on the default branch...
        assert main_config_file is None
        # ...but the PR introduces a config: that counts as a change.
        changed = await engine._check_configuration_changes(ctxt, main_config_file)
        assert changed
"node_id": "42", "locked": False, "assignees": [], "requested_reviewers": [], "requested_teams": [], "milestone": None, "title": "", "body": "", "updated_at": github_types.ISODateTimeType("2021-06-01T18:41:39Z"), "created_at": github_types.ISODateTimeType("2021-06-01T18:41:39Z"), "closed_at": None, "id": github_types.GitHubPullRequestId(0), "maintainer_can_modify": False, "rebaseable": False, "draft": False, "merge_commit_sha": github_types.SHAType("base-sha"), "labels": [], "number": github_types.GitHubPullRequestNumber(6), "merged": False, "commits": 1, "state": "open", "html_url": "<html_url>", "base": { "label": "", "sha": github_types.SHAType("base-sha"), "user": { "login": github_types.GitHubLogin("owner"), "id": github_types.GitHubAccountIdType(0), "type": "User", "avatar_url": "", },
async def test_summary_synchronization_cache() -> None:
    """Posting the Summary check must record the head sha it was posted for,
    and clearing the cache must forget it."""
    client = mock.MagicMock()
    client.auth.get_access_token.return_value = "<token>"
    ctxt = context.Context(
        client,
        {
            "id": github_types.GitHubPullRequestId(github_types.GitHubIssueId(0)),
            "maintainer_can_modify": False,
            "rebaseable": False,
            "draft": False,
            "merge_commit_sha": None,
            "labels": [],
            "number": github_types.GitHubPullRequestNumber(
                github_types.GitHubIssueNumber(6)
            ),
            "merged": True,
            "state": "closed",
            "html_url": "<html_url>",
            "base": {
                "label": "",
                "sha": github_types.SHAType("sha"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                },
                "ref": github_types.GitHubRefType("ref"),
                # NOTE(review): "label" appears twice in this literal (both
                # empty); the second occurrence wins — harmless but worth
                # cleaning up.
                "label": "",
                "repo": {
                    "archived": False,
                    "url": "",
                    "default_branch": github_types.GitHubRefType(""),
                    "id": github_types.GitHubRepositoryIdType(456),
                    "full_name": "user/ref",
                    "name": "name",
                    "private": False,
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                    },
                },
            },
            "head": {
                "label": "",
                # This sha is what the summary cache should remember.
                "sha": github_types.SHAType("old-sha-one"),
                "ref": github_types.GitHubRefType("fork"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                },
                "repo": {
                    "archived": False,
                    "url": "",
                    "default_branch": github_types.GitHubRefType(""),
                    "id": github_types.GitHubRepositoryIdType(123),
                    "full_name": "fork/other",
                    "name": "other",
                    "private": False,
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                    },
                },
            },
            "user": {
                "login": github_types.GitHubLogin("user"),
                "id": github_types.GitHubAccountIdType(0),
                "type": "User",
            },
            "merged_by": None,
            "merged_at": None,
            "mergeable_state": "clean",
        },
        subscription.Subscription(1, False, "", {}, frozenset()),
    )
    # Nothing cached before the summary check is posted.
    assert await ctxt.get_cached_last_summary_head_sha() is None
    await ctxt.set_summary_check(
        check_api.Result(check_api.Conclusion.SUCCESS, "foo", "bar")
    )
    # The cache now holds the head sha the summary was posted for.
    assert await ctxt.get_cached_last_summary_head_sha() == "old-sha-one"
    ctxt.clear_cached_last_summary_head_sha()
    assert await ctxt.get_cached_last_summary_head_sha() is None
async def test_cleanup_pending_actions_with_no_associated_rules(
    context_getter: conftest.ContextGetterFixture,
) -> None:
    """A rule that was PENDING previously but no longer exists in the current
    evaluation must have its check run updated and its pull removed from the
    merge train.

    Here "Rule: remove outdated approvals (queue)" is PENDING in
    ``previous_conclusions`` and absent from ``current_conclusions``.
    """
    ctxt = await context_getter(42)
    previous_conclusions = {
        "Rule: title contains cleanup (label)": check_api.Conclusion.CANCELLED,
        "Rule: ask to resolve conflict (comment)": check_api.Conclusion.NEUTRAL,
        "Rule: title contains Helm (label)": check_api.Conclusion.CANCELLED,
        "Rule: remove outdated approvals (queue)": check_api.Conclusion.PENDING,
        "Rule: title contains build (label)": check_api.Conclusion.CANCELLED,
        "Rule: automatic merge (queue)": check_api.Conclusion.NEUTRAL,
        "Rule: title contains rebase (label)": check_api.Conclusion.CANCELLED,
        "Rule: title contains CI, testing or e2e (label)": check_api.Conclusion.CANCELLED,
        "Rule: automatic merge (delete_head_branch)": check_api.Conclusion.NEUTRAL,
        "Rule: title contains DNM (label)": check_api.Conclusion.CANCELLED,
        "Rule: title contains CephFS (label)": check_api.Conclusion.CANCELLED,
        "Rule: title contains doc (label)": check_api.Conclusion.SUCCESS,
        "Rule: automatic merge PR having ready-to-merge label (delete_head_branch)": check_api.Conclusion.NEUTRAL,
        "Rule: automatic merge PR having ready-to-merge label (dismiss_reviews)": check_api.Conclusion.CANCELLED,
        "Rule: title indicates a bug fix (label)": check_api.Conclusion.CANCELLED,
        "Rule: backport patches to release-v3.4 branch (backport)": check_api.Conclusion.NEUTRAL,
        "Rule: title contains RBD (label)": check_api.Conclusion.CANCELLED,
        "Rule: ask to resolve conflict (queue)": check_api.Conclusion.NEUTRAL,
        "Rule: automatic merge PR having ready-to-merge label (queue)": check_api.Conclusion.NEUTRAL,
        "Rule: title contains Mergify (label)": check_api.Conclusion.CANCELLED,
        "Rule: automatic merge (dismiss_reviews)": check_api.Conclusion.CANCELLED,
        "Rule: remove outdated approvals (dismiss_reviews)": check_api.Conclusion.SUCCESS,
    }
    # Same rules, minus the two "(queue)" entries that disappeared.
    current_conclusions = {
        "Rule: title indicates a bug fix (label)": check_api.Conclusion.CANCELLED,
        "Rule: title contains doc (label)": check_api.Conclusion.SUCCESS,
        "Rule: backport patches to release-v3.4 branch (backport)": check_api.Conclusion.NEUTRAL,
        "Rule: title contains rebase (label)": check_api.Conclusion.CANCELLED,
        "Rule: title contains build (label)": check_api.Conclusion.CANCELLED,
        "Rule: title contains RBD (label)": check_api.Conclusion.CANCELLED,
        "Rule: automatic merge (queue)": check_api.Conclusion.NEUTRAL,
        "Rule: title contains Mergify (label)": check_api.Conclusion.CANCELLED,
        "Rule: title contains cleanup (label)": check_api.Conclusion.CANCELLED,
        "Rule: automatic merge PR having ready-to-merge label (delete_head_branch)": check_api.Conclusion.NEUTRAL,
        "Rule: automatic merge PR having ready-to-merge label (queue)": check_api.Conclusion.NEUTRAL,
        "Rule: automatic merge PR having ready-to-merge label (dismiss_reviews)": check_api.Conclusion.CANCELLED,
        "Rule: ask to resolve conflict (comment)": check_api.Conclusion.NEUTRAL,
        "Rule: automatic merge (dismiss_reviews)": check_api.Conclusion.CANCELLED,
        "Rule: title contains CephFS (label)": check_api.Conclusion.CANCELLED,
        "Rule: automatic merge (delete_head_branch)": check_api.Conclusion.NEUTRAL,
        "Rule: title contains DNM (label)": check_api.Conclusion.CANCELLED,
        "Rule: title contains CI, testing or e2e (label)": check_api.Conclusion.CANCELLED,
        "Rule: remove outdated approvals (dismiss_reviews)": check_api.Conclusion.SUCCESS,
        "Rule: title contains Helm (label)": check_api.Conclusion.CANCELLED,
    }
    # Materialize an in-progress check run for every previously-PENDING rule
    # (exactly one here).
    checks = [
        github_types.CachedGitHubCheckRun(
            name=check,
            id=1,
            app_id=config.INTEGRATION_ID,
            app_name="mergify",
            app_avatar_url="",
            external_id="",
            head_sha=github_types.SHAType("azertyio"),
            status="in_progress",
            output={
                "title": "",
                "summary": "",
                "text": None,
                "annotations_count": 0,
                "annotations": [],
                "annotations_url": "",
            },
            conclusion=None,
            completed_at=github_types.ISODateTimeType(""),
            html_url="",
        )
        for check, state in previous_conclusions.items()
        if state == check_api.Conclusion.PENDING
    ]
    ctxt._caches.pull_check_runs.set(checks)
    with (
        mock.patch.object(merge_train.Train, "force_remove_pull") as force_remove_pull,
        mock.patch.object(check_api, "set_check_run") as set_check_run,
    ):
        await actions_runner.cleanup_pending_actions_with_no_associated_rules(
            ctxt, current_conclusions, previous_conclusions
        )
        # The orphaned pending check was updated and the pull was pulled
        # out of the merge train.
        assert set_check_run.called
        assert force_remove_pull.called
async def test_get_commits_to_cherry_pick_merge(
    commits: mock.PropertyMock,
    redis_cache: utils.RedisCache,
) -> None:
    """For a PR merged with a merge commit, the commits to cherry-pick are
    the PR's own commits (``c1`` then ``c2``).

    This variant uses the raw ``GitHubBranchCommit`` payloads, where
    ``parents`` holds full commit dicts rather than bare shas.
    """
    c1 = github_types.GitHubBranchCommit({
        "sha": github_types.SHAType("c1f"),
        "parents": [],
        "commit": {"message": "foobar"},
    })
    c2 = github_types.GitHubBranchCommit({
        "sha": github_types.SHAType("c2"),
        "parents": [c1],
        "commit": {"message": "foobar"},
    })

    async def fake_commits():
        return [c1, c2]

    # The patched `commits` property must return an awaitable.
    commits.return_value = fake_commits()
    client = mock.Mock()
    client.auth.get_access_token.return_value = "<token>"
    gh_owner = github_types.GitHubAccount({
        "login": github_types.GitHubLogin("user"),
        "id": github_types.GitHubAccountIdType(0),
        "type": "User",
        "avatar_url": "",
    })
    gh_repo = github_types.GitHubRepository({
        "full_name": "user/name",
        "name": github_types.GitHubRepositoryName("name"),
        "private": False,
        "id": github_types.GitHubRepositoryIdType(0),
        "owner": gh_owner,
        "archived": False,
        "url": "",
        "html_url": "",
        "default_branch": github_types.GitHubRefType("ref"),
    })
    installation = context.Installation(
        github_types.GitHubAccountIdType(123),
        github_types.GitHubLogin("user"),
        subscription.Subscription(redis_cache, 0, False, "", frozenset()),
        client,
        redis_cache,
    )
    repository = context.Repository(installation, gh_repo)
    ctxt = await context.Context.create(
        repository,
        {
            "number": github_types.GitHubPullRequestNumber(6),
            "commits": 1,
            "merged": True,
            "state": "closed",
            "html_url": "<html_url>",
            "id": github_types.GitHubPullRequestId(0),
            "maintainer_can_modify": True,
            "labels": [],
            "rebaseable": True,
            "draft": True,
            "merge_commit_sha": None,
            "title": "foobar",
            "changed_files": 1,
            "base": {
                "label": "user:ref",
                "sha": github_types.SHAType("sha"),
                "ref": github_types.GitHubRefType("ref"),
                "user": gh_owner,
                "repo": github_types.GitHubRepository(
                    {
                        "full_name": "user/ref",
                        "name": github_types.GitHubRepositoryName("name"),
                        "private": False,
                        "id": github_types.GitHubRepositoryIdType(0),
                        "owner": gh_owner,
                        "archived": False,
                        "url": "",
                        "html_url": "",
                        "default_branch": github_types.GitHubRefType("ref"),
                    }
                ),
            },
            "head": {
                "label": "user:ref",
                "sha": github_types.SHAType("sha"),
                "user": gh_owner,
                "ref": github_types.GitHubRefType("fork"),
                "repo": github_types.GitHubRepository(
                    {
                        "full_name": "fork/other",
                        "name": github_types.GitHubRepositoryName("name"),
                        "private": False,
                        "id": github_types.GitHubRepositoryIdType(0),
                        "owner": gh_owner,
                        "archived": False,
                        "url": "",
                        "html_url": "",
                        "default_branch": github_types.GitHubRefType("ref"),
                    }
                ),
            },
            "user": gh_owner,
            "merged_at": None,
            "merged_by": None,
            "mergeable_state": "clean",
        },
    )
    base_branch = github_types.GitHubBranchCommit({
        "sha": github_types.SHAType("base_branch"),
        "parents": [],
        "commit": {"message": "foobar"},
    })
    # A merge commit has two parents: the base branch head and the PR head.
    merge_commit = github_types.GitHubBranchCommit({
        "sha": github_types.SHAType("merge_commit"),
        "parents": [base_branch, c2],
        "commit": {"message": "foobar"},
    })
    assert await duplicate_pull._get_commits_to_cherrypick(ctxt, merge_commit) == [
        c1,
        c2,
    ]
conditions: - author={config.BOT_USER_LOGIN} - head~=^mergify/(bp|copy)/ - closed actions: delete_head_branch: """ MERGIFY_BUILTIN_CONFIG = rules.UserConfigurationSchema( rules.YamlSchema(MERGIFY_BUILTIN_CONFIG_YAML)) DEFAULT_CONFIG_FILE = context.MergifyConfigFile( decoded_content=b"", type="file", content="<default>", sha=github_types.SHAType("<default>"), path="<default>", ) async def _check_configuration_changes( ctxt: context.Context, current_mergify_config_file: typing.Optional[context.MergifyConfigFile], ) -> bool: if ctxt.pull["base"]["repo"]["default_branch"] != ctxt.pull["base"]["ref"]: return False config_file_to_validate: typing.Optional[context.MergifyConfigFile] = None preferred_filename = (None if current_mergify_config_file is None else current_mergify_config_file["path"]) # NOTE(sileht): Just a shorcut to do two requests instead of three.
{ "title": "", "id": github_types.GitHubPullRequestId(0), "maintainer_can_modify": False, "rebaseable": False, "draft": False, "merge_commit_sha": None, "labels": [], "number": github_types.GitHubPullRequestNumber(6), "merged": True, "commits": 1, "state": "closed", "html_url": "<html_url>", "base": { "label": "", "sha": github_types.SHAType("sha"), "user": { "login": github_types.GitHubLogin("owner"), "id": github_types.GitHubAccountIdType(0), "type": "User", "avatar_url": "", }, "ref": github_types.GitHubRefType("main"), "label": "", "repo": GH_REPO, }, "head": { "label": "", "sha": github_types.SHAType("old-sha-one"), "ref": github_types.GitHubRefType("fork"), "user": {
def a_pull_request() -> github_types.GitHubPullRequest:
    """Return a canned merged-and-closed pull request payload for tests.

    The PR originates from a fork ("fork/other") and targets "user/repo";
    all free-form fields are left empty or at harmless defaults.
    """
    author = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("user"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    target_repo = github_types.GitHubRepository(
        {
            "archived": False,
            "url": "",
            "html_url": "",
            "default_branch": github_types.GitHubRefType(""),
            "id": github_types.GitHubRepositoryIdType(456),
            "full_name": "user/repo",
            "name": github_types.GitHubRepositoryName("repo"),
            "private": False,
            "owner": author,
        }
    )
    base_side = {
        "label": "",
        "sha": github_types.SHAType("sha"),
        "user": {
            "login": github_types.GitHubLogin("user"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        },
        "ref": github_types.GitHubRefType("ref"),
        "repo": target_repo,
    }
    head_side = {
        "label": "",
        "sha": github_types.SHAType("old-sha-one"),
        "ref": github_types.GitHubRefType("fork"),
        "user": {
            "login": github_types.GitHubLogin("user"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        },
        "repo": {
            "archived": False,
            "url": "",
            "html_url": "",
            "default_branch": github_types.GitHubRefType(""),
            "id": github_types.GitHubRepositoryIdType(123),
            "full_name": "fork/other",
            "name": github_types.GitHubRepositoryName("other"),
            "private": False,
            "owner": {
                "login": github_types.GitHubLogin("user"),
                "id": github_types.GitHubAccountIdType(0),
                "type": "User",
                "avatar_url": "",
            },
        },
    }
    return github_types.GitHubPullRequest(
        {
            "node_id": "42",
            "locked": False,
            "assignees": [],
            "requested_reviewers": [],
            "requested_teams": [],
            "milestone": None,
            "title": "",
            "updated_at": github_types.ISODateTimeType("2021-06-01T18:41:39Z"),
            "created_at": github_types.ISODateTimeType("2021-06-01T18:41:39Z"),
            "closed_at": None,
            "id": github_types.GitHubPullRequestId(0),
            "maintainer_can_modify": False,
            "rebaseable": False,
            "draft": False,
            "merge_commit_sha": None,
            "labels": [],
            "number": github_types.GitHubPullRequestNumber(6),
            "commits": 1,
            "merged": True,
            "state": "closed",
            "changed_files": 1,
            "html_url": "<html_url>",
            "base": base_side,
            "head": head_side,
            "user": {
                "login": github_types.GitHubLogin("user"),
                "id": github_types.GitHubAccountIdType(0),
                "type": "User",
                "avatar_url": "",
            },
            "merged_by": None,
            "merged_at": None,
            "mergeable_state": "clean",
            "mergeable": True,
            "body": None,
        }
    )
async def test_get_commits_to_cherry_pick_rebase(
    commits: mock.PropertyMock,
    context_getter: conftest.ContextGetterFixture,
) -> None:
    """For a PR merged by rebase, the commits to cherry-pick are the rebased
    copies of the PR's commits, resolved by walking parents from the merge
    head."""
    c1 = github_types.CachedGitHubBranchCommit({
        "sha": github_types.SHAType("c1f"),
        "parents": [],
        "commit_message": "foobar",
        "commit_verification_verified": False,
    })
    c2 = github_types.CachedGitHubBranchCommit({
        "sha": github_types.SHAType("c2"),
        "parents": [c1["sha"]],
        "commit_message": "foobar",
        "commit_verification_verified": False,
    })
    commits.return_value = [c1, c2]
    client = mock.Mock()
    client.auth.get_access_token.return_value = "<token>"
    # `items` serves the "pulls associated with a sha" lookups.
    client.items.side_effect = fake_get_github_pulls_from_sha
    ctxt = await context_getter(github_types.GitHubPullRequestNumber(6))
    ctxt.repository.installation.client = client
    base_branch = github_types.GitHubBranchCommitParent(
        {"sha": github_types.SHAType("base_branch")}
    )
    # The rebased chain: base_branch <- rebased_c1 <- rebased_c2.
    rebased_c1 = github_types.GitHubBranchCommit({
        "sha": github_types.SHAType("rebased_c1"),
        "parents": [base_branch],
        "commit": {
            "message": "hello c1",
            "verification": {"verified": False},
        },
    })
    rebased_c2 = github_types.GitHubBranchCommit({
        "sha": github_types.SHAType("rebased_c2"),
        "parents": [rebased_c1],
        "commit": {
            "message": "hello c2",
            "verification": {"verified": False},
        },
    })

    async def fake_get_github_commit_from_sha(
        url: str, api_version=None
    ) -> github_types.GitHubBranchCommit:
        # Resolve /commits/<sha> lookups against the two rebased commits.
        if url.endswith("/commits/rebased_c1"):
            return rebased_c1
        if url.endswith("/commits/rebased_c2"):
            return rebased_c2
        raise RuntimeError(f"Unknown URL {url}")

    client.item.side_effect = fake_get_github_commit_from_sha
    assert await duplicate_pull._get_commits_to_cherrypick(
        ctxt, github_types.to_cached_github_branch_commit(rebased_c2)
    ) == [
        github_types.to_cached_github_branch_commit(rebased_c1),
        github_types.to_cached_github_branch_commit(rebased_c2),
    ]
import pytest from mergify_engine import config from mergify_engine import context from mergify_engine import github_types from mergify_engine import rules from mergify_engine.actions.backport import BackportAction from mergify_engine.actions.rebase import RebaseAction from mergify_engine.engine import commands_runner from mergify_engine.tests.unit import conftest EMPTY_CONFIG = rules.get_mergify_config( context.MergifyConfigFile( type="file", content="whatever", sha=github_types.SHAType("azertyuiop"), path="whatever", decoded_content="", )) def test_command_loader() -> None: with pytest.raises(commands_runner.CommandInvalid): commands_runner.load_command(EMPTY_CONFIG, "@mergifyio notexist foobar\n") with pytest.raises(commands_runner.CommandInvalid): commands_runner.load_command(EMPTY_CONFIG, "@mergifyio comment foobar\n") with pytest.raises(commands_runner.CommandInvalid):
async def test_get_mergify_config_location_from_cache(
    redis_cache: utils.RedisCache,
) -> None:
    """The repository must remember where the Mergify config file was found.

    First lookup probes the three candidate paths in priority order (two
    404s, then a hit on ``.github/mergify.yml``); a second lookup with a
    fresh in-memory cache object must go straight to the known-good path.
    """
    client = mock.AsyncMock()
    client.auth.owner = "foo"
    # Order matters: side_effect entries are consumed one per client.item()
    # call; the first two candidate locations are missing, the third exists.
    client.item.side_effect = [
        http.HTTPNotFound("Not Found", request=mock.Mock(), response=mock.Mock()),
        http.HTTPNotFound("Not Found", request=mock.Mock(), response=mock.Mock()),
        github_types.GitHubContentFile({
            "content": encodebytes("whatever".encode()).decode(),
            "type": "file",
            "path": ".github/mergify.yml",
            "sha": github_types.SHAType("zeazeaze"),
        }),
    ]
    gh_owner = github_types.GitHubAccount({
        "login": github_types.GitHubLogin("foobar"),
        "id": github_types.GitHubAccountIdType(0),
        "type": "User",
        "avatar_url": "",
    })
    gh_repo = github_types.GitHubRepository({
        "full_name": "foobar/xyz",
        "name": github_types.GitHubRepositoryName("xyz"),
        "private": False,
        "id": github_types.GitHubRepositoryIdType(0),
        "owner": gh_owner,
        "archived": False,
        "url": "",
        "html_url": "",
        "default_branch": github_types.GitHubRefType("ref"),
    })
    installation = context.Installation(
        github_types.GitHubAccountIdType(0),
        github_types.GitHubLogin("foobar"),
        subscription.Subscription(redis_cache, 0, False, "", frozenset()),
        client,
        redis_cache,
    )
    repository = context.Repository(installation, gh_repo)
    await repository.get_mergify_config_file()
    # All three candidate paths were probed, in priority order.
    assert client.item.call_count == 3
    client.item.assert_has_calls([
        mock.call("/repos/foobar/xyz/contents/.mergify.yml", params={}),
        mock.call("/repos/foobar/xyz/contents/.mergify/config.yml", params={}),
        mock.call("/repos/foobar/xyz/contents/.github/mergify.yml", params={}),
    ])
    client.item.reset_mock()
    client.item.side_effect = [
        github_types.GitHubContentFile({
            "content": encodebytes("whatever".encode()).decode(),
            "type": "file",
            "path": ".github/mergify.yml",
            "sha": github_types.SHAType("zeazeaze"),
        }),
    ]
    # Drop the per-repository in-memory cache: only the previously
    # discovered path should be fetched on the second lookup.
    repository._cache = context.RepositoryCache()
    await repository.get_mergify_config_file()
    assert client.item.call_count == 1
    client.item.assert_has_calls([
        mock.call("/repos/foobar/xyz/contents/.github/mergify.yml", params={}),
    ])
async def test_get_pull_request_rule(redis_cache: utils.RedisCache) -> None:
    """Exercise rule matching against a fully mocked pull request context.

    Builds a fake GitHub client (reviews, files, checks, statuses, team
    members), then evaluates many rule sets against the same context:
    empty conditions, base-branch conditions, author conditions, CI/review
    thresholds, team reviewers, forbidden labels, team expansion, and
    branch-protection-injected conditions.
    """
    client = mock.Mock()

    # Canned API payloads served by the fake client below.
    get_reviews = [
        {
            "user": {"login": "******", "id": 12321, "type": "User"},
            "state": "APPROVED",
            "author_association": "MEMBER",
        }
    ]
    get_files = [{"filename": "README.rst"}, {"filename": "setup.py"}]
    get_team_members = [
        {"login": "******", "id": 12321},
        {"login": "******", "id": 2644},
    ]
    get_checks: typing.List[github_types.GitHubCheckRun] = []
    get_statuses: typing.List[github_types.GitHubStatus] = [
        {
            "context": "continuous-integration/fake-ci",
            "state": "success",
            "description": "foobar",
            "target_url": "http://example.com",
            "avatar_url": "",
        }
    ]

    async def client_item(url, *args, **kwargs):
        # Single-item GET endpoints the engine is expected to hit.
        if url == "/repos/another-jd/name/collaborators/sileht/permission":
            return {"permission": "write"}
        elif url == "/repos/another-jd/name/collaborators/jd/permission":
            return {"permission": "write"}
        raise RuntimeError(f"not handled url {url}")

    client.item.side_effect = client_item

    async def client_items(url, *args, **kwargs):
        # Paginated endpoints, replayed from the canned payloads above.
        if url == "/repos/another-jd/name/pulls/1/reviews":
            for r in get_reviews:
                yield r
        elif url == "/repos/another-jd/name/pulls/1/files":
            for f in get_files:
                yield f
        elif url == "/repos/another-jd/name/commits/<sha>/check-runs":
            for c in get_checks:
                yield c
        elif url == "/repos/another-jd/name/commits/<sha>/status":
            for s in get_statuses:
                yield s
        elif url == "/orgs/another-jd/teams/my-reviewers/members":
            for tm in get_team_members:
                yield tm
        else:
            raise RuntimeError(f"not handled url {url}")

    client.items.side_effect = client_items

    installation = context.Installation(
        github_types.GitHubAccountIdType(2644),
        github_types.GitHubLogin("another-jd"),
        subscription.Subscription(redis_cache, 0, False, "", frozenset()),
        client,
        redis_cache,
    )
    repository = context.Repository(
        installation,
        github_types.GitHubRepositoryName("name"),
        github_types.GitHubRepositoryIdType(123321),
    )
    ctxt = await context.Context.create(
        repository,
        github_types.GitHubPullRequest(
            {
                "id": github_types.GitHubPullRequestId(0),
                "number": github_types.GitHubPullRequestNumber(1),
                "commits": 1,
                "html_url": "<html_url>",
                "merge_commit_sha": None,
                "maintainer_can_modify": True,
                "rebaseable": True,
                "state": "closed",
                "merged_by": None,
                "merged_at": None,
                "merged": False,
                "draft": False,
                "mergeable_state": "unstable",
                "labels": [],
                "changed_files": 1,
                "base": {
                    "label": "repo",
                    "ref": github_types.GitHubRefType("master"),
                    "repo": {
                        "id": github_types.GitHubRepositoryIdType(123321),
                        "name": github_types.GitHubRepositoryName("name"),
                        "full_name": "another-jd/name",
                        "private": False,
                        "archived": False,
                        "url": "",
                        "default_branch": github_types.GitHubRefType(""),
                        "owner": {
                            "login": github_types.GitHubLogin("another-jd"),
                            "id": github_types.GitHubAccountIdType(2644),
                            "type": "User",
                            "avatar_url": "",
                        },
                    },
                    "user": {
                        "login": github_types.GitHubLogin("another-jd"),
                        "id": github_types.GitHubAccountIdType(2644),
                        "type": "User",
                        "avatar_url": "",
                    },
                    "sha": github_types.SHAType("mew"),
                },
                "head": {
                    "label": "foo",
                    "ref": github_types.GitHubRefType("myfeature"),
                    "sha": github_types.SHAType("<sha>"),
                    "repo": {
                        "id": github_types.GitHubRepositoryIdType(123321),
                        "name": github_types.GitHubRepositoryName("head"),
                        "full_name": "another-jd/head",
                        "private": False,
                        "archived": False,
                        "url": "",
                        "default_branch": github_types.GitHubRefType(""),
                        "owner": {
                            "login": github_types.GitHubLogin("another-jd"),
                            "id": github_types.GitHubAccountIdType(2644),
                            "type": "User",
                            "avatar_url": "",
                        },
                    },
                    "user": {
                        "login": github_types.GitHubLogin("another-jd"),
                        "id": github_types.GitHubAccountIdType(2644),
                        "type": "User",
                        "avatar_url": "",
                    },
                },
                "title": "My awesome job",
                "user": {
                    "login": github_types.GitHubLogin("another-jd"),
                    "id": github_types.GitHubAccountIdType(2644),
                    "type": "User",
                    "avatar_url": "",
                },
            }
        ),
    )

    # Empty conditions
    pull_request_rules = rules.PullRequestRules(
        [rules.Rule(name="default", conditions=rules.RuleConditions([]), actions={})]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert [
        rules.EvaluatedRule.from_rule(r, rules.RuleMissingConditions([]), [])
        for r in match.rules
    ] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    # Single rule matching on base branch.
    pull_request_rules = pull_request_rule_from_list(
        [{"name": "hello", "conditions": ["base:master"], "actions": {}}]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["hello"]
    assert [r.name for r in match.matching_rules] == ["hello"]
    assert [
        rules.EvaluatedRule.from_rule(r, rules.RuleMissingConditions([]), [])
        for r in match.rules
    ] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    # Two rules, both matching.
    pull_request_rules = pull_request_rule_from_list(
        [
            {"name": "hello", "conditions": ["base:master"], "actions": {}},
            {"name": "backport", "conditions": ["base:master"], "actions": {}},
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["hello", "backport"]
    assert [r.name for r in match.matching_rules] == ["hello", "backport"]
    assert [
        rules.EvaluatedRule.from_rule(r, rules.RuleMissingConditions([]), [])
        for r in match.rules
    ] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    # Author condition not satisfied: only "backport" matches.
    pull_request_rules = pull_request_rule_from_list(
        [
            {"name": "hello", "conditions": ["author:foobar"], "actions": {}},
            {"name": "backport", "conditions": ["base:master"], "actions": {}},
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["hello", "backport"]
    assert [r.name for r in match.matching_rules] == ["backport"]
    for rule in match.rules:
        assert rule.actions == {}

    # Author condition satisfied: both match.
    pull_request_rules = pull_request_rule_from_list(
        [
            {"name": "hello", "conditions": ["author:another-jd"], "actions": {}},
            {"name": "backport", "conditions": ["base:master"], "actions": {}},
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["hello", "backport"]
    assert [r.name for r in match.matching_rules] == ["hello", "backport"]
    assert [
        rules.EvaluatedRule.from_rule(r, rules.RuleMissingConditions([]), [])
        for r in match.rules
    ] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    # No match
    pull_request_rules = pull_request_rule_from_list(
        [
            {
                "name": "merge",
                "conditions": [
                    "base=xyz",
                    "check-success=continuous-integration/fake-ci",
                    "#approved-reviews-by>=1",
                ],
                "actions": {},
            }
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["merge"]
    assert [r.name for r in match.matching_rules] == []

    # Same rule on the right base branch: matches.
    pull_request_rules = pull_request_rule_from_list(
        [
            {
                "name": "merge",
                "conditions": [
                    "base=master",
                    "check-success=continuous-integration/fake-ci",
                    "#approved-reviews-by>=1",
                ],
                "actions": {},
            }
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["merge"]
    assert [r.name for r in match.matching_rules] == ["merge"]
    assert [
        rules.EvaluatedRule.from_rule(r, rules.RuleMissingConditions([]), [])
        for r in match.rules
    ] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    # Several partially-matching rules; check missing_conditions reporting.
    pull_request_rules = pull_request_rule_from_list(
        [
            {
                "name": "merge",
                "conditions": [
                    "base=master",
                    "check-success=continuous-integration/fake-ci",
                    "#approved-reviews-by>=2",
                ],
                "actions": {},
            },
            {
                "name": "fast merge",
                "conditions": [
                    "base=master",
                    "label=fast-track",
                    "check-success=continuous-integration/fake-ci",
                    "#approved-reviews-by>=1",
                ],
                "actions": {},
            },
            {
                "name": "fast merge with alternate ci",
                "conditions": [
                    "base=master",
                    "label=fast-track",
                    "check-success=continuous-integration/fake-ci-bis",
                    "#approved-reviews-by>=1",
                ],
                "actions": {},
            },
            {
                "name": "fast merge from a bot",
                "conditions": [
                    "base=master",
                    "author=mybot",
                    "check-success=continuous-integration/fake-ci",
                ],
                "actions": {},
            },
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == [
        "merge",
        "fast merge",
        "fast merge with alternate ci",
        "fast merge from a bot",
    ]
    assert [r.name for r in match.matching_rules] == [
        "merge",
        "fast merge",
        "fast merge with alternate ci",
    ]
    for rule in match.rules:
        assert rule.actions == {}
    assert match.matching_rules[0].name == "merge"
    assert len(match.matching_rules[0].missing_conditions) == 1
    assert (
        str(match.matching_rules[0].missing_conditions[0]) == "#approved-reviews-by>=2"
    )
    assert match.matching_rules[1].name == "fast merge"
    assert len(match.matching_rules[1].missing_conditions) == 1
    assert str(match.matching_rules[1].missing_conditions[0]) == "label=fast-track"
    assert match.matching_rules[2].name == "fast merge with alternate ci"
    assert len(match.matching_rules[2].missing_conditions) == 2
    assert str(match.matching_rules[2].missing_conditions[0]) == "label=fast-track"
    assert (
        str(match.matching_rules[2].missing_conditions[1])
        == "check-success=continuous-integration/fake-ci-bis"
    )

    # Team conditions with one review missing
    pull_request_rules = pull_request_rule_from_list(
        [
            {
                "name": "default",
                "conditions": [
                    "approved-reviews-by=@another-jd/my-reviewers",
                    "#approved-reviews-by>=2",
                ],
                "actions": {},
            }
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert match.matching_rules[0].name == "default"
    assert len(match.matching_rules[0].missing_conditions) == 1
    assert (
        str(match.matching_rules[0].missing_conditions[0]) == "#approved-reviews-by>=2"
    )

    # Add a second approving review, then invalidate the cached reviews so
    # the next evaluation re-reads them from the fake client.
    get_reviews.append(
        {
            "user": {"login": "******", "id": 2644, "type": "User"},
            "state": "APPROVED",
            "author_association": "MEMBER",
        }
    )
    del ctxt._cache["reviews"]
    del ctxt._cache["consolidated_reviews"]

    # Team conditions with no review missing
    pull_request_rules = pull_request_rule_from_list(
        [
            {
                "name": "default",
                "conditions": [
                    "approved-reviews-by=@another-jd/my-reviewers",
                    "#approved-reviews-by>=2",
                ],
                "actions": {},
            }
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert match.matching_rules[0].name == "default"
    assert len(match.matching_rules[0].missing_conditions) == 0

    # Forbidden labels, when no label set
    pull_request_rules = pull_request_rule_from_list(
        [
            {
                "name": "default",
                "conditions": ["-label~=^(status/wip|status/blocked|review/need2)$"],
                "actions": {},
            }
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert match.matching_rules[0].name == "default"
    assert len(match.matching_rules[0].missing_conditions) == 0

    # Forbidden labels, when forbiden label set
    ctxt.pull["labels"] = [
        {"id": 0, "color": "#1234", "default": False, "name": "status/wip"}
    ]
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert match.matching_rules[0].name == "default"
    assert len(match.matching_rules[0].missing_conditions) == 1
    assert str(match.matching_rules[0].missing_conditions[0]) == (
        "-label~=^(status/wip|status/blocked|review/need2)$"
    )

    # Forbidden labels, when other label set
    ctxt.pull["labels"] = [
        {"id": 0, "color": "#1234", "default": False, "name": "allowed"}
    ]
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert match.matching_rules[0].name == "default"
    assert len(match.matching_rules[0].missing_conditions) == 0

    # Test team expander
    pull_request_rules = pull_request_rule_from_list(
        [
            {
                "name": "default",
                "conditions": ["author~=^(user1|user2|another-jd)$"],
                "actions": {},
            }
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert match.matching_rules[0].name == "default"
    assert len(match.matching_rules[0].missing_conditions) == 0

    # branch protection
    async def client_item_with_branch_protection_enabled(url, *args, **kwargs):
        if url == "/repos/another-jd/name/branches/master":
            return {
                "protection": {
                    "enabled": True,
                    "required_status_checks": {"contexts": ["awesome-ci"]},
                },
            }
        raise RuntimeError(f"not handled url {url}")

    client.item.side_effect = client_item_with_branch_protection_enabled

    # With branch protection, the "merge" action gets a synthetic
    # check-success condition injected and the rule is duplicated per action.
    pull_request_rules = pull_request_rule_from_list(
        [
            {
                "name": "default",
                "conditions": [],
                "actions": {"merge": {}, "comment": {"message": "yo"}},
            }
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default", "default"]
    assert list(match.matching_rules[0].actions.keys()) == ["merge"]
    assert [str(c) for c in match.matching_rules[0].conditions] == [
        "check-success=awesome-ci"
    ]
    assert [str(c) for c in match.matching_rules[0].missing_conditions] == [
        "check-success=awesome-ci"
    ]
    assert list(match.matching_rules[1].actions.keys()) == ["comment"]
    assert match.matching_rules[1].conditions == []
async def test_configuration_initial( github_server: httpserver.HTTPServer, redis_cache: utils.RedisCache, ) -> None: github_server.expect_oneshot_request( f"{BASE_URL}/pulls/1", ).respond_with_json( GH_PULL, status=200, ) github_server.expect_oneshot_request( f"{BASE_URL}/contents/.mergify.yml", ).respond_with_data(status=404) github_server.expect_oneshot_request( f"{BASE_URL}/contents/.mergify/config.yml", ).respond_with_data( status=404) github_server.expect_oneshot_request( f"{BASE_URL}/contents/.github/mergify.yml", ).respond_with_data( status=404) github_server.expect_oneshot_request( f"{BASE_URL}/contents/.mergify.yml", query_string={ "ref": GH_PULL["head"]["sha"] }, ).respond_with_json( github_types.GitHubContentFile({ "type": "file", "content": FAKE_MERGIFY_CONTENT, "path": ".mergify.yml", "sha": github_types.SHAType("739e5ec79e358bae7a150941a148b4131233ce2c"), }), status=200, ) github_server.expect_oneshot_request( f"{BASE_URL}/commits/{GH_PULL['head']['sha']}/check-runs" ).respond_with_json({"check_runs": []}, status=200) github_server.expect_oneshot_request(f"{BASE_URL}/check-runs", method="POST").respond_with_json( {}, status=200) async with github.AsyncGithubInstallationClient( github.get_auth(GH_OWNER["login"])) as client: installation = context.Installation( GH_OWNER["id"], GH_OWNER["login"], subscription.Subscription(redis_cache, 0, False, "", frozenset(), 0), client, redis_cache, ) repository = context.Repository(installation, GH_REPO) ctxt = await repository.get_pull_request_context( github_types.GitHubPullRequestNumber(1)) main_config_file = await repository.get_mergify_config_file() assert main_config_file is None changed = await engine._check_configuration_changes( ctxt, main_config_file) assert changed github_server.check_assertions()
async def test_summary_synchronization_cache(
    redis_cache: utils.RedisCache,
) -> None:
    """Check caching of the last summary head SHA around set/clear.

    The cached value must be absent initially, equal to the pull's head SHA
    after ``set_summary_check``, and absent again after
    ``clear_cached_last_summary_head_sha``.

    Fix: the ``"base"`` dict literal contained a duplicate ``"label"`` key
    (the second silently overwrote the first); the duplicate is removed.
    """
    gh_owner = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("user"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    gh_repo = github_types.GitHubRepository(
        {
            "archived": False,
            "url": "",
            "default_branch": github_types.GitHubRefType(""),
            "id": github_types.GitHubRepositoryIdType(456),
            "full_name": "user/ref",
            "name": github_types.GitHubRepositoryName("name"),
            "private": False,
            "owner": gh_owner,
        }
    )

    async def items(*args, **kwargs):
        # Empty async iterator: the context has no checks/statuses to fetch.
        if False:
            yield
        return

    async def post_check(*args, **kwargs):
        return mock.Mock()

    client = mock.AsyncMock()
    client.auth.get_access_token.return_value = "<token>"
    client.items = items
    client.post.side_effect = post_check

    sub = subscription.Subscription(redis_cache, 0, False, "", frozenset())
    installation = context.Installation(
        gh_owner["id"],
        gh_owner["login"],
        sub,
        client,
        redis_cache,
    )
    repository = context.Repository(installation, gh_repo["name"], gh_repo["id"])
    ctxt = await context.Context.create(
        repository,
        {
            "title": "",
            "id": github_types.GitHubPullRequestId(0),
            "maintainer_can_modify": False,
            "rebaseable": False,
            "draft": False,
            "merge_commit_sha": None,
            "labels": [],
            "number": github_types.GitHubPullRequestNumber(6),
            "commits": 1,
            "merged": True,
            "state": "closed",
            "changed_files": 1,
            "html_url": "<html_url>",
            "base": {
                "label": "",
                "sha": github_types.SHAType("sha"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "ref": github_types.GitHubRefType("ref"),
                "repo": gh_repo,
            },
            "head": {
                "label": "",
                "sha": github_types.SHAType("old-sha-one"),
                "ref": github_types.GitHubRefType("fork"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "repo": {
                    "archived": False,
                    "url": "",
                    "default_branch": github_types.GitHubRefType(""),
                    "id": github_types.GitHubRepositoryIdType(123),
                    "full_name": "fork/other",
                    "name": github_types.GitHubRepositoryName("other"),
                    "private": False,
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                        "avatar_url": "",
                    },
                },
            },
            "user": {
                "login": github_types.GitHubLogin("user"),
                "id": github_types.GitHubAccountIdType(0),
                "type": "User",
                "avatar_url": "",
            },
            "merged_by": None,
            "merged_at": None,
            "mergeable_state": "clean",
        },
    )
    assert await ctxt.get_cached_last_summary_head_sha() is None
    await ctxt.set_summary_check(
        check_api.Result(check_api.Conclusion.SUCCESS, "foo", "bar")
    )
    assert await ctxt.get_cached_last_summary_head_sha() == "old-sha-one"
    await ctxt.clear_cached_last_summary_head_sha()
    assert await ctxt.get_cached_last_summary_head_sha() is None
async def test_signals(redis_cache: utils.RedisCache) -> None:
    """Check that signal setup registers exactly one signal and that
    ``signals.send`` dispatches to the registered noop signal.

    Fixes: the ``"base"`` dict literal contained a duplicate ``"label"``
    key (the second silently overwrote the first) — removed; the function
    signature now carries the same annotations as the sibling tests.
    """
    gh_owner = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("user"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    gh_repo = github_types.GitHubRepository(
        {
            "archived": False,
            "url": "",
            "default_branch": github_types.GitHubRefType(""),
            "id": github_types.GitHubRepositoryIdType(456),
            "full_name": "user/ref",
            "name": github_types.GitHubRepositoryName("name"),
            "private": False,
            "owner": gh_owner,
        }
    )

    client = mock.AsyncMock()
    client.auth.get_access_token.return_value = "<token>"

    sub = subscription.Subscription(redis_cache, 0, False, "", frozenset())
    installation = context.Installation(
        gh_owner["id"],
        gh_owner["login"],
        sub,
        client,
        redis_cache,
    )
    repository = context.Repository(installation, gh_repo["name"], gh_repo["id"])
    ctxt = await context.Context.create(
        repository,
        {
            "title": "",
            "id": github_types.GitHubPullRequestId(0),
            "maintainer_can_modify": False,
            "rebaseable": False,
            "draft": False,
            "merge_commit_sha": None,
            "labels": [],
            "number": github_types.GitHubPullRequestNumber(6),
            "commits": 1,
            "merged": True,
            "state": "closed",
            "changed_files": 1,
            "html_url": "<html_url>",
            "base": {
                "label": "",
                "sha": github_types.SHAType("sha"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "ref": github_types.GitHubRefType("ref"),
                "repo": gh_repo,
            },
            "head": {
                "label": "",
                "sha": github_types.SHAType("old-sha-one"),
                "ref": github_types.GitHubRefType("fork"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "repo": {
                    "archived": False,
                    "url": "",
                    "default_branch": github_types.GitHubRefType(""),
                    "id": github_types.GitHubRepositoryIdType(123),
                    "full_name": "fork/other",
                    "name": github_types.GitHubRepositoryName("other"),
                    "private": False,
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                        "avatar_url": "",
                    },
                },
            },
            "user": {
                "login": github_types.GitHubLogin("user"),
                "id": github_types.GitHubAccountIdType(0),
                "type": "User",
                "avatar_url": "",
            },
            "merged_by": None,
            "merged_at": None,
            "mergeable_state": "clean",
        },
    )

    # No signal is registered before setup; exactly one (noop) after.
    assert len(signals.SIGNALS) == 0
    signals.setup()
    assert len(signals.SIGNALS) == 1

    with mock.patch("mergify_engine_signals.noop.Signal.__call__") as signal_method:
        await signals.send(ctxt, "action.update")
        signal_method.assert_called_once_with(ctxt, "action.update")
async def refresh_pull( owner: github_types.GitHubLogin, repo: str, pull_request_number: github_types.GitHubPullRequestNumber, action: github_types.GitHubEventRefreshActionType = "user", ) -> responses.Response: action = RefreshActionSchema(action) return await _refresh( owner, repo, action=action, pull_request=github_types.GitHubPullRequest({ "number": pull_request_number, "id": github_types.GitHubPullRequestId(github_types.GitHubIssueId(0)), "maintainer_can_modify": False, "base": { "label": "", "ref": github_types.GitHubRefType(""), "sha": github_types.SHAType(""), "repo": { "default_branch": github_types.GitHubRefType(""), "id": github_types.GitHubRepositoryIdType(0), "owner": { "id": github_types.GitHubAccountIdType(0), "login": github_types.GitHubLogin(""), "type": "User", }, "private": False, "name": "", "full_name": "", "archived": False, "url": "", }, "user": { "login": github_types.GitHubLogin(""), "id": github_types.GitHubAccountIdType(0), "type": "User", }, }, "head": { "label": "", "ref": github_types.GitHubRefType(""), "sha": github_types.SHAType(""), "repo": { "default_branch": github_types.GitHubRefType(""), "id": github_types.GitHubRepositoryIdType(0), "owner": { "id": github_types.GitHubAccountIdType(0), "login": github_types.GitHubLogin(""), "type": "User", }, "private": False, "name": "", "full_name": "", "archived": False, "url": "", }, "user": { "login": github_types.GitHubLogin(""), "id": github_types.GitHubAccountIdType(0), "type": "User", }, }, "state": "open", "user": { "id": github_types.GitHubAccountIdType(0), "login": github_types.GitHubLogin(""), "type": "User", }, "labels": [], "merged": False, "merged_by": None, "merged_at": None, "rebaseable": False, "draft": False, "merge_commit_sha": None, "mergeable_state": "unknown", "html_url": "", }), )
async def test_get_commits_to_cherry_pick_rebase(
    commits: mock.PropertyMock,
    redis_cache: utils.RedisCache,
) -> None:
    """Check cherry-pick commit resolution after a rebase merge.

    The PR's original commits (c1, c2) were rebased onto the base branch as
    rebased_c1 / rebased_c2; walking back from rebased_c2 must return both
    rebased commits in order.
    """
    gh_owner = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("user"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    gh_repo = github_types.GitHubRepository(
        {
            "full_name": "user/name",
            "name": github_types.GitHubRepositoryName("name"),
            "private": False,
            "id": github_types.GitHubRepositoryIdType(0),
            "owner": gh_owner,
            "archived": False,
            "url": "",
            "html_url": "",
            "default_branch": github_types.GitHubRefType("ref"),
        }
    )
    # The PR's original commits, as reported by the (mocked) commits property.
    c1 = github_types.GitHubBranchCommit(
        {
            "sha": github_types.SHAType("c1f"),
            "parents": [],
            "commit": {"message": "foobar"},
        }
    )
    c2 = github_types.GitHubBranchCommit(
        {
            "sha": github_types.SHAType("c2"),
            "parents": [c1],
            "commit": {"message": "foobar"},
        }
    )
    commits.return_value = [c1, c2]

    client = mock.Mock()
    client.auth.get_access_token.return_value = "<token>"
    # `fake_get_github_pulls_from_sha` is a module-level helper in this file.
    client.items.side_effect = fake_get_github_pulls_from_sha

    installation = context.Installation(
        github_types.GitHubAccountIdType(123),
        github_types.GitHubLogin("user"),
        subscription.Subscription(redis_cache, 0, False, "", frozenset()),
        client,
        redis_cache,
    )
    repository = context.Repository(installation, gh_repo)
    ctxt = await context.Context.create(
        repository,
        {
            "labels": [],
            "draft": False,
            "merge_commit_sha": github_types.SHAType(""),
            "title": "",
            "commits": 1,
            "rebaseable": False,
            "maintainer_can_modify": False,
            "id": github_types.GitHubPullRequestId(0),
            "number": github_types.GitHubPullRequestNumber(6),
            "merged": True,
            "state": "closed",
            "html_url": "<html_url>",
            "changed_files": 1,
            "base": {
                "label": "",
                "sha": github_types.SHAType("sha"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "ref": github_types.GitHubRefType("ref"),
                "repo": {
                    "full_name": "user/ref",
                    "name": github_types.GitHubRepositoryName("name"),
                    "private": False,
                    "id": github_types.GitHubRepositoryIdType(0),
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                        "avatar_url": "",
                    },
                    "archived": False,
                    "url": "",
                    "html_url": "",
                    "default_branch": github_types.GitHubRefType(""),
                },
            },
            "head": {
                "label": "",
                "sha": github_types.SHAType("sha"),
                "ref": github_types.GitHubRefType("fork"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "repo": {
                    "full_name": "fork/other",
                    "name": github_types.GitHubRepositoryName("other"),
                    "private": False,
                    "archived": False,
                    "url": "",
                    "html_url": "",
                    "default_branch": github_types.GitHubRefType(""),
                    "id": github_types.GitHubRepositoryIdType(0),
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                        "avatar_url": "",
                    },
                },
            },
            "user": {
                "login": github_types.GitHubLogin("user"),
                "id": github_types.GitHubAccountIdType(0),
                "type": "User",
                "avatar_url": "",
            },
            "merged_by": None,
            "merged_at": None,
            "mergeable_state": "clean",
        },
    )

    # The rebased twins of c1/c2 as they landed on the base branch.
    base_branch = github_types.GitHubBranchCommitParent(
        {"sha": github_types.SHAType("base_branch")}
    )
    rebased_c1 = github_types.GitHubBranchCommit(
        {
            "sha": github_types.SHAType("rebased_c1"),
            "parents": [base_branch],
            "commit": {"message": "hello c1"},
        }
    )
    rebased_c2 = github_types.GitHubBranchCommit(
        {
            "sha": github_types.SHAType("rebased_c2"),
            "parents": [rebased_c1],
            "commit": {"message": "hello c2"},
        }
    )

    async def fake_get_github_commit_from_sha(url, api_version=None):
        # Serve the rebased commits by SHA, as the API client would.
        if url.endswith("/commits/rebased_c1"):
            return rebased_c1
        if url.endswith("/commits/rebased_c2"):
            return rebased_c2
        raise RuntimeError(f"Unknown URL {url}")

    client.item.side_effect = fake_get_github_commit_from_sha

    assert await duplicate_pull._get_commits_to_cherrypick(ctxt, rebased_c2) == [
        rebased_c1,
        rebased_c2,
    ]
async def build_fake_context(
    number: github_types.GitHubPullRequestNumber,
    *,
    repository: context.Repository,
    **kwargs: typing.Any,
) -> context.Context:
    """Build a Context around a synthetic open pull request.

    ``number`` is the PR number; ``repository`` supplies the base repo and
    owner. Any extra keyword argument overrides the corresponding key of the
    synthetic pull payload before the Context is created.
    """
    pull_request_author = github_types.GitHubAccount(
        {
            "id": github_types.GitHubAccountIdType(123),
            "type": "User",
            "login": github_types.GitHubLogin("contributor"),
            "avatar_url": "",
        }
    )
    pull: github_types.GitHubPullRequest = {
        "node_id": "42",
        "locked": False,
        "assignees": [],
        "requested_reviewers": [
            {
                "id": github_types.GitHubAccountIdType(123),
                "type": "User",
                "login": github_types.GitHubLogin("jd"),
                "avatar_url": "",
            },
            {
                "id": github_types.GitHubAccountIdType(456),
                "type": "User",
                "login": github_types.GitHubLogin("sileht"),
                "avatar_url": "",
            },
        ],
        "requested_teams": [
            {"slug": github_types.GitHubTeamSlug("foobar")},
            {"slug": github_types.GitHubTeamSlug("foobaz")},
        ],
        "milestone": None,
        "title": "awesome",
        "body": "",
        "created_at": github_types.ISODateTimeType("2021-06-01T18:41:39Z"),
        "closed_at": None,
        "updated_at": github_types.ISODateTimeType("2021-06-01T18:41:39Z"),
        "id": github_types.GitHubPullRequestId(123),
        "maintainer_can_modify": True,
        "user": pull_request_author,
        "labels": [],
        "rebaseable": True,
        "draft": False,
        "merge_commit_sha": None,
        "number": number,
        "commits": 1,
        "mergeable_state": "clean",
        "mergeable": True,
        "state": "open",
        "changed_files": 1,
        "head": {
            "sha": github_types.SHAType("the-head-sha"),
            "label": f"{pull_request_author['login']}:feature-branch",
            "ref": github_types.GitHubRefType("feature-branch"),
            "repo": {
                "id": github_types.GitHubRepositoryIdType(123),
                "default_branch": github_types.GitHubRefType("main"),
                "name": github_types.GitHubRepositoryName("mergify-engine"),
                "full_name": "contributor/mergify-engine",
                "archived": False,
                "private": False,
                "owner": pull_request_author,
                "url": "https://api.github.com/repos/contributor/mergify-engine",
                "html_url": "https://github.com/contributor/mergify-engine",
            },
            "user": pull_request_author,
        },
        "merged": False,
        "merged_by": None,
        "merged_at": None,
        "html_url": "https://...",
        "base": {
            "label": "mergify_engine:main",
            "ref": github_types.GitHubRefType("main"),
            "repo": repository.repo,
            "sha": github_types.SHAType("the-base-sha"),
            "user": repository.repo["owner"],
        },
    }
    # Apply caller overrides on top of the synthetic payload.
    pull.update(kwargs)  # type: ignore
    return await context.Context.create(repository, pull)