async def test_train_add_remove_pull_idempotant(
    repository: context.Repository, context_getter: conftest.ContextGetterFixture
) -> None:
    """Re-adding or re-removing the same pull must leave the train unchanged."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    config = get_config("5x1", priority=0)
    await t.add_pull(await context_getter(1), config)
    await t.add_pull(await context_getter(2), config)
    await t.add_pull(await context_getter(3), config)
    await t.refresh()
    assert [[1], [1, 2], [1, 2, 3]] == get_cars_content(t)

    # Re-adding an already embarked pull (even with a different priority)
    # must not change the car layout.
    config = get_config("5x1", priority=10)
    await t.add_pull(await context_getter(1), config)
    await t.refresh()
    assert [[1], [1, 2], [1, 2, 3]] == get_cars_content(t)

    # A freshly loaded train must see the same persisted state.
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    assert [[1], [1, 2], [1, 2, 3]] == get_cars_content(t)

    await t.remove_pull(await context_getter(2))
    await t.refresh()
    assert [[1], [1, 3]] == get_cars_content(t)

    # Removing an already-removed pull must be a no-op.
    await t.remove_pull(await context_getter(2))
    await t.refresh()
    assert [[1], [1, 3]] == get_cars_content(t)

    # Reload once more to confirm the removal was persisted.
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    assert [[1], [1, 3]] == get_cars_content(t)
async def test_train_mutiple_queue(
    repository: context.Repository, context_getter: conftest.ContextGetterFixture
) -> None:
    """Pulls from different queues are ordered by queue, not by arrival time."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    config_two = get_config("2x1", priority=0)
    config_five = get_config("5x1", priority=0)
    await t.add_pull(await context_getter(1), config_two)
    await t.add_pull(await context_getter(2), config_two)
    await t.add_pull(await context_getter(3), config_five)
    await t.add_pull(await context_getter(4), config_five)
    await t.refresh()
    assert [[1], [1, 2]] == get_cars_content(t)
    assert [3, 4] == get_waiting_content(t)

    # Ensure we don't go over the train size
    await t.add_pull(await context_getter(5), config_two)
    await t.refresh()
    assert [[1], [1, 2]] == get_cars_content(t)
    # 5 belongs to the "2x1" queue, so it waits ahead of the "5x1" pulls.
    assert [5, 3, 4] == get_waiting_content(t)

    await t.add_pull(await context_getter(6), config_five)
    await t.add_pull(await context_getter(7), config_five)
    await t.add_pull(await context_getter(8), config_five)
    await t.add_pull(await context_getter(9), config_five)
    await t.refresh()
    assert [[1], [1, 2]] == get_cars_content(t)
    assert [5, 3, 4, 6, 7, 8, 9] == get_waiting_content(t)

    # Reloading from storage must not change anything.
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    assert [[1], [1, 2]] == get_cars_content(t)
    assert [5, 3, 4, 6, 7, 8, 9] == get_waiting_content(t)

    await t.remove_pull(await context_getter(2))
    await t.refresh()
    assert [[1], [1, 5]] == get_cars_content(
        t
    ), f"{get_cars_content(t)} {get_waiting_content(t)}"
    assert [3, 4, 6, 7, 8, 9] == get_waiting_content(t)

    # Once the "2x1" queue is drained, the "5x1" queue takes over.
    await t.remove_pull(await context_getter(1))
    await t.remove_pull(await context_getter(5))
    await t.refresh()
    assert [[3], [3, 4], [3, 4, 6], [3, 4, 6, 7], [3, 4, 6, 7, 8]] == get_cars_content(
        t
    )
    assert [9] == get_waiting_content(t)

    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    assert [[3], [3, 4], [3, 4, 6], [3, 4, 6, 7], [3, 4, 6, 7, 8]] == get_cars_content(
        t
    )
    assert [9] == get_waiting_content(t)
def make_pr(
    repo: github_types.GitHubRepository, owner: github_types.GitHubAccount
) -> github_types.GitHubPullRequest:
    """Build a minimal, closed pull request payload for *repo* owned by *owner*."""
    # Head/base branch payloads are assembled separately for readability.
    head = {
        "user": owner,
        "label": "",
        "ref": github_types.GitHubRefType(""),
        "sha": github_types.SHAType(""),
        "repo": repo,
    }
    base = {
        "ref": github_types.GitHubRefType("main"),
        "sha": github_types.SHAType(""),
        "label": "",
        "repo": repo,
        "user": owner,
    }
    return github_types.GitHubPullRequest(
        {
            "id": github_types.GitHubPullRequestId(github_types.GitHubIssueId(0)),
            "maintainer_can_modify": False,
            "head": head,
            "user": owner,
            "number": github_types.GitHubPullRequestNumber(
                github_types.GitHubIssueNumber(0)
            ),
            "rebaseable": False,
            "draft": False,
            "merge_commit_sha": None,
            "html_url": "",
            "state": "closed",
            "mergeable_state": "unknown",
            "merged_by": None,
            "merged": False,
            "merged_at": None,
            "labels": [],
            "base": base,
        }
    )
async def test_train_disallow_checks_interruption_scenario_2(
    repository: context.Repository, context_getter: conftest.ContextGetterFixture
) -> None:
    """Interruption rules: noint queues keep their cars, urgent overrides all."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    urgent = get_config("urgent-1x4")
    fastlane = get_config("fastlane-1x8-noint")
    regular = get_config("regular-1x8-noint-from-fastlane-and-regular")

    await t.add_pull(await context_getter(1), regular)
    await t.add_pull(await context_getter(2), regular)
    await t.refresh()
    assert [[1, 2]] == get_cars_content(t)
    assert [] == get_waiting_content(t)

    # fastlane doesn't interrupt the checks as
    # disallow_checks_interruption_from_queues of regular disallow it
    await t.add_pull(await context_getter(3), fastlane)
    await t.refresh()
    assert [[1, 2]] == get_cars_content(t)
    assert [3] == get_waiting_content(t)

    # fastlane doesn't interrupt the checks because of noint, but goes before
    # regular
    await t.add_pull(await context_getter(4), regular)
    await t.refresh()
    assert [[1, 2]] == get_cars_content(t)
    assert [3, 4] == get_waiting_content(t)

    # urgent breaks everything, then we put the fastlane one, and all regulars goes behind
    await t.add_pull(await context_getter(5), urgent)
    await t.refresh()
    assert [[5]] == get_cars_content(t)
    assert [3, 1, 2, 4] == get_waiting_content(t)
async def test_train_interrupt_mixed_across_queue(
    repository: context.Repository, context_getter: conftest.ContextGetterFixture
) -> None:
    """A higher-priority queue interrupts cars of a lower noint queue."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    config = get_config("low-1x5-noint")

    await t.add_pull(await context_getter(1), config)
    await t.refresh()
    assert [[1]] == get_cars_content(t)
    assert [] == get_waiting_content(t)

    # noint: new same-queue pulls wait instead of joining the running car.
    await t.add_pull(await context_getter(2), config)
    await t.refresh()
    assert [[1]] == get_cars_content(t)
    assert [2] == get_waiting_content(t)

    await t.add_pull(await context_getter(3), config)
    await t.refresh()
    assert [[1]] == get_cars_content(t)
    assert [2, 3] == get_waiting_content(t)

    # Inserting pr in high queue always break started speculative checks
    await t.add_pull(await context_getter(4), get_config("high-1x2"))
    await t.refresh()
    assert [[4]] == get_cars_content(t)
    assert [1, 2, 3] == get_waiting_content(t)
async def test_train_queue_config_deleted(
    report_failure: mock.Mock,
    repository: context.Repository,
    context_getter: conftest.ContextGetterFixture,
) -> None:
    """Deleting a queue from the config resets cars and reports one failure."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()

    await t.add_pull(await context_getter(1), get_config("2x1", 1000))
    await t.add_pull(await context_getter(2), get_config("2x1", 1000))
    await t.add_pull(await context_getter(3), get_config("5x1", 1000))

    await t.refresh()
    assert [[1], [1, 2]] == get_cars_content(t)
    assert [3] == get_waiting_content(t)

    # Swap the config for one where the "2x1" queue no longer exists, and
    # drop the cached config so refresh() re-reads it.
    with mock.patch.object(
        sys.modules[__name__],
        "MERGIFY_CONFIG",
        """
queue_rules:
  - name: five
    conditions: []
    speculative_checks: 5
""",
    ):
        repository._caches.mergify_config.delete()
        repository._caches.mergify_config_file.delete()
        await t.refresh()

    # All cars are destroyed; pulls fall back to the waiting list.
    assert [] == get_cars_content(t)
    assert [1, 2, 3] == get_waiting_content(t)
    assert len(report_failure.mock_calls) == 1
async def test_train_priority_change(
    repository: context.Repository,
    context_getter: conftest.ContextGetterFixture,
) -> None:
    """Requeueing a pull with a new priority updates its effective priority only."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()

    await t.add_pull(await context_getter(1), get_config("2x1", 1000))
    await t.add_pull(await context_getter(2), get_config("2x1", 1000))
    await t.add_pull(await context_getter(3), get_config("2x1", 1000))

    await t.refresh()
    assert [[1], [1, 2]] == get_cars_content(t)
    assert [3] == get_waiting_content(t)

    # effective_priority = queue priority scaled by the offset + pull priority.
    assert (
        t._cars[0].still_queued_embarked_pulls[0].config["effective_priority"]
        == QUEUE_RULES["2x1"].config["priority"] * queue.QUEUE_PRIORITY_OFFSET + 1000
    )

    # NOTE(sileht): pull request got requeued with new configuration that don't
    # update the position but update the prio
    await t.add_pull(await context_getter(1), get_config("2x1", 2000))

    await t.refresh()
    assert [[1], [1, 2]] == get_cars_content(t)
    assert [3] == get_waiting_content(t)

    assert (
        t._cars[0].still_queued_embarked_pulls[0].config["effective_priority"]
        == QUEUE_RULES["2x1"].config["priority"] * queue.QUEUE_PRIORITY_OFFSET + 2000
    )
def test_train_batch_split(repository: context.Repository) -> None:
    """_get_next_batch takes leading pulls of the given queue, up to the size cap."""
    now = datetime.datetime.utcnow()
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    p1_two = merge_train.EmbarkedPull(
        t, github_types.GitHubPullRequestNumber(1), get_config("2x1"), now
    )
    p2_two = merge_train.EmbarkedPull(
        t, github_types.GitHubPullRequestNumber(2), get_config("2x1"), now
    )
    p3_two = merge_train.EmbarkedPull(
        t, github_types.GitHubPullRequestNumber(3), get_config("2x1"), now
    )
    p4_five = merge_train.EmbarkedPull(
        t, github_types.GitHubPullRequestNumber(4), get_config("5x1"), now
    )

    # Batch size 1: only the first "2x1" pull is taken.
    assert ([p1_two], [p2_two, p3_two, p4_five]) == t._get_next_batch(
        [p1_two, p2_two, p3_two, p4_five], "2x1", 1
    )
    # Batch size 2: the two leading "2x1" pulls.
    assert ([p1_two, p2_two], [p3_two, p4_five]) == t._get_next_batch(
        [p1_two, p2_two, p3_two, p4_five], "2x1", 2
    )
    # A large batch size stops at the first pull of another queue.
    assert ([p1_two, p2_two, p3_two], [p4_five]) == t._get_next_batch(
        [p1_two, p2_two, p3_two, p4_five], "2x1", 10
    )
    # The head pull is not in the requested queue: nothing is batched.
    assert ([], [p1_two, p2_two, p3_two, p4_five]) == t._get_next_batch(
        [p1_two, p2_two, p3_two, p4_five], "5x1", 10
    )
async def refresh_branch(
    owner: github_types.GitHubLogin,
    repo_name: github_types.GitHubRepositoryName,
    branch: str,
    redis_cache: utils.RedisCache = fastapi.Depends(  # noqa: B008
        redis.get_redis_cache
    ),
    redis_stream: utils.RedisStream = fastapi.Depends(  # noqa: B008
        redis.get_redis_stream
    ),
) -> responses.Response:
    """Queue a refresh event for a branch.

    Returns 404 when the repository cannot be fetched, 202 once the
    refresh has been queued.
    """
    async with github.aget_client(owner_name=owner) as client:
        try:
            repository = await client.item(f"/repos/{owner}/{repo_name}")
        except http.HTTPNotFound:
            return responses.JSONResponse(
                status_code=404, content="repository not found"
            )

    await github_events.send_refresh(
        redis_cache,
        redis_stream,
        repository,
        ref=github_types.GitHubRefType(f"refs/heads/{branch}"),
    )
    return responses.Response("Refresh queued", status_code=202)
async def test_merge_commit_message(
    body: str,
    title: str,
    message: str,
    template: typing.Optional[str],
    context_getter: conftest.ContextGetterFixture,
) -> None:
    """get_commit_message renders (title, message) from the PR body and template."""
    ctxt = await context_getter(
        github_types.GitHubPullRequestNumber(43), body=body, title="My PR title"
    )
    # Pre-fill caches so no branch-protection/CI lookups hit the network.
    ctxt.repository._caches.branch_protections[
        github_types.GitHubRefType("main")
    ] = None
    ctxt._caches.pull_statuses.set(
        [
            github_types.GitHubStatus(
                {
                    "target_url": "http://example.com",
                    "context": "my CI",
                    "state": "success",
                    "description": "foobar",
                    "avatar_url": "",
                }
            )
        ]
    )
    ctxt._caches.pull_check_runs.set([])
    assert await ctxt.pull_request.get_commit_message(template=template) == (
        title,
        message,
    )
async def refresh_branch(
    owner_login: github_types.GitHubLogin,
    repo_name: github_types.GitHubRepositoryName,
    branch: str,
    redis_cache: utils.RedisCache = fastapi.Depends(  # noqa: B008
        redis.get_redis_cache
    ),
    redis_stream: utils.RedisStream = fastapi.Depends(  # noqa: B008
        redis.get_redis_stream
    ),
) -> responses.Response:
    """Queue a user-triggered branch refresh coming from the API.

    Returns 404 when the repository cannot be fetched, 202 once the
    refresh has been queued.
    """
    installation_json = await github.get_installation_from_login(owner_login)
    async with github.aget_client(installation_json) as client:
        try:
            repository = await client.item(f"/repos/{owner_login}/{repo_name}")
        except http.HTTPNotFound:
            return responses.JSONResponse(
                status_code=404, content="repository not found"
            )

    await utils.send_branch_refresh(
        redis_cache,
        redis_stream,
        repository,
        action="user",
        source="API",
        ref=github_types.GitHubRefType(f"refs/heads/{branch}"),
    )
    return responses.Response("Refresh queued", status_code=202)
async def test_train_no_interrupt_add_pull(
    repository: context.Repository, context_getter: conftest.ContextGetterFixture
) -> None:
    """In a noint queue, higher priority reorders waiting pulls without breaking cars."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    config = get_config("high-2x5-noint")

    await t.add_pull(await context_getter(1), config)
    await t.refresh()
    assert [[1]] == get_cars_content(t)
    assert [] == get_waiting_content(t)

    await t.add_pull(await context_getter(2), config)
    await t.refresh()
    assert [[1], [1, 2]] == get_cars_content(t)
    assert [] == get_waiting_content(t)

    # Speculative-check limit (2) reached: pull 3 waits.
    await t.add_pull(await context_getter(3), config)
    await t.refresh()
    assert [[1], [1, 2]] == get_cars_content(t)
    assert [3] == get_waiting_content(t)

    # Inserting high prio didn't break started speculative checks, but the PR
    # move above other
    await t.add_pull(await context_getter(4), get_config("high-2x5-noint", 20000))
    await t.refresh()
    assert [[1], [1, 2]] == get_cars_content(t)
    assert [4, 3] == get_waiting_content(t)
async def test_train_remove_duplicates(
    repository: context.Repository, context_getter: conftest.ContextGetterFixture
) -> None:
    """refresh() must clean up duplicated cars and waiting pulls."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    await t.add_pull(await context_getter(1), get_config("2x1", 1000))
    await t.add_pull(await context_getter(2), get_config("2x1", 1000))
    await t.add_pull(await context_getter(3), get_config("2x1", 1000))
    await t.add_pull(await context_getter(4), get_config("2x1", 1000))

    await t.refresh()
    assert [[1], [1, 2]] == get_cars_content(t)
    assert [3, 4] == get_waiting_content(t)

    # Insert bugs in queue: duplicate an embarked pull into the waiting list
    # and duplicate all cars.
    t._waiting_pulls.extend(
        [
            merge_train.EmbarkedPull(
                t,
                t._cars[0].still_queued_embarked_pulls[0].user_pull_request_number,
                t._cars[0].still_queued_embarked_pulls[0].config,
                t._cars[0].still_queued_embarked_pulls[0].queued_at,
            ),
            t._waiting_pulls[0],
        ]
    )
    t._cars = t._cars + t._cars
    assert [[1], [1, 2], [1], [1, 2]] == get_cars_content(t)
    assert [1, 3, 3, 4] == get_waiting_content(t)

    # Everything should be back to normal
    await t.refresh()
    assert [[1], [1, 2]] == get_cars_content(t)
    assert [3, 4] == get_waiting_content(t)
async def test_get_already_merged_summary(
    merged_by: str,
    raw_config: str,
    result: str,
    context_getter: conftest.ContextGetterFixture,
) -> None:
    """The already-merged summary depends on who merged the pull and the rules."""
    ctxt = await context_getter(
        github_types.GitHubPullRequestNumber(1),
        merged=True,
        merged_by=github_types.GitHubAccount(
            {
                "id": github_types.GitHubAccountIdType(1),
                "login": github_types.GitHubLogin(merged_by),
                "type": "User",
                "avatar_url": "",
            }
        ),
    )
    # Avoid a branch-protection lookup during rule evaluation.
    ctxt.repository._caches.branch_protections[
        github_types.GitHubRefType("main")
    ] = None
    file = context.MergifyConfigFile(
        type="file",
        content="whatever",
        sha=github_types.SHAType("azertyuiop"),
        path="whatever",
        decoded_content=raw_config,
    )
    config = rules.get_mergify_config(file)
    match = await config["pull_request_rules"].get_pull_request_rule(ctxt)
    assert result == await actions_runner.get_already_merged_summary(ctxt, match)
async def iter_trains(
    cls, installation: context.Installation
) -> typing.AsyncIterator["Train"]:
    """Yield a Train for every train key stored in Redis for *installation*."""
    for train_name in await installation.redis.keys(
        cls.get_redis_key_for(installation.owner_id, "*", "*")
    ):
        # Key segments are "~"-separated; index 2 is the repository id and
        # index 3 the branch ref (matches get_redis_key_for's layout).
        train_name_split = train_name.split("~")
        repo_id = github_types.GitHubRepositoryIdType(int(train_name_split[2]))
        ref = github_types.GitHubRefType(train_name_split[3])
        repository = await installation.get_repository_by_id(repo_id)
        yield cls(repository, ref)
async def _refresh(
    owner: github_types.GitHubLogin,
    repo: str,
    action: github_types.GitHubEventRefreshActionType = "user",
    ref: typing.Optional[github_types.GitHubRefType] = None,
    pull_request: typing.Optional[github_types.GitHubPullRequest] = None,
) -> responses.Response:
    """Build a synthetic "refresh" event and dispatch it on the event stream.

    Most payload fields are zero/empty placeholders; only owner, repo, ref
    and pull_request carry real information.
    """
    data = github_types.GitHubEventRefresh(
        {
            "action": action,
            "organization": {
                "login": owner,
                "id": github_types.GitHubAccountIdType(0),
                "type": "Organization",
            },
            "installation": {
                "id": 0,
                "account": {
                    "login": owner,
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "Organization",
                },
            },
            "repository": {
                "default_branch": github_types.GitHubRefType(""),
                "id": github_types.GitHubRepositoryIdType(0),
                "private": False,
                "archived": False,
                "url": "",
                "name": repo,
                "owner": {
                    "login": owner,
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "Organization",
                },
                "full_name": f"{owner}/{repo}",
            },
            "sender": {
                "login": github_types.GitHubLogin("<internal>"),
                "id": github_types.GitHubAccountIdType(0),
                "type": "User",
            },
            "ref": ref,
            "pull_request": pull_request,
        }
    )

    # A fresh UUID is used as the event id for deduplication purposes.
    await github_events.filter_and_dispatch(
        _AREDIS_STREAM, "refresh", str(uuid.uuid4()), data
    )
    return responses.Response("Refresh queued", status_code=202)
def fake_repository(
    redis_links: redis_utils.RedisLinks,
    fake_subscription: subscription.Subscription,
) -> context.Repository:
    """Build a context.Repository wired to fake Redis links and a mocked client."""
    account = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("Mergifyio"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    repo = github_types.GitHubRepository(
        {
            "full_name": "Mergifyio/mergify-engine",
            "name": github_types.GitHubRepositoryName("mergify-engine"),
            "private": False,
            "id": github_types.GitHubRepositoryIdType(0),
            "owner": account,
            "archived": False,
            "url": "",
            "html_url": "",
            "default_branch": github_types.GitHubRefType("main"),
        }
    )
    installation_json = github_types.GitHubInstallation(
        {
            "id": github_types.GitHubInstallationIdType(12345),
            "target_type": account["type"],
            "permissions": {},
            "account": account,
        }
    )
    installation = context.Installation(
        installation_json,
        fake_subscription,
        mock.Mock(),  # GitHub client is never exercised by this fixture
        redis_links,
    )
    return context.Repository(installation, repo)
async def test_train_remove_middle_not_merged(
    repository: context.Repository, context_getter: conftest.ContextGetterFixture
) -> None:
    """Removing an unmerged middle pull rebuilds the cars behind it."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    await t.add_pull(await context_getter(1), get_config("5x1", 1000))
    await t.add_pull(await context_getter(3), get_config("5x1", 100))
    await t.add_pull(await context_getter(2), get_config("5x1", 1000))

    await t.refresh()
    # Priority 1000 pulls (1, 2) come before the priority 100 pull (3).
    assert [[1], [1, 2], [1, 2, 3]] == get_cars_content(t)

    await t.remove_pull(await context_getter(2))
    await t.refresh()
    assert [[1], [1, 3]] == get_cars_content(t)
async def test_train_queue_pr_with_higher_prio_enters_in_queue_during_merging_2x5(
    report_failure: mock.Mock,
    repository: context.Repository,
    context_getter: conftest.ContextGetterFixture,
    fake_client: mock.Mock,
) -> None:
    """A higher-priority pull arriving mid-merge lands after the merging batch."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()

    for i in range(41, 52):
        await t.add_pull(await context_getter(i), get_config("2x5", 1000))

    await t.refresh()
    assert [
        [41, 42, 43, 44, 45],
        [41, 42, 43, 44, 45, 46, 47, 48, 49, 50],
    ] == get_cars_content(t)
    assert [51] == get_waiting_content(t)

    # First batch passed its checks.
    t._cars[0].checks_conclusion = check_api.Conclusion.SUCCESS
    await t.save()
    await t.refresh()
    assert [
        [41, 42, 43, 44, 45],
        [41, 42, 43, 44, 45, 46, 47, 48, 49, 50],
    ] == get_cars_content(t)
    assert [51] == get_waiting_content(t)

    # merge half of the batch
    for i in range(41, 44):
        fake_client.update_base_sha(f"sha{i}")
        await t.remove_pull(
            await context_getter(i, merged=True, merge_commit_sha=f"sha{i}")
        )

    await t.refresh()
    assert [
        [44, 45],
        [41, 42, 43, 44, 45, 46, 47, 48, 49, 50],
    ] == get_cars_content(t)
    assert [51] == get_waiting_content(t)

    # Pull 7 has a higher priority: it enters right after the merging batch.
    await t.add_pull(await context_getter(7), get_config("2x5", 2000))
    await t.refresh()
    assert [[44, 45], [44, 45, 7, 46, 47, 48, 49]] == get_cars_content(t)
    assert [50, 51] == get_waiting_content(t)
async def test_train_queue_splitted_on_failure_1x2(
    report_failure: mock.Mock,
    repository: context.Repository,
    fake_client: mock.Mock,
    context_getter: conftest.ContextGetterFixture,
) -> None:
    """A failed 2-pull batch is split to isolate the culprit, then restarts."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()

    for i in range(41, 43):
        await t.add_pull(await context_getter(i), get_config("high-1x2", 1000))
    for i in range(6, 20):
        await t.add_pull(await context_getter(i), get_config("high-1x2", 1000))

    await t.refresh()
    assert [[41, 42]] == get_cars_content(t)
    assert list(range(6, 20)) == get_waiting_content(t)

    t._cars[0].checks_conclusion = check_api.Conclusion.FAILURE
    await t.save()
    assert [[41, 42]] == get_cars_content(t)
    assert list(range(6, 20)) == get_waiting_content(t)

    # After reload+refresh the failed batch is split into [41] and [41, 42].
    await t.load()
    await t.refresh()
    assert [
        [41],
        [41, 42],
    ] == get_cars_content(t)
    assert list(range(6, 20)) == get_waiting_content(t)
    assert len(t._cars[0].failure_history) == 1
    assert len(t._cars[1].failure_history) == 0
    assert t._cars[0].creation_state == "updated"
    assert t._cars[1].creation_state == "created"

    # mark [41] as failed
    t._cars[1].checks_conclusion = check_api.Conclusion.FAILURE
    await t.save()
    await t.remove_pull(await context_getter(41, merged=False))

    # It's 41 fault, we restart the train on 42
    await t.refresh()
    assert [[42, 6]] == get_cars_content(t)
    assert list(range(7, 20)) == get_waiting_content(t)
    assert len(t._cars[0].failure_history) == 0
    assert t._cars[0].creation_state == "created"  # type: ignore[comparison-overlap]
async def test_train_remove_last_cars(
    repository: context.Repository, context_getter: conftest.ContextGetterFixture
) -> None:
    """When the only car is removed, the next waiting pull takes its place."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    await t.add_pull(await context_getter(1), get_config("high-1x1", 1000))
    await t.add_pull(await context_getter(2), get_config("high-1x1", 1000))
    await t.add_pull(await context_getter(3), get_config("high-1x1", 1000))

    # 1x1 queue: only one car of one pull at a time.
    await t.refresh()
    assert [[1]] == get_cars_content(t)
    assert [2, 3] == get_waiting_content(t)

    await t.remove_pull(await context_getter(1))
    await t.refresh()
    assert [[2]] == get_cars_content(t)
    assert [3] == get_waiting_content(t)
async def test_train_remove_head_merged(
    repository: context.Repository, context_getter: conftest.ContextGetterFixture
) -> None:
    """Removing a merged head keeps it in the remaining cars' base content."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    config = get_config("5x1")
    await t.add_pull(await context_getter(1), config)
    await t.add_pull(await context_getter(2), config)
    await t.add_pull(await context_getter(3), config)

    await t.refresh()
    assert [[1], [1, 2], [1, 2, 3]] == get_cars_content(t)

    await t.remove_pull(
        await context_getter(1, merged=True, merge_commit_sha="new_sha1")
    )
    await t.refresh()
    # Pull 1 is merged, so the surviving cars still include it.
    assert [[1, 2], [1, 2, 3]] == get_cars_content(t)
def fake_repository(
    redis_cache: utils.RedisCache,
    fake_subscription: subscription.Subscription,
) -> context.Repository:
    """Build a context.Repository backed by fake subscription and mocked clients."""
    gh_owner = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("Mergifyio"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    gh_repo = github_types.GitHubRepository(
        {
            "full_name": "Mergifyio/mergify-engine",
            "name": github_types.GitHubRepositoryName("mergify-engine"),
            "private": False,
            "id": github_types.GitHubRepositoryIdType(0),
            "owner": gh_owner,
            "archived": False,
            "url": "",
            "html_url": "",
            "default_branch": github_types.GitHubRefType("main"),
        }
    )
    installation_json = github_types.GitHubInstallation(
        {
            "id": github_types.GitHubInstallationIdType(12345),
            "target_type": gh_owner["type"],
            "permissions": {},
            "account": gh_owner,
        }
    )
    fake_client = redis_queue = mock.Mock()
    # NOTE(Syffe): Since redis_queue is not used in fake_repository, we simply mock it,
    # otherwise a fixture is needed for it. This might change with future use of redis_queue.
    installation = context.Installation(
        installation_json, fake_subscription, fake_client, redis_cache, redis_queue
    )
    return context.Repository(installation, gh_repo)
async def refresh_branch(
    owner: github_types.GitHubLogin,
    repo_name: github_types.GitHubRepositoryName,
    branch: str,
) -> responses.Response:
    """Queue a refresh for *branch*; 404 when the repository cannot be fetched."""
    try:
        async with github.aget_client(owner_name=owner) as client:
            repository = await client.item(f"/repos/{owner}/{repo_name}")
    except http.HTTPNotFound:
        return responses.JSONResponse(status_code=404, content="repository not found")

    global _AREDIS_STREAM, _AREDIS_CACHE
    await github_events.send_refresh(
        _AREDIS_CACHE,
        _AREDIS_STREAM,
        repository,
        ref=github_types.GitHubRefType(f"refs/heads/{branch}"),
    )
    return responses.Response("Refresh queued", status_code=202)
async def test_train_with_speculative_checks_decreased(
    repository: context.Repository, context_getter: conftest.ContextGetterFixture
) -> None:
    """Lowering speculative_checks in the config shrinks the cars on refresh."""
    t = merge_train.Train(repository, github_types.GitHubRefType("main"))
    await t.load()
    config = get_config("5x1", 1000)
    await t.add_pull(await context_getter(1), config)

    # NOTE: mutates the shared QUEUE_RULES; the cars below were created with 5.
    QUEUE_RULES["5x1"].config["speculative_checks"] = 2

    await t.add_pull(await context_getter(2), config)
    await t.add_pull(await context_getter(3), config)
    await t.add_pull(await context_getter(4), config)
    await t.add_pull(await context_getter(5), config)
    await t.refresh()
    assert [[1], [1, 2], [1, 2, 3], [1, 2, 3, 4], [1, 2, 3, 4, 5]] == get_cars_content(
        t
    )
    assert [] == get_waiting_content(t)

    await t.remove_pull(
        await context_getter(1, merged=True, merge_commit_sha="new_sha1")
    )

    # Reload a config with speculative_checks: 2 and refresh: only two cars
    # remain, the rest goes back to the waiting list.
    with mock.patch.object(
        sys.modules[__name__],
        "MERGIFY_CONFIG",
        """
queue_rules:
  - name: 5x1
    conditions: []
    speculative_checks: 2
""",
    ):
        repository._caches.mergify_config.delete()
        repository._caches.mergify_config_file.delete()
        await t.refresh()
    assert [[1, 2], [1, 2, 3]] == get_cars_content(t)
    assert [4, 5] == get_waiting_content(t)
def repository(redis_cache, fake_client):
    """Fixture: a context.Repository for a fake "user/name" repository."""
    gh_owner = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("user"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    gh_repo = github_types.GitHubRepository(
        {
            "full_name": "user/name",
            "name": github_types.GitHubRepositoryName("name"),
            "private": False,
            "id": github_types.GitHubRepositoryIdType(0),
            "owner": gh_owner,
            "archived": False,
            "url": "",
            "html_url": "",
            "default_branch": github_types.GitHubRefType("ref"),
        }
    )
    installation = context.Installation(
        github_types.GitHubAccountIdType(123),
        github_types.GitHubLogin("user"),
        subscription.Subscription(redis_cache, 0, False, "", frozenset()),
        fake_client,
        redis_cache,
    )
    return context.Repository(installation, gh_repo)
async def test_train_batch_max_wait_time(
    repository: context.Repository, context_getter: conftest.ContextGetterFixture
) -> None:
    """A lone pull waits for the batch window; a refresh is scheduled at +5min."""
    with freeze_time("2021-09-22T08:00:00") as freezed_time:
        t = merge_train.Train(repository, github_types.GitHubRefType("main"))
        await t.load()

        config = get_config("batch-wait-time")
        await t.add_pull(await context_getter(1), config)
        await t.refresh()
        assert [] == get_cars_content(t)
        assert [1] == get_waiting_content(t)

        # Enough PRs to batch!
        await t.add_pull(await context_getter(2), config)
        await t.refresh()
        assert [[1, 2]] == get_cars_content(t)
        assert [] == get_waiting_content(t)

        # Pull 3 is alone again: it waits for the next window.
        await t.add_pull(await context_getter(3), config)
        await t.refresh()
        assert [[1, 2]] == get_cars_content(t)
        assert [3] == get_waiting_content(t)

        # A delayed refresh must be scheduled 5 minutes after queueing.
        d = await delayed_refresh._get_current_refresh_datetime(
            repository, github_types.GitHubPullRequestNumber(3)
        )
        assert d is not None
        assert d == freezed_time().replace(
            tzinfo=datetime.timezone.utc
        ) + datetime.timedelta(minutes=5)

    with freeze_time("2021-09-22T08:05:02"):
        # Wait window elapsed: pull 3 gets its own car.
        await t.refresh()
        assert [[1, 2], [1, 2, 3]] == get_cars_content(t)
        assert [] == get_waiting_content(t)
async def test_get_commits_to_cherry_pick_rebase(
    commits: mock.PropertyMock,
    redis_cache: utils.RedisCache,
) -> None:
    """After a rebase-merge, the rebased commits are found by walking parents."""
    gh_owner = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("user"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    gh_repo = github_types.GitHubRepository(
        {
            "full_name": "user/name",
            "name": github_types.GitHubRepositoryName("name"),
            "private": False,
            "id": github_types.GitHubRepositoryIdType(0),
            "owner": gh_owner,
            "archived": False,
            "url": "",
            "html_url": "",
            "default_branch": github_types.GitHubRefType("ref"),
        }
    )
    # The PR's original two commits (c2 on top of c1).
    c1 = github_types.GitHubBranchCommit(
        {
            "sha": github_types.SHAType("c1f"),
            "parents": [],
            "commit": {"message": "foobar"},
        }
    )
    c2 = github_types.GitHubBranchCommit(
        {
            "sha": github_types.SHAType("c2"),
            "parents": [c1],
            "commit": {"message": "foobar"},
        }
    )
    commits.return_value = [c1, c2]

    client = mock.Mock()
    client.auth.get_access_token.return_value = "<token>"
    client.items.side_effect = fake_get_github_pulls_from_sha

    installation = context.Installation(
        github_types.GitHubAccountIdType(123),
        github_types.GitHubLogin("user"),
        subscription.Subscription(redis_cache, 0, False, "", frozenset()),
        client,
        redis_cache,
    )
    repository = context.Repository(installation, gh_repo)
    ctxt = await context.Context.create(
        repository,
        {
            "labels": [],
            "draft": False,
            "merge_commit_sha": github_types.SHAType(""),
            "title": "",
            "commits": 1,
            "rebaseable": False,
            "maintainer_can_modify": False,
            "id": github_types.GitHubPullRequestId(0),
            "number": github_types.GitHubPullRequestNumber(6),
            "merged": True,
            "state": "closed",
            "html_url": "<html_url>",
            "changed_files": 1,
            "base": {
                "label": "",
                "sha": github_types.SHAType("sha"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "ref": github_types.GitHubRefType("ref"),
                "repo": {
                    "full_name": "user/ref",
                    "name": github_types.GitHubRepositoryName("name"),
                    "private": False,
                    "id": github_types.GitHubRepositoryIdType(0),
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                        "avatar_url": "",
                    },
                    "archived": False,
                    "url": "",
                    "html_url": "",
                    "default_branch": github_types.GitHubRefType(""),
                },
            },
            "head": {
                "label": "",
                "sha": github_types.SHAType("sha"),
                "ref": github_types.GitHubRefType("fork"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "repo": {
                    "full_name": "fork/other",
                    "name": github_types.GitHubRepositoryName("other"),
                    "private": False,
                    "archived": False,
                    "url": "",
                    "html_url": "",
                    "default_branch": github_types.GitHubRefType(""),
                    "id": github_types.GitHubRepositoryIdType(0),
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                        "avatar_url": "",
                    },
                },
            },
            "user": {
                "login": github_types.GitHubLogin("user"),
                "id": github_types.GitHubAccountIdType(0),
                "type": "User",
                "avatar_url": "",
            },
            "merged_by": None,
            "merged_at": None,
            "mergeable_state": "clean",
        },
    )

    # The same two commits after the rebase, now on top of base_branch.
    base_branch = github_types.GitHubBranchCommitParent(
        {"sha": github_types.SHAType("base_branch")}
    )
    rebased_c1 = github_types.GitHubBranchCommit(
        {
            "sha": github_types.SHAType("rebased_c1"),
            "parents": [base_branch],
            "commit": {"message": "hello c1"},
        }
    )
    rebased_c2 = github_types.GitHubBranchCommit(
        {
            "sha": github_types.SHAType("rebased_c2"),
            "parents": [rebased_c1],
            "commit": {"message": "hello c2"},
        }
    )

    async def fake_get_github_commit_from_sha(url, api_version=None):
        # Serve the rebased commits by their /commits/<sha> URL.
        if url.endswith("/commits/rebased_c1"):
            return rebased_c1
        if url.endswith("/commits/rebased_c2"):
            return rebased_c2
        raise RuntimeError(f"Unknown URL {url}")

    client.item.side_effect = fake_get_github_commit_from_sha

    assert await duplicate_pull._get_commits_to_cherrypick(ctxt, rebased_c2) == [
        rebased_c1,
        rebased_c2,
    ]
async def test_get_commits_to_cherry_pick_merge(
    commits: mock.PropertyMock,
    redis_cache: utils.RedisCache,
) -> None:
    """For a merge commit, the PR's own commits are the ones to cherry-pick."""
    # The PR's two commits (c2 on top of c1).
    c1 = github_types.GitHubBranchCommit(
        {
            "sha": github_types.SHAType("c1f"),
            "parents": [],
            "commit": {"message": "foobar"},
        }
    )
    c2 = github_types.GitHubBranchCommit(
        {
            "sha": github_types.SHAType("c2"),
            "parents": [c1],
            "commit": {"message": "foobar"},
        }
    )

    async def fake_commits():
        return [c1, c2]

    commits.return_value = fake_commits()

    client = mock.Mock()
    client.auth.get_access_token.return_value = "<token>"

    gh_owner = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("user"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    gh_repo = github_types.GitHubRepository(
        {
            "full_name": "user/name",
            "name": github_types.GitHubRepositoryName("name"),
            "private": False,
            "id": github_types.GitHubRepositoryIdType(0),
            "owner": gh_owner,
            "archived": False,
            "url": "",
            "html_url": "",
            "default_branch": github_types.GitHubRefType("ref"),
        }
    )
    installation = context.Installation(
        github_types.GitHubAccountIdType(123),
        github_types.GitHubLogin("user"),
        subscription.Subscription(redis_cache, 0, False, "", frozenset()),
        client,
        redis_cache,
    )
    repository = context.Repository(installation, gh_repo)
    ctxt = await context.Context.create(
        repository,
        {
            "number": github_types.GitHubPullRequestNumber(6),
            "commits": 1,
            "merged": True,
            "state": "closed",
            "html_url": "<html_url>",
            "id": github_types.GitHubPullRequestId(0),
            "maintainer_can_modify": True,
            "labels": [],
            "rebaseable": True,
            "draft": True,
            "merge_commit_sha": None,
            "title": "foobar",
            "changed_files": 1,
            "base": {
                "label": "user:ref",
                "sha": github_types.SHAType("sha"),
                "ref": github_types.GitHubRefType("ref"),
                "user": gh_owner,
                "repo": github_types.GitHubRepository(
                    {
                        "full_name": "user/ref",
                        "name": github_types.GitHubRepositoryName("name"),
                        "private": False,
                        "id": github_types.GitHubRepositoryIdType(0),
                        "owner": gh_owner,
                        "archived": False,
                        "url": "",
                        "html_url": "",
                        "default_branch": github_types.GitHubRefType("ref"),
                    }
                ),
            },
            "head": {
                "label": "user:ref",
                "sha": github_types.SHAType("sha"),
                "user": gh_owner,
                "ref": github_types.GitHubRefType("fork"),
                "repo": github_types.GitHubRepository(
                    {
                        "full_name": "fork/other",
                        "name": github_types.GitHubRepositoryName("name"),
                        "private": False,
                        "id": github_types.GitHubRepositoryIdType(0),
                        "owner": gh_owner,
                        "archived": False,
                        "url": "",
                        "html_url": "",
                        "default_branch": github_types.GitHubRefType("ref"),
                    }
                ),
            },
            "user": gh_owner,
            "merged_at": None,
            "merged_by": None,
            "mergeable_state": "clean",
        },
    )

    # The merge commit has two parents: the base branch and the PR head (c2).
    base_branch = github_types.GitHubBranchCommit(
        {
            "sha": github_types.SHAType("base_branch"),
            "parents": [],
            "commit": {"message": "foobar"},
        }
    )
    merge_commit = github_types.GitHubBranchCommit(
        {
            "sha": github_types.SHAType("merge_commit"),
            "parents": [base_branch, c2],
            "commit": {"message": "foobar"},
        }
    )

    assert await duplicate_pull._get_commits_to_cherrypick(ctxt, merge_commit) == [
        c1,
        c2,
    ]
async def test_signals(redis_cache):
    """signals.setup() registers exactly one signal and send() forwards to it.

    Builds a minimal closed pull request context, then checks that:
    * SIGNALS is empty before setup() and holds one entry after,
    * signals.send() calls the registered noop signal with the context and
      the event name.
    """
    gh_owner = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("user"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    gh_repo = github_types.GitHubRepository(
        {
            "archived": False,
            "url": "",
            "default_branch": github_types.GitHubRefType(""),
            "id": github_types.GitHubRepositoryIdType(456),
            "full_name": "user/ref",
            "name": github_types.GitHubRepositoryName("name"),
            "private": False,
            "owner": gh_owner,
        }
    )
    client = mock.AsyncMock()
    client.auth.get_access_token.return_value = "<token>"
    sub = subscription.Subscription(redis_cache, 0, False, "", frozenset())
    installation = context.Installation(
        gh_owner["id"],
        gh_owner["login"],
        sub,
        client,
        redis_cache,
    )
    repository = context.Repository(installation, gh_repo["name"], gh_repo["id"])
    ctxt = await context.Context.create(
        repository,
        {
            "title": "",
            "id": github_types.GitHubPullRequestId(0),
            "maintainer_can_modify": False,
            "rebaseable": False,
            "draft": False,
            "merge_commit_sha": None,
            "labels": [],
            "number": github_types.GitHubPullRequestNumber(6),
            "commits": 1,
            "merged": True,
            "state": "closed",
            "changed_files": 1,
            "html_url": "<html_url>",
            "base": {
                # FIX: the original literal repeated the "label" key with the
                # same value; it is now present only once.
                "label": "",
                "sha": github_types.SHAType("sha"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "ref": github_types.GitHubRefType("ref"),
                "repo": gh_repo,
            },
            "head": {
                "label": "",
                "sha": github_types.SHAType("old-sha-one"),
                "ref": github_types.GitHubRefType("fork"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "repo": {
                    "archived": False,
                    "url": "",
                    "default_branch": github_types.GitHubRefType(""),
                    "id": github_types.GitHubRepositoryIdType(123),
                    "full_name": "fork/other",
                    "name": github_types.GitHubRepositoryName("other"),
                    "private": False,
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                        "avatar_url": "",
                    },
                },
            },
            "user": {
                "login": github_types.GitHubLogin("user"),
                "id": github_types.GitHubAccountIdType(0),
                "type": "User",
                "avatar_url": "",
            },
            "merged_by": None,
            "merged_at": None,
            "mergeable_state": "clean",
        },
    )

    assert len(signals.SIGNALS) == 0
    signals.setup()
    assert len(signals.SIGNALS) == 1

    with mock.patch("mergify_engine_signals.noop.Signal.__call__") as signal_method:
        await signals.send(ctxt, "action.update")
    signal_method.assert_called_once_with(ctxt, "action.update")