def test_generate_all_shadow_scheduler_tasks(responses):
    """Mock the decision task graph plus one artifact per shadow scheduler and
    verify that ``Push.generate_all_shadow_scheduler_tasks`` yields each
    scheduler's name together with its optimized task labels, in alphabetical
    order of scheduler name.

    Fix over the original: the counter local was named ``id``, shadowing the
    builtin — renamed to ``id_gen``. Behavior is unchanged.
    """
    rev = "a" * 40
    shadow_schedulers = (
        (
            "bar",
            ["task-1", "task-3", "task-4"],
        ),  # names will be generated alphabetically
        ("foo", ["task-2", "task-4"]),
    )

    push = Push(rev)

    # The decision task is resolved through the index.
    responses.add(
        responses.GET,
        get_index_url(push.index + ".taskgraph.decision"),
        json={"taskId": 1},
        status=200,
    )

    # task-graph.json advertises one shadow-scheduler source task per scheduler.
    id_gen = count(2)
    responses.add(
        responses.GET,
        get_artifact_url(1, "public/task-graph.json"),
        json={
            next(id_gen): {"label": f"source-test-shadow-scheduler-{s[0]}"}
            for s in shadow_schedulers
        },
        status=200,
    )

    # Each scheduler gets an index entry and an optimized-tasks artifact.
    id_gen = count(2)
    for ss in shadow_schedulers:
        s_id = next(id_gen)
        responses.add(
            responses.GET,
            get_index_url(f"{push.index}.source.shadow-scheduler-{ss[0]}"),
            json={"taskId": s_id},
            status=200,
        )

        responses.add(
            responses.GET,
            get_artifact_url(s_id, "public/shadow-scheduler/optimized-tasks.json"),
            stream=True,
            json={next(id_gen): {"label": task} for task in ss[1]},
            status=200,
        )

    # retrieve the data
    for i, (name, tasks) in enumerate(push.generate_all_shadow_scheduler_tasks()):
        print(i, name, tasks)
        assert name == shadow_schedulers[i][0]
        assert tasks == set(shadow_schedulers[i][1])
def test_backfill_trigger_hook_error(responses, create_task):
    """A 500 from the hooks trigger endpoint must surface as
    ``TaskclusterRestFailure`` when backfilling a task."""
    rev = "a" * 40
    branch = "autoland"
    push = Push(rev, branch)

    # Decision task lookup via the production index.
    decision_index_url = f"{PRODUCTION_TASKCLUSTER_ROOT_URL}/api/index/v1/task/gecko.v2.{branch}.revision.{rev}.taskgraph.decision"
    responses.add(
        responses.GET, decision_index_url, status=200, json={"taskId": "a" * 10}
    )

    # Serve a valid actions.json so the backfill action can be located.
    responses.add(
        responses.GET,
        get_artifact_url(push.decision_task.id, "public/actions.json"),
        status=200,
        json=ACTIONS_ARTIFACT_EXTRACT,
    )

    # Credentials must be present for the hook to be triggered at all.
    config._config["taskcluster_firefox_ci"] = {
        "client_id": "a client id",
        "access_token": "an access token",
    }

    action = ACTIONS_ARTIFACT_EXTRACT["actions"][0]
    group_id = action["hookGroupId"]
    escaped_hook_id = action["hookId"].replace("/", "%2F")

    # The trigger call itself fails server-side.
    responses.add(
        responses.POST,
        f"{PRODUCTION_TASKCLUSTER_ROOT_URL}/api/hooks/v1/hooks/{group_id}/{escaped_hook_id}/trigger",
        status=500,
    )

    task = create_task(label="foobar")
    with pytest.raises(TaskclusterRestFailure):
        task.backfill(push)
def test_backfill_incomplete_secret(responses, secret_content, create_task):
    """Backfilling with an incomplete Taskcluster secret must fail with a
    descriptive ``AssertionError`` before any hook is triggered."""
    rev = "a" * 40
    branch = "autoland"
    push = Push(rev, branch)

    # Decision task lookup via the production index.
    decision_index_url = f"{PRODUCTION_TASKCLUSTER_ROOT_URL}/api/index/v1/task/gecko.v2.{branch}.revision.{rev}.taskgraph.decision"
    responses.add(
        responses.GET, decision_index_url, status=200, json={"taskId": "a" * 10}
    )

    # actions.json is served normally; the failure comes from the secret.
    responses.add(
        responses.GET,
        get_artifact_url(push.decision_task.id, "public/actions.json"),
        status=200,
        json=ACTIONS_ARTIFACT_EXTRACT,
    )

    # Update configuration with the (incomplete) secret under test.
    config._config["taskcluster_firefox_ci"] = secret_content

    task = create_task(label="foobar")
    with pytest.raises(
        AssertionError,
        match="Missing Taskcluster Firefox CI credentials in mozci config secret",
    ):
        task.backfill(push)
def test_missing_artifacts_both_deployments(responses, create_task):
    """``Task.get_artifact`` raises ``ArtifactNotFound`` when the artifact is
    missing from both the new and the old deployment.

    Fix over the original: this function was also named ``test_missing_artifacts``,
    identical to a later definition in the file, so Python's last-definition-wins
    shadowed it and pytest never collected or ran it. Renamed (still ``test_``-
    prefixed, so pytest discovery is unaffected) to make it run again.
    """
    artifact = "public/artifact.txt"
    task = create_task(label="foobar")

    # First we'll check the new deployment.
    responses.add(
        responses.GET,
        get_artifact_url(task.id, artifact),
        status=404,
    )

    # Then we'll check the old deployment.
    responses.add(
        responses.GET,
        get_artifact_url(task.id, artifact, old_deployment=True),
        status=404,
    )

    with pytest.raises(ArtifactNotFound):
        task.get_artifact(artifact)
# NOTE(review): this name duplicates an earlier `test_missing_artifacts` in the
# file; Python keeps only the last definition, so the earlier one is silently
# shadowed and never runs — one of the two should be renamed.
def test_missing_artifacts(responses, create_task):
    # A 404 for the artifact must surface as ArtifactNotFound from get_artifact.
    # Only the primary deployment URL is mocked here; sibling tests also mock an
    # old_deployment fallback URL — presumably get_artifact does not probe it in
    # this variant, or this test predates that fallback. TODO confirm.
    artifact = "public/artifact.txt"
    task = create_task(label="foobar")
    responses.add(
        responses.GET,
        get_artifact_url(task.id, artifact),
        status=404,
    )
    with pytest.raises(ArtifactNotFound):
        task.get_artifact(artifact)
def test_get_shadow_scheduler_tasks_fallback(responses):
    """When the JSON artifact 404s on both deployments, ``get_shadow_scheduler_tasks``
    falls back to the legacy plain-text ``optimized_tasks.list`` artifact."""
    rev = "a" * 40
    name, labels = "foo", ["task-2", "task-4"]
    scheduler_task_id = 1

    push = Push(rev)

    # Index lookup for the shadow-scheduler source task.
    responses.add(
        responses.GET,
        get_index_url(f"{push.index}.source.shadow-scheduler-{name}"),
        json={"taskId": scheduler_task_id},
        status=200,
    )

    # JSON artifact is missing on the new deployment...
    responses.add(
        responses.GET,
        get_artifact_url(scheduler_task_id, "public/shadow-scheduler/optimized-tasks.json"),
        status=404,
    )

    # ...and the utility file will also try the old deployment.
    responses.add(
        responses.GET,
        get_artifact_url(scheduler_task_id, "public/shadow-scheduler/optimized-tasks.json", old_deployment=True),
        status=404,
    )

    # The legacy newline-separated list is what finally succeeds.
    responses.add(
        responses.GET,
        get_artifact_url(scheduler_task_id, "public/shadow-scheduler/optimized_tasks.list"),
        stream=True,
        body="\n".join(labels),
        status=200,
    )

    assert push.get_shadow_scheduler_tasks(name) == set(labels)
def test_retrigger_should_retrigger(responses, create_task):
    """A task tagged ``retrigger: true`` must fire the retrigger hook with the
    expected payload and return the new task's id."""
    rev = "a" * 40
    branch = "autoland"
    push = Push(rev, branch)

    # Decision task lookup via the production index.
    decision_index_url = f"{PRODUCTION_TASKCLUSTER_ROOT_URL}/api/index/v1/task/gecko.v2.{branch}.revision.{rev}.taskgraph.decision"
    responses.add(
        responses.GET, decision_index_url, status=200, json={"taskId": "a" * 10}
    )

    # actions.json carrying the retrigger action definition.
    responses.add(
        responses.GET,
        get_artifact_url(push.decision_task.id, "public/actions.json"),
        status=200,
        json=RETRIGGER_ACTIONS_ARTIFACT_EXTRACT,
    )

    # Credentials are required to trigger the hook.
    config._config["taskcluster_firefox_ci"] = {
        "client_id": "a client id",
        "access_token": "an access token",
    }

    task = create_task(label="foobar", tags={"retrigger": "true"})

    action = RETRIGGER_ACTIONS_ARTIFACT_EXTRACT["actions"][0]
    group_id = action["hookGroupId"]
    escaped_hook_id = action["hookId"].replace("/", "%2F")

    # The hook payload must carry the user-supplied input and task references.
    expected_payload = copy.deepcopy(action["hookPayload"])
    expected_payload["user"] = {
        "input": {"times": 3},
        "taskGroupId": push.decision_task.id,
        "taskId": task.id,
    }

    # The trigger endpoint only matches when the payload is exactly as expected.
    responses.add(
        responses.POST,
        f"{PRODUCTION_TASKCLUSTER_ROOT_URL}/api/hooks/v1/hooks/{group_id}/{escaped_hook_id}/trigger",
        status=200,
        json={"status": {"taskId": "new-retrigger-task"}},
        match=[matchers.json_params_matcher(expected_payload)],
    )

    assert task.retrigger(push) == "new-retrigger-task"
def test_backfill_missing_actions_artifact(responses, create_task):
    """Backfill must raise ``ArtifactNotFound`` when the decision task has no
    ``public/actions.json`` artifact."""
    rev = "a" * 40
    branch = "autoland"
    push = Push(rev, branch)

    # Decision task lookup via the production index succeeds...
    decision_index_url = f"{PRODUCTION_TASKCLUSTER_ROOT_URL}/api/index/v1/task/gecko.v2.{branch}.revision.{rev}.taskgraph.decision"
    responses.add(
        responses.GET, decision_index_url, status=200, json={"taskId": "a" * 10}
    )

    # ...but fetching actions.json 404s.
    responses.add(
        responses.GET,
        get_artifact_url(push.decision_task.id, "public/actions.json"),
        status=404,
    )

    task = create_task(label="foobar")
    with pytest.raises(ArtifactNotFound):
        task.backfill(push)
def test_backfill_wrong_action_kind(responses, create_task):
    """Backfill must refuse (AssertionError) an action whose ``kind`` is not a
    hook, since only hook-kind actions can be triggered."""
    rev = "a" * 40
    branch = "autoland"
    push = Push(rev, branch)

    # Decision task lookup via the production index.
    decision_index_url = f"{PRODUCTION_TASKCLUSTER_ROOT_URL}/api/index/v1/task/gecko.v2.{branch}.revision.{rev}.taskgraph.decision"
    responses.add(
        responses.GET, decision_index_url, status=200, json={"taskId": "a" * 10}
    )

    # Serve a corrupted actions.json whose first action has the wrong kind.
    broken_actions = copy.deepcopy(ACTIONS_ARTIFACT_EXTRACT)
    broken_actions["actions"][0]["kind"] = "not a hook"
    responses.add(
        responses.GET,
        get_artifact_url(push.decision_task.id, "public/actions.json"),
        status=200,
        json=broken_actions,
    )

    task = create_task(label="foobar")
    with pytest.raises(AssertionError):
        task.backfill(push)
def test_generate_all_shadow_scheduler_config_groups(responses):
    """Mock the decision task graph plus per-scheduler optimized-tasks artifacts
    (with ``test_manifests`` attributes) and verify that
    ``Push.generate_all_shadow_scheduler_config_groups`` yields each scheduler's
    name with the expected set of (config, group) pairs.

    Each ``shadow_schedulers`` entry is (name, [(label, groups), ...],
    expected_config_groups).

    Fix over the original: the counter local was named ``id``, shadowing the
    builtin — renamed to ``id_gen``. Behavior is unchanged.
    """
    rev = "a" * 40
    shadow_schedulers = (
        (
            "bar",
            [
                (
                    "test-linux1804-64/debug-xpcshell-spi-nw-e10s-1",
                    ["group1", "group5"],
                ),
                ("test-linux1804-64/debug-xpcshell-spi-nw-e10s-2", ["group2"]),
                ("test-windows7-32/opt-xpcshell-e10s-1", ["group3"]),
            ],
            {
                ("test-linux1804-64/debug-*-spi-nw-e10s", "group2"),
                ("test-linux1804-64/debug-*-spi-nw-e10s", "group5"),
                ("test-linux1804-64/debug-*-spi-nw-e10s", "group1"),
                ("test-windows7-32/opt-*-e10s", "group3"),
            },
        ),
        (
            "foo",
            [
                ("test-macosx1014-64/opt-xpcshell-e10s-1", ["group4"]),
                (
                    "test-android-em-7.0-x86_64/debug-geckoview-xpcshell-e10s-1",
                    ["group3"],
                ),
            ],
            {
                ("test-android-em-7.0-x86_64/debug-geckoview-*-e10s", "group3"),
                ("test-macosx1014-64/opt-*-e10s", "group4"),
            },
        ),
    )

    push = Push(rev)

    # The decision task is resolved through the index.
    responses.add(
        responses.GET,
        get_index_url(push.index + ".taskgraph.decision"),
        json={"taskId": 1},
        status=200,
    )

    # task-graph.json advertises one shadow-scheduler source task per scheduler.
    id_gen = count(2)
    responses.add(
        responses.GET,
        get_artifact_url(1, "public/task-graph.json"),
        json={
            next(id_gen): {"label": f"source-test-shadow-scheduler-{s[0]}"}
            for s in shadow_schedulers
        },
        status=200,
    )

    # Each scheduler gets an index entry and an optimized-tasks artifact whose
    # tasks carry test_manifests attributes.
    id_gen = count(2)
    for ss in shadow_schedulers:
        s_id = next(id_gen)
        responses.add(
            responses.GET,
            get_index_url(f"{push.index}.source.shadow-scheduler-{ss[0]}"),
            json={"taskId": s_id},
            status=200,
        )

        responses.add(
            responses.GET,
            get_artifact_url(s_id, "public/shadow-scheduler/optimized-tasks.json"),
            stream=True,
            json={
                next(id_gen): {
                    "label": label,
                    "attributes": {"test_manifests": groups},
                }
                for label, groups in ss[1]
            },
            status=200,
        )

    # retrieve the data
    for i, (name, config_groups) in enumerate(
        push.generate_all_shadow_scheduler_config_groups()
    ):
        print(i, name, config_groups)
        assert name == shadow_schedulers[i][0]
        assert config_groups == shadow_schedulers[i][2]