Example #1
    def run(self) -> None:
        trigger_missing.trigger_missing(config.MOZILLA_CENTRAL_REPOSITORY)

        # Index the task in the TaskCluster index at the given revision and as "latest".
        # Given that all tasks have the same rank, the latest task that finishes will
        # overwrite the "latest" entry.
        self.index_task([
            "project.relman.code-coverage.{}.crontrigger.{}".format(
                secrets[secrets.APP_CHANNEL], self.revision),
            "project.relman.code-coverage.{}.crontrigger.latest".format(
                secrets[secrets.APP_CHANNEL]),
        ])
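Both routes can later be resolved through the TaskCluster index HTTP API. Below is a minimal sketch, not part of the example above, assuming the "production" channel and the firefox-ci-tc index endpoint that also appear in the test further down; the latest_crontrigger_task_id helper is hypothetical.

# Minimal sketch: look up the task indexed under the "latest" crontrigger route.
# Assumes the "production" channel value and the firefox-ci-tc index endpoint
# used in the test below; the helper name is illustrative only.
import requests

INDEX_URL = (
    "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/"
    "project.relman.code-coverage.production.crontrigger.latest"
)

def latest_crontrigger_task_id() -> str:
    # All tasks are indexed with the same rank, so the index returns whichever
    # task finished (and was indexed) last.
    response = requests.get(INDEX_URL)
    response.raise_for_status()
    return response.json()["taskId"]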
Example #2
    def run(self) -> None:
        trigger_missing.trigger_missing(config.MOZILLA_CENTRAL_REPOSITORY)

        self.retrieve_source_and_artifacts()

        commit_coverage.generate(self.repository, self.repo_dir)

        logger.info("Generating zero coverage reports")
        zc = ZeroCov(self.repo_dir)
        zc.generate(self.artifactsHandler.get(), self.revision)

        # This is disabled as it is not used yet.
        # logger.info("Generating chunk mapping")
        # chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as "latest".
        # Given that all tasks have the same rank, the latest task that finishes will
        # overwrite the "latest" entry.
        self.index_task([
            "project.relman.code-coverage.{}.cron.{}".format(
                secrets[secrets.APP_CHANNEL], self.revision),
            "project.relman.code-coverage.{}.cron.latest".format(
                secrets[secrets.APP_CHANNEL]),
        ])
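Example #3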
def test_trigger_from_preexisting(monkeypatch, tmpdir, mock_secrets,
                                  mock_taskcluster, mock_phabricator,
                                  fake_hg_repo):
    tmp_path = tmpdir.strpath

    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n")
    commit(hg, 1)

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision2 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    add_file(hg, local, "file2", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision3 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    add_file(hg, local, "file3", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision4 = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    responses.add(
        responses.HEAD,
        "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.production.cron.latest/artifacts/public/triggered_revisions.zst",
        status=200,
    )

    responses.add(
        responses.GET,
        "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.production.cron.latest/artifacts/public/triggered_revisions.zst",
        status=200,
        body=zstandard.ZstdCompressor().compress(
            f"{revision2}\n{revision3}".encode("ascii")),
    )

    copy_pushlog_database(remote, local)

    myBucket = {}

    def get_bucket(acc):
        return myBucket

    monkeypatch.setattr(trigger_missing, "get_bucket", get_bucket)

    gcp_covdir_exists_calls = 0

    def gcp_covdir_exists(bucket, repository, revision, platform, suite):
        nonlocal gcp_covdir_exists_calls
        gcp_covdir_exists_calls += 1
        assert bucket == myBucket
        assert repository == "mozilla-central"
        assert platform == "all"
        assert suite == "all"
        return revision == revision3

    monkeypatch.setattr(uploader, "gcp_covdir_exists", gcp_covdir_exists)

    def slugId():
        return "myGroupId"

    monkeypatch.setattr(trigger_missing, "slugId", slugId)

    trigger_hook_calls = 0

    def get_service(serv):
        assert serv == "hooks"

        class HooksService:
            def triggerHook(self, hook_group, hook_id, payload):
                nonlocal trigger_hook_calls
                assert hook_group == "project-relman"
                assert hook_id == "code-coverage-repo-production"
                assert payload == {
                    "REPOSITORY": "https://hg.mozilla.org/mozilla-central",
                    "REVISION": revision4,
                    "taskGroupId": "myGroupId",
                    "taskName": f"covdir for {revision4}",
                }
                trigger_hook_calls += 1

        return HooksService()

    monkeypatch.setattr(taskcluster_config, "get_service", get_service)

    get_decision_task_calls = 0

    def get_decision_task(branch, revision):
        nonlocal get_decision_task_calls
        assert branch == "mozilla-central"
        assert revision == revision4
        get_decision_task_calls += 1
        return f"decisionTask-{revision}"

    monkeypatch.setattr(taskcluster, "get_decision_task", get_decision_task)

    get_task_details_calls = 0

    def get_task_details(decision_task_id):
        nonlocal get_task_details_calls
        assert decision_task_id == f"decisionTask-{revision4}"
        get_task_details_calls += 1
        return {"taskGroupId": f"decisionTaskGroup-{revision4}"}

    monkeypatch.setattr(taskcluster, "get_task_details", get_task_details)

    get_tasks_in_group_calls = 0

    def get_tasks_in_group(group_id):
        nonlocal get_tasks_in_group_calls
        assert group_id == f"decisionTaskGroup-{revision4}"
        get_tasks_in_group_calls += 1
        return [{
            "status": {
                "state": "completed",
            },
            "task": {
                "metadata": {
                    "name": "build-linux64-ccov/opt",
                }
            },
        }]

    monkeypatch.setattr(taskcluster, "get_tasks_in_group", get_tasks_in_group)

    with hgmo.HGMO(repo_dir=local) as hgmo_server:
        trigger_missing.trigger_missing(hgmo_server.server_address,
                                        out_dir=tmp_path)

    assert gcp_covdir_exists_calls == 1
    assert trigger_hook_calls == 1
    assert get_decision_task_calls == 1
    assert get_task_details_calls == 1
    assert get_tasks_in_group_calls == 1

    dctx = zstandard.ZstdDecompressor()
    with open(os.path.join(tmp_path, "triggered_revisions.zst"), "rb") as zf:
        with dctx.stream_reader(zf) as reader:
            with io.TextIOWrapper(reader, encoding="ascii") as f:
                result = set(f.read().splitlines())

    assert result == {revision2, revision3, revision4}
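The triggered_revisions.zst artifact that the test mocks over HTTP and then re-reads from out_dir can also be fetched and decoded outside a test. A minimal sketch, assuming the production artifact URL mocked above; the previously_triggered_revisions helper is illustrative, not project code.

# Minimal sketch: download and decode the set of already-triggered revisions.
# Assumes the production artifact URL mocked in the test above; the helper
# name is hypothetical.
import requests
import zstandard

ARTIFACT_URL = (
    "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/"
    "project.relman.code-coverage.production.cron.latest/"
    "artifacts/public/triggered_revisions.zst"
)

def previously_triggered_revisions() -> set:
    response = requests.get(ARTIFACT_URL)
    if response.status_code != 200:
        # No cron task indexed yet: nothing has been triggered before.
        return set()
    data = zstandard.ZstdDecompressor().decompress(response.content)
    return set(data.decode("ascii").splitlines())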