Example #1
def test_get_task_failure(mock_taskcluster, TASK_NOT_FOUND):
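    # mock_taskcluster is assumed to come from the suite's conftest, activating
    # `responses` and pointing the taskcluster helpers at the
    # http://taskcluster.test mock used below.
    # A 500 whose error code is not the usual "not found" one must not be
    # swallowed: the underlying TaskclusterRestFailure should propagate.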
    err = TASK_NOT_FOUND.copy()
    err["code"] = "RandomError"
    responses.add(
        responses.GET,
        "http://taskcluster.test/api/index/v1/task/gecko.v2.mozilla-central.revision.b2a9a4bb5c94de179ae7a3f52fde58c0e2897498.taskgraph.decision",
        json=err,
        status=500,
    )

    with pytest.raises(TaskclusterRestFailure, match="Indexed task not found"):
        taskcluster.get_decision_task(
            "mozilla-central", "b2a9a4bb5c94de179ae7a3f52fde58c0e2897498")
Example #2
def test_get_task(mock_taskcluster, DECISION_TASK_ID, LATEST_DECISION):
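    # Happy path: the index lookup succeeds and get_decision_task returns the
    # decision task id carried by the LATEST_DECISION fixture payload.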
    responses.add(
        responses.GET,
        "http://taskcluster.test/api/index/v1/task/gecko.v2.mozilla-central.revision.7828a10a94b6afb78d18d9b7b83e7aa79337cc24.taskgraph.decision",
        json=LATEST_DECISION,
        status=200,
    )
    assert (taskcluster.get_decision_task(
        "mozilla-central",
        "7828a10a94b6afb78d18d9b7b83e7aa79337cc24") == DECISION_TASK_ID)
Example #3
def test_get_task_not_found(mock_taskcluster, TASK_NOT_FOUND):
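    # A plain 404 ("Indexed task not found") means no decision task was indexed
    # for this revision; get_decision_task maps it to None instead of raising.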
    responses.add(
        responses.GET,
        "http://taskcluster.test/api/index/v1/task/gecko.v2.mozilla-central.revision.b2a9a4bb5c94de179ae7a3f52fde58c0e2897498.taskgraph.decision",
        json=TASK_NOT_FOUND,
        status=404,
    )

    assert (taskcluster.get_decision_task(
        "mozilla-central", "b2a9a4bb5c94de179ae7a3f52fde58c0e2897498") is None)
Example #4
    def __init__(
        self,
        repository,
        revision,
        task_name_filter,
        cache_root,
        working_dir,
        required_platforms=(),  # immutable default avoids the mutable-default pitfall
    ):
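        # Prepare the working directories for downloaded artifacts and
        # generated reports; working_dir is created if it does not exist.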
        os.makedirs(working_dir, exist_ok=True)
        self.artifacts_dir = os.path.join(working_dir, "ccov-artifacts")
        self.reports_dir = os.path.join(working_dir, "ccov-reports")
        logger.info(
            "Local storage initialized.",
            artifacts=self.artifacts_dir,
            reports=self.reports_dir,
        )

        self.repository = repository
        self.revision = revision
        assert (self.revision is not None
                and self.repository is not None), "Missing repo/revision"
        logger.info("Mercurial setup",
                    repository=self.repository,
                    revision=self.revision)

        if cache_root is not None:
            assert os.path.isdir(
                cache_root), f"Cache root {cache_root} is not a dir."
            self.repo_dir = os.path.join(cache_root, self.branch)

        # Load coverage tasks for all platforms
        decision_task_id = taskcluster.get_decision_task(
            self.branch, self.revision)

        assert decision_task_id is not None, "The decision task couldn't be found"

        group = taskcluster.get_task_details(decision_task_id)["taskGroupId"]

        test_tasks = [
            task for task in taskcluster.get_tasks_in_group(group)
            if taskcluster.is_coverage_task(task["task"])
        ]

        # Check the required platforms are present
        platforms = set(
            taskcluster.get_platform(test_task["task"])
            for test_task in test_tasks)
        for platform in required_platforms:
            assert platform in platforms, f"{platform} missing in the task group."

        self.artifactsHandler = ArtifactsHandler(test_tasks,
                                                 self.artifacts_dir,
                                                 task_name_filter)


def trigger_missing(server_address: str, out_dir: str = ".") -> None:
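    """Trigger coverage ingestion for recent mozilla-central pushes that do
    not yet have a covdir report in GCS, remembering which revisions were
    already triggered across runs."""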
    triggered_revisions_path = os.path.join(out_dir, "triggered_revisions.zst")

    url = f"https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.{secrets[secrets.APP_CHANNEL]}.cron.latest/artifacts/public/triggered_revisions.zst"  # noqa
    r = requests.head(url, allow_redirects=True)
    if r.status_code != 404:
        utils.download_file(url, triggered_revisions_path)

    # Load the set of previously triggered revisions; start empty on first run.
    try:
        dctx = zstandard.ZstdDecompressor()
        with open(triggered_revisions_path, "rb") as zf:
            with dctx.stream_reader(zf) as reader:
                with io.TextIOWrapper(reader, encoding="ascii") as f:
                    triggered_revisions = set(f.read().splitlines())
    except FileNotFoundError:
        triggered_revisions = set()

    # Get all mozilla-central pushes from the past year (past 30 days outside
    # of production).
    days = 365 if secrets[secrets.APP_CHANNEL] == "production" else 30
    a_year_ago = datetime.utcnow() - timedelta(days=days)
    with hgmo.HGMO(server_address=server_address) as hgmo_server:
        data = hgmo_server.get_pushes(
            startDate=a_year_ago.strftime("%Y-%m-%d"),
            full=False,
            tipsonly=True)

    revisions = [(push_data["changesets"][0], int(push_data["date"]))
                 for push_data in data["pushes"].values()]

    logger.info(f"{len(revisions)} pushes in the past year")

    assert (secrets[secrets.GOOGLE_CLOUD_STORAGE]
            is not None), "Missing GOOGLE_CLOUD_STORAGE secret"
    bucket = get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE])

    missing_revisions = []
    for revision, timestamp in revisions:
        # Skip revisions that have already been triggered. If they are still missing,
        # it means there is a problem that is preventing us from ingesting them.
        if revision in triggered_revisions:
            continue

        # If the revision was already ingested, we don't need to trigger ingestion for it again.
        if uploader.gcp_covdir_exists(bucket, "mozilla-central", revision,
                                      "all", "all"):
            triggered_revisions.add(revision)
            continue

        missing_revisions.append((revision, timestamp))

    logger.info(f"{len(missing_revisions)} missing pushes in the past year")

    yesterday = int(datetime.timestamp(datetime.utcnow() - timedelta(days=1)))

    # slugId() creates a fresh Taskcluster-style identifier used to group
    # every task triggered by this run.
    task_group_id = slugId()
    logger.info(f"Triggering tasks in the {task_group_id} group")
    triggered = 0
    for revision, timestamp in reversed(missing_revisions):
        # If it's older than yesterday, we assume the group finished.
        # If it is newer than yesterday, we load the group and check if all tasks in it finished.
        if timestamp > yesterday:
            decision_task_id = taskcluster.get_decision_task(
                "mozilla-central", revision)
            if decision_task_id is None:
                continue

            group = taskcluster.get_task_details(
                decision_task_id)["taskGroupId"]
            if not all(task["status"]["state"] in taskcluster.FINISHED_STATUSES
                       for task in taskcluster.get_tasks_in_group(group)
                       if taskcluster.is_coverage_task(task["task"])):
                continue

        trigger_task(task_group_id, revision)
        triggered_revisions.add(revision)
        triggered += 1
        if triggered == MAXIMUM_TRIGGERS:
            break

    # Persist the updated set (zstd-compressed, newline-separated) so the next
    # run skips revisions that were already triggered.
    cctx = zstandard.ZstdCompressor(threads=-1)
    with open(triggered_revisions_path, "wb") as zf:
        with cctx.stream_writer(zf) as compressor:
            with io.TextIOWrapper(compressor, encoding="ascii") as f:
                f.write("\n".join(triggered_revisions))