Example #1
    def go_from_trigger_mozilla_central(self):
        # Check that the covdir report does not already exist
        if uploader.gcp_covdir_exists(self.branch, self.revision):
            logger.warning("Covdir report already on GCP")
            return

        self.retrieve_source_and_artifacts()

        # Check that all JavaScript files present in the coverage artifacts actually exist.
        # If they don't, there might be a bug in the LCOV rewriter.
        for artifact in self.artifactsHandler.get():
            if "jsvm" not in artifact:
                continue

            with zipfile.ZipFile(artifact, "r") as zf:
                for file_name in zf.namelist():
                    with zf.open(file_name, "r") as fl:
                        source_files = [
                            line[3:].decode("utf-8").rstrip()
                            for line in fl
                            if line.startswith(b"SF:")
                        ]
                        missing_files = [
                            f
                            for f in source_files
                            if not os.path.exists(os.path.join(self.repo_dir, f))
                        ]
                        if missing_files:
                            logger.warning(
                                f"{missing_files} are present in coverage reports, but missing from the repository"
                            )

        report = self.generate_covdir()

        paths = uploader.covdir_paths(report)
        expected_extensions = [".js", ".cpp"]
        for extension in expected_extensions:
            assert any(
                path.endswith(extension) for path in paths
            ), "No {} file in the generated report".format(extension)

        # Get pushlog and ask the backend to generate the coverage by changeset
        # data, which will be cached.
        with hgmo.HGMO(self.repo_dir) as hgmo_server:
            changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

        logger.info("Upload changeset coverage data to Phabricator")
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
        changesets_coverage = phabricatorUploader.upload(report, changesets)

        uploader.gcp(self.branch, self.revision, report)

        logger.info("Build uploaded on GCP")
        notify_email(self.revision, changesets, changesets_coverage)
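
The flow above is idempotent thanks to uploader.gcp_covdir_exists. As a minimal sketch of what such a check could look like with the google-cloud-storage client (the bucket name parameter and the {branch}/{revision}.json.zst blob layout are assumptions for illustration, not the project's actual layout):

from google.cloud import storage

def covdir_blob_exists(bucket_name: str, branch: str, revision: str) -> bool:
    # Return True when a covdir report was already uploaded for this push.
    # The blob path below is an assumed layout, chosen only for this sketch.
    client = storage.Client()
    bucket = client.bucket(bucket_name)
    blob = bucket.blob(f"{branch}/{revision}.json.zst")
    return blob.exists()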
Example #2
    def run(self):
        # Check that the covdir report does not already exist
        bucket = gcp.get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE])
        if uploader.gcp_covdir_exists(bucket, self.branch, self.revision,
                                      "all", "all"):
            logger.warning("Full covdir report already on GCP")
            return

        self.retrieve_source_and_artifacts()

        self.check_javascript_files()

        reports = self.build_reports()
        logger.info("Built all covdir reports", nb=len(reports))

        # Retrieve the full report
        full_path = reports.get(("all", "all"))
        assert full_path is not None, "Missing full report (all:all)"
        with open(full_path, "r") as f:
            report = json.load(f)

        # Check extensions
        paths = uploader.covdir_paths(report)
        for extension in [".js", ".cpp"]:
            assert any(
                path.endswith(extension) for path in paths
            ), "No {} file in the generated report".format(extension)

        # Upload reports on GCP
        self.upload_reports(reports)
        logger.info("Uploaded all covdir reports", nb=len(reports))

        # Upload coverage on phabricator
        changesets = self.get_hgmo_changesets()
        coverage = self.upload_phabricator(report, changesets)

        # Send an email on low coverage
        notify_email(self.revision, changesets, coverage)
        logger.info("Sent low coverage email notification")

        # Index on Taskcluster
        self.index_task([
            "project.relman.code-coverage.{}.repo.mozilla-central.{}".format(
                secrets[secrets.APP_CHANNEL], self.revision),
            "project.relman.code-coverage.{}.repo.mozilla-central.latest".format(
                secrets[secrets.APP_CHANNEL]),
        ])
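
The extension assertion depends on uploader.covdir_paths flattening the covdir JSON into plain file paths. Assuming the usual covdir shape, where directory nodes carry a "children" mapping and file leaves do not, a recursive walk could look like this sketch:

def covdir_paths(report: dict) -> list:
    # Collect every file path in a covdir report.
    # Assumption: directory nodes have a non-empty "children" dict, files do not.
    paths = []

    def walk(node: dict, prefix: str) -> None:
        children = node.get("children")
        if not children:
            paths.append(prefix)
            return
        for name, child in children.items():
            walk(child, f"{prefix}/{name}" if prefix else name)

    walk(report, "")
    return paths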
Example #3
def trigger_missing(server_address: str, out_dir: str = ".") -> None:
    triggered_revisions_path = os.path.join(out_dir, "triggered_revisions.zst")

    url = f"https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.{secrets[secrets.APP_CHANNEL]}.cron.latest/artifacts/public/triggered_revisions.zst"  # noqa
    r = requests.head(url, allow_redirects=True)
    if r.status_code != 404:
        utils.download_file(url, triggered_revisions_path)

    try:
        dctx = zstandard.ZstdDecompressor()
        with open(triggered_revisions_path, "rb") as zf:
            with dctx.stream_reader(zf) as reader:
                with io.TextIOWrapper(reader, encoding="ascii") as f:
                    triggered_revisions = set(f.read().splitlines())
    except FileNotFoundError:
        triggered_revisions = set()

    # Get all mozilla-central pushes from the past year (past 30 days outside production).
    days = 365 if secrets[secrets.APP_CHANNEL] == "production" else 30
    start_date = datetime.utcnow() - timedelta(days=days)
    with hgmo.HGMO(server_address=server_address) as hgmo_server:
        data = hgmo_server.get_pushes(
            startDate=start_date.strftime("%Y-%m-%d"),
            full=False,
            tipsonly=True)

    revisions = [(push_data["changesets"][0], int(push_data["date"]))
                 for push_data in data["pushes"].values()]

    logger.info(f"{len(revisions)} pushes in the past year")

    assert (
        secrets[secrets.GOOGLE_CLOUD_STORAGE] is not None
    ), "Missing GOOGLE_CLOUD_STORAGE secret"
    bucket = get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE])

    missing_revisions = []
    for revision, timestamp in revisions:
        # Skip revisions that have already been triggered. If they are still missing,
        # it means there is a problem that is preventing us from ingesting them.
        if revision in triggered_revisions:
            continue

        # If the revision was already ingested, we don't need to trigger ingestion for it again.
        if uploader.gcp_covdir_exists(bucket, "mozilla-central", revision,
                                      "all", "all"):
            triggered_revisions.add(revision)
            continue

        missing_revisions.append((revision, timestamp))

    logger.info(f"{len(missing_revisions)} missing pushes in the past year")

    yesterday = int(datetime.timestamp(datetime.utcnow() - timedelta(days=1)))

    task_group_id = slugId()
    logger.info(f"Triggering tasks in the {task_group_id} group")
    triggered = 0
    for revision, timestamp in reversed(missing_revisions):
        # If it's older than yesterday, we assume the group finished.
        # If it is newer than yesterday, we load the group and check if all tasks in it finished.
        if timestamp > yesterday:
            decision_task_id = taskcluster.get_decision_task(
                "mozilla-central", revision)
            if decision_task_id is None:
                continue

            group = taskcluster.get_task_details(
                decision_task_id)["taskGroupId"]
            if not all(task["status"]["state"] in taskcluster.FINISHED_STATUSES
                       for task in taskcluster.get_tasks_in_group(group)
                       if taskcluster.is_coverage_task(task["task"])):
                continue

        trigger_task(task_group_id, revision)
        triggered_revisions.add(revision)
        triggered += 1
        if triggered == MAXIMUM_TRIGGERS:
            break

    cctx = zstandard.ZstdCompressor(threads=-1)
    with open(triggered_revisions_path, "wb") as zf:
        with cctx.stream_writer(zf) as compressor:
            with io.TextIOWrapper(compressor, encoding="ascii") as f:
                f.write("\n".join(triggered_revisions))
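
The triggered-revisions state file is just a newline-delimited list of revisions compressed with zstandard. Isolated from the function above, the read/write round trip uses the same calls as the source; the helper names here are hypothetical:

import io
import zstandard

def read_revision_set(path: str) -> set:
    # Load a zstd-compressed, newline-delimited set of revisions.
    try:
        dctx = zstandard.ZstdDecompressor()
        with open(path, "rb") as zf:
            with dctx.stream_reader(zf) as reader:
                with io.TextIOWrapper(reader, encoding="ascii") as f:
                    return set(f.read().splitlines())
    except FileNotFoundError:
        return set()

def write_revision_set(path: str, revisions: set) -> None:
    # Persist the set back, one revision per line, zstd-compressed.
    cctx = zstandard.ZstdCompressor(threads=-1)
    with open(path, "wb") as zf:
        with cctx.stream_writer(zf) as compressor:
            with io.TextIOWrapper(compressor, encoding="ascii") as f:
                f.write("\n".join(sorted(revisions)))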
Example #4
    def run(self):
        # Check that the covdir report does not already exist
        bucket = gcp.get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE])
        if uploader.gcp_covdir_exists(bucket, self.branch, self.revision,
                                      "all", "all"):
            logger.warning("Full covdir report already on GCP")
            return

        # Generate and upload the full report as soon as possible, so it is available
        # for consumers (e.g. Searchfox) right away.
        self.retrieve_source_and_artifacts()

        reports = self.build_reports(only=[("all", "all")])

        full_path = reports.get(("all", "all"))
        assert full_path is not None, "Missing full report (all:all)"
        with open(full_path, "r") as f:
            report_text = f.read()

        # Upload report as an artifact.
        taskcluster_config.upload_artifact(
            "public/code-coverage-report.json",
            report_text,
            "application/json",
            timedelta(days=14),
        )

        # Index on Taskcluster
        self.index_task([
            "project.relman.code-coverage.{}.repo.mozilla-central.{}".format(
                secrets[secrets.APP_CHANNEL], self.revision),
            "project.relman.code-coverage.{}.repo.mozilla-central.latest".format(
                secrets[secrets.APP_CHANNEL]),
        ])

        report = json.loads(report_text)

        # Check extensions
        paths = uploader.covdir_paths(report)
        for extension in [".js", ".cpp"]:
            assert any(
                path.endswith(extension) for path in paths
            ), "No {} file in the generated report".format(extension)

        # Upload coverage on phabricator
        changesets = self.get_hgmo_changesets()
        coverage = self.upload_phabricator(report, changesets)

        # Send an email on low coverage
        notify_email(self.revision, changesets, coverage)
        logger.info("Sent low coverage email notification")

        self.check_javascript_files()

        # Generate all reports except the full one, which we generated earlier.
        all_report_combinations = self.artifactsHandler.get_combinations()
        del all_report_combinations[("all", "all")]
        reports.update(self.build_reports(only=list(all_report_combinations.keys())))
        logger.info("Built all covdir reports", nb=len(reports))

        # Upload reports on GCP
        self.upload_reports(reports)
        logger.info("Uploaded all covdir reports", nb=len(reports))
Example #5
    def go_from_trigger_mozilla_central(self):
        commit_sha = self.githubUtils.mercurial_to_git(self.revision)
        try:
            uploader.get_codecov(commit_sha)
            logger.warning('Build was already ingested')

            # Check that the covdir report does not already exist
            if uploader.gcp_covdir_exists(self.branch, self.revision):
                logger.warning('Covdir report already on GCP')
                return

            # The artifacts are still needed to build the covdir report
            self.retrieve_source_and_artifacts()

            # Update GCP covdir report anyway
            uploader.gcp(self.branch, self.revision, self.generate_covdir())
            return
        except requests.exceptions.HTTPError:
            pass

        self.retrieve_source_and_artifacts()

        self.githubUtils.update_geckodev_repo()

        logger.info('GitHub revision', revision=commit_sha)

        self.githubUtils.post_github_status(commit_sha)

        r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
        r.raise_for_status()
        push_id = r.json()['pushid']

        # Check that all JavaScript files present in the coverage artifacts actually exist.
        # If they don't, there might be a bug in the LCOV rewriter.
        for artifact in self.artifactsHandler.get():
            if 'jsvm' not in artifact:
                continue

            with zipfile.ZipFile(artifact, 'r') as zf:
                for file_name in zf.namelist():
                    with zf.open(file_name, 'r') as fl:
                        source_files = [line[3:].decode('utf-8').rstrip() for line in fl if line.startswith(b'SF:')]
                        missing_files = [f for f in source_files if not os.path.exists(os.path.join(self.repo_dir, f))]
                        if missing_files:
                            logger.warning(f'{missing_files} are present in coverage reports, but missing from the repository')

        output = grcov.report(
            self.artifactsHandler.get(),
            source_dir=self.repo_dir,
            service_number=push_id,
            commit_sha=commit_sha,
        )
        logger.info('Codecov report generated successfully')

        output_covdir = self.generate_covdir()

        report = json.loads(output)
        expected_extensions = ['.js', '.cpp']
        for extension in expected_extensions:
            assert any(
                f['name'].endswith(extension) for f in report['source_files']
            ), 'No {} file in the generated report'.format(extension)

        logger.info('Upload changeset coverage data to Phabricator')
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
        phabricatorUploader.upload(report)

        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(uploader.codecov, output, commit_sha)
            executor.submit(uploader.gcp, self.branch, self.revision, output_covdir)

        logger.info('Waiting for build to be ingested by Codecov...')
        # Wait until the build has been ingested by Codecov.
        if uploader.codecov_wait(commit_sha):
            logger.info('Build ingested by codecov.io')
            notifier = Notifier(self.repo_dir, self.revision, self.client_id, self.access_token)
            notifier.notify()
        else:
            logger.error('codecov.io took too long to ingest data.')
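
The final branch hinges on uploader.codecov_wait polling codecov.io until the commit shows up as processed. A minimal polling loop under assumed values: the API URL shape, the 'state' field, and the timeout are illustrative, not Codecov's documented contract:

import time
import requests

def codecov_wait(commit_sha: str, timeout: int = 3600, interval: int = 60) -> bool:
    # Poll until Codecov reports the commit as ingested, or give up after `timeout` seconds.
    url = 'https://codecov.io/api/gh/mozilla/gecko-dev/commit/%s' % commit_sha  # assumed endpoint
    deadline = time.time() + timeout
    while time.time() < deadline:
        r = requests.get(url)
        if r.ok and r.json().get('commit', {}).get('state') == 'complete':
            return True
        time.sleep(interval)
    return False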