Ejemplo n.º 1
0
    def go_from_trigger_mozilla_central(self):
        """Build and publish the covdir report for a mozilla-central push.

        Skips the whole run when the report already exists on GCP; otherwise
        validates the JS coverage artifacts, generates the covdir report,
        uploads it to Phabricator and GCP, and sends the email notification.
        """
        # Check the covdir report does not already exist.
        if uploader.gcp_covdir_exists(self.branch, self.revision):
            # warn() is a deprecated alias of warning() — use the real name.
            logger.warning("Covdir report already on GCP")
            return

        self.retrieve_source_and_artifacts()

        # Check that all JavaScript files present in the coverage artifacts actually exist.
        # If they don't, there might be a bug in the LCOV rewriter.
        for artifact in self.artifactsHandler.get():
            if "jsvm" not in artifact:
                continue

            with zipfile.ZipFile(artifact, "r") as zf:
                for file_name in zf.namelist():
                    with zf.open(file_name, "r") as fl:
                        # LCOV "SF:<path>" records name each instrumented source file.
                        source_files = [
                            line[3:].decode("utf-8").rstrip()
                            for line in fl
                            if line.startswith(b"SF:")
                        ]
                        missing_files = [
                            f
                            for f in source_files
                            if not os.path.exists(os.path.join(self.repo_dir, f))
                        ]
                        if missing_files:
                            logger.warning(
                                f"{missing_files} are present in coverage reports, but missing from the repository"
                            )

        report = self.generate_covdir()

        # Sanity check: a mozilla-central report must cover both JS and C++.
        paths = uploader.covdir_paths(report)
        expected_extensions = [".js", ".cpp"]
        for extension in expected_extensions:
            assert any(
                path.endswith(extension) for path in paths
            ), "No {} file in the generated report".format(extension)

        # Get pushlog and ask the backend to generate the coverage by changeset
        # data, which will be cached.
        with hgmo.HGMO(self.repo_dir) as hgmo_server:
            changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

        logger.info("Upload changeset coverage data to Phabricator")
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
        changesets_coverage = phabricatorUploader.upload(report, changesets)

        uploader.gcp(self.branch, self.revision, report)

        logger.info("Build uploaded on GCP")
        notify_email(self.revision, changesets, changesets_coverage)
Ejemplo n.º 2
0
    def run(self):
        """Build, validate and publish all covdir reports for a push.

        Skips the run entirely when the full report is already on GCP;
        otherwise builds every covdir report, uploads them, pushes
        per-changeset coverage to Phabricator, emails on low coverage and
        indexes the task on Taskcluster.
        """
        # Check the covdir report does not already exist.
        bucket = gcp.get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE])
        if uploader.gcp_covdir_exists(bucket, self.branch, self.revision,
                                      "all", "all"):
            # warn() is a deprecated alias of warning() — use the real name.
            logger.warning("Full covdir report already on GCP")
            return

        self.retrieve_source_and_artifacts()

        self.check_javascript_files()

        reports = self.build_reports()
        logger.info("Built all covdir reports", nb=len(reports))

        # Retrieve the full report (platform "all", suite "all").
        full_path = reports.get(("all", "all"))
        assert full_path is not None, "Missing full report (all:all)"
        with open(full_path, "r") as f:
            report = json.load(f)

        # Sanity check: the full report must cover both JS and C++ files.
        paths = uploader.covdir_paths(report)
        for extension in [".js", ".cpp"]:
            assert any(
                path.endswith(extension) for path in paths
            ), "No {} file in the generated report".format(extension)

        # Upload reports on GCP
        self.upload_reports(reports)
        logger.info("Uploaded all covdir reports", nb=len(reports))

        # Upload coverage on phabricator
        changesets = self.get_hgmo_changesets()
        coverage = self.upload_phabricator(report, changesets)

        # Send an email on low coverage
        notify_email(self.revision, changesets, coverage)
        logger.info("Sent low coverage email notification")

        # Index on Taskcluster
        self.index_task([
            "project.relman.code-coverage.{}.repo.mozilla-central.{}".format(
                secrets[secrets.APP_CHANNEL], self.revision),
            "project.relman.code-coverage.{}.repo.mozilla-central.latest".
            format(secrets[secrets.APP_CHANNEL]),
        ])
Ejemplo n.º 3
0
def test_notification(mock_secrets, mock_taskcluster, mock_phabricator,
                      fake_hg_repo):
    """Check the low-coverage notification email for a two-commit stack.

    Builds a stack where the second commit adds 6 uncovered lines, generates
    per-changeset coverage from a fake covdir report, and verifies both the
    coverage summary and the resulting email body.
    """
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n")
    commit(hg, 1)

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    revision = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    stack = changesets(local, revision)
    assert len(stack) == 2
    assert (
        stack[0]["desc"] ==
        "Commit [(b'A', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D1"
    )
    assert (
        stack[1]["desc"] ==
        "Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2"
    )

    report = covdir_report({
        "source_files": [{
            "name": "file",
            "coverage": [None, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]
        }]
    })
    phab = PhabricatorUploader(local, revision)
    changesets_coverage = phab.generate(report, stack)

    assert changesets_coverage == {
        1: {
            "file": {
                "lines_added": 4,
                "lines_covered": 2,
                "coverage": "NUCU"
            }
        },
        2: {
            "file": {
                "lines_added": 6,
                "lines_covered": 0,
                "coverage": "NUCUUUUUUU"
            }
        },
    }

    mail = notify_email(revision, stack, changesets_coverage)
    # flake8 only honors a single-hash "# noqa"; the original "## noqa" was
    # silently ignored by the linter.
    assert (
        mail ==
        "* [Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2](https://firefox-code-coverage.herokuapp.com/#/changeset/{}): 0 covered out of 6 added.\n"
        .format(  # noqa
            revision))
Ejemplo n.º 4
0
def test_notification(mock_secrets, mock_taskcluster, mock_phabricator,
                      fake_hg_repo):
    """End-to-end check of the low-coverage email notification.

    Builds a two-commit stack (the second commit grows "file" from 4 to 10
    lines), derives per-changeset coverage from a fake covdir report, and
    verifies the generated email body.
    """
    hg, local, remote = fake_hg_repo

    # First commit adds "file" with 4 lines.
    add_file(hg, local, "file", "1\n2\n3\n4\n")
    base_rev = commit(hg, 1)

    # Second commit extends it to 10 lines.
    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
    tip_rev = commit(hg, 2)

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, tip_rev)

    assert len(stack) == 2
    expected_descs = [
        "Commit [(b'A', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D1",
        "Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2",
    ]
    assert [changeset["desc"] for changeset in stack] == expected_descs

    report = covdir_report({
        "source_files": [{
            "name": "file",
            "coverage": [None, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0]
        }]
    })
    phab_uploader = PhabricatorUploader(local, tip_rev)
    # Use a distinct name for the hglib client so the fixture's `hg` binding
    # is not shadowed.
    with hglib.open(local) as hg_client:
        coverage_by_changeset = phab_uploader.generate(hg_client, report, stack)

    assert coverage_by_changeset == {
        base_rev: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "lines_added": 3,
                    "lines_covered": 2,
                    "lines_unknown": 0,
                    "coverage": "NCCU",
                }
            },
        },
        tip_rev: {
            "revision_id": 2,
            "paths": {
                "file": {
                    "lines_added": 6,
                    "lines_covered": 0,
                    "lines_unknown": 0,
                    "coverage": "NCCUUUUUUU",
                }
            },
        },
    }

    mail = notify_email(tip_rev, stack, coverage_by_changeset)
    expected_mail = """* [Commit [(b'M', b'file')]Differential Revision: https://phabricator.services.mozilla.com/D2](https://phabricator.services.mozilla.com/D2): 0 covered out of 6 added.\n"""
    assert mail == expected_mail
Ejemplo n.º 5
0
    def run(self):
        """Generate and publish covdir reports, full report first.

        The full ("all", "all") report is built and uploaded as a Taskcluster
        artifact immediately so consumers (e.g. Searchfox) get it right away;
        Phabricator upload, email notification and the remaining per-suite
        reports follow.
        """
        # Check the covdir report does not already exist.
        bucket = gcp.get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE])
        if uploader.gcp_covdir_exists(bucket, self.branch, self.revision,
                                      "all", "all"):
            # warn() is a deprecated alias of warning() — use the real name.
            logger.warning("Full covdir report already on GCP")
            return

        # Generate and upload the full report as soon as possible, so it is available
        # for consumers (e.g. Searchfox) right away.
        self.retrieve_source_and_artifacts()

        reports = self.build_reports(only=[("all", "all")])

        full_path = reports.get(("all", "all"))
        assert full_path is not None, "Missing full report (all:all)"
        with open(full_path, "r") as f:
            report_text = f.read()

        # Upload report as an artifact.
        taskcluster_config.upload_artifact(
            "public/code-coverage-report.json",
            report_text,
            "application/json",
            timedelta(days=14),
        )

        # Index on Taskcluster
        self.index_task([
            "project.relman.code-coverage.{}.repo.mozilla-central.{}".format(
                secrets[secrets.APP_CHANNEL], self.revision),
            "project.relman.code-coverage.{}.repo.mozilla-central.latest".
            format(secrets[secrets.APP_CHANNEL]),
        ])

        report = json.loads(report_text)

        # Sanity check: the full report must cover both JS and C++ files.
        paths = uploader.covdir_paths(report)
        for extension in [".js", ".cpp"]:
            assert any(
                path.endswith(extension) for path in paths
            ), "No {} file in the generated report".format(extension)

        # Upload coverage on phabricator
        changesets = self.get_hgmo_changesets()
        coverage = self.upload_phabricator(report, changesets)

        # Send an email on low coverage
        notify_email(self.revision, changesets, coverage)
        logger.info("Sent low coverage email notification")

        self.check_javascript_files()

        # Generate all reports except the full one which we generated earlier.
        # FIX: all_report_combinations was previously computed but never passed
        # to build_reports(), so everything — including the full report — was
        # rebuilt. Pass the pruned combinations through the same `only=` keyword
        # used for the full report above.
        all_report_combinations = self.artifactsHandler.get_combinations()
        del all_report_combinations[("all", "all")]
        reports.update(self.build_reports(only=list(all_report_combinations)))
        logger.info("Built all covdir reports", nb=len(reports))

        # Upload reports on GCP
        self.upload_reports(reports)
        logger.info("Uploaded all covdir reports", nb=len(reports))