Example #1
    def go_from_cron(self):
        self.retrieve_source_and_artifacts()

        logger.info('Generating suite reports')
        os.makedirs(self.ccov_reports_dir, exist_ok=True)
        suite_reports.generate(self.suites, self.artifactsHandler, self.ccov_reports_dir, self.repo_dir)

        logger.info('Generating zero coverage reports')
        zc = ZeroCov(self.repo_dir)
        zc.generate(self.artifactsHandler.get(), self.revision, self.github_revision)

        logger.info('Generating chunk mapping')
        chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as "latest".
        # Given that all tasks have the same rank, the latest task that finishes will
        # overwrite the "latest" entry.
        namespaces = [
            'project.releng.services.project.{}.code_coverage_bot.{}'.format(secrets[secrets.APP_CHANNEL], self.revision),
            'project.releng.services.project.{}.code_coverage_bot.latest'.format(secrets[secrets.APP_CHANNEL]),
        ]

        for namespace in namespaces:
            self.index_service.insertTask(
                namespace,
                {
                    'taskId': os.environ['TASK_ID'],
                    'rank': 0,
                    'data': {},
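                    # Expire this index entry 180 days after it is created.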
                    'expires': (datetime.utcnow() + timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                }
            )

        os.chdir(self.ccov_reports_dir)
        self.githubUtils.update_codecoveragereports_repo()
Example #2
    def go_from_cron(self):
        self.retrieve_source_and_artifacts()

        logger.info("Generating zero coverage reports")
        zc = ZeroCov(self.repo_dir)
        zc.generate(self.artifactsHandler.get(), self.revision)

        logger.info("Generating chunk mapping")
        chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as "latest".
        # Given that all tasks have the same rank, the latest task that finishes will
        # overwrite the "latest" entry.
        namespaces = [
            "project.releng.services.project.{}.code_coverage_bot.{}".format(
                secrets[secrets.APP_CHANNEL], self.revision
            ),
            "project.releng.services.project.{}.code_coverage_bot.latest".format(
                secrets[secrets.APP_CHANNEL]
            ),
        ]

        for namespace in namespaces:
            self.index_service.insertTask(
                namespace,
                {
                    "taskId": os.environ["TASK_ID"],
                    "rank": 0,
                    "data": {},
                    "expires": (datetime.utcnow() + timedelta(180)).strftime(
                        "%Y-%m-%dT%H:%M:%S.%fZ"
                    ),
                },
            )
Example #3
    def run(self):
        self.retrieve_source_and_artifacts()

        logger.info("Generating zero coverage reports")
        zc = ZeroCov(self.repo_dir)
        zc.generate(self.artifactsHandler.get(), self.revision)

        logger.info("Generating chunk mapping")
        chunk_mapping.generate(self.repo_dir, self.revision,
                               self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as "latest".
        # Given that all tasks have the same rank, the latest task that finishes will
        # overwrite the "latest" entry.
        self.index_task([
            "project.relman.code-coverage.{}.cron.{}".format(
                secrets[secrets.APP_CHANNEL], self.revision),
            "project.relman.code-coverage.{}.cron.latest".format(
                secrets[secrets.APP_CHANNEL]),
        ])
Example #4
    def run(self) -> None:
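        # Trigger coverage tasks for any mozilla-central revisions that are still missing coverage data.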
        trigger_missing.trigger_missing(config.MOZILLA_CENTRAL_REPOSITORY)

        self.retrieve_source_and_artifacts()

        commit_coverage.generate(self.repository, self.repo_dir)

        logger.info("Generating zero coverage reports")
        zc = ZeroCov(self.repo_dir)
        zc.generate(self.artifactsHandler.get(), self.revision)

        # This is disabled as it is not used yet.
        # logger.info("Generating chunk mapping")
        # chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as "latest".
        # Given that all tasks have the same rank, the latest task that finishes will
        # overwrite the "latest" entry.
        self.index_task([
            "project.relman.code-coverage.{}.cron.{}".format(
                secrets[secrets.APP_CHANNEL], self.revision),
            "project.relman.code-coverage.{}.cron.latest".format(
                secrets[secrets.APP_CHANNEL]),
        ])
Example #5
def test_zero_coverage(tmpdir, grcov_artifact, grcov_uncovered_artifact,
                       jsvm_artifact, jsvm_uncovered_artifact,
                       grcov_uncovered_function_artifact,
                       jsvm_uncovered_function_artifact,
                       fake_hg_repo_with_contents):
    tmp_path = tmpdir.strpath

    hgrev = '314159265358'
    gitrev = '271828182845'
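    # Run the zero-coverage analysis over a mix of covered and uncovered artifacts.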
    ZeroCov(fake_hg_repo_with_contents).generate(
        [grcov_artifact, grcov_uncovered_artifact, jsvm_artifact,
         jsvm_uncovered_artifact, grcov_uncovered_function_artifact,
         jsvm_uncovered_function_artifact],
        hgrev,
        gitrev,
        out_dir=tmp_path)

    with open(os.path.join(tmp_path, 'zero_coverage_report.json'), 'r') as f:
        zero_coverage_report = json.load(f)

    assert 'hg_revision' in zero_coverage_report and zero_coverage_report['hg_revision'] == hgrev
    assert 'github_revision' in zero_coverage_report and zero_coverage_report['github_revision'] == gitrev
    assert 'files' in zero_coverage_report
    zero_coverage_functions = zero_coverage_report['files']

    today = datetime.utcnow()
    today = pytz.utc.localize(today)
    today = today.strftime(ZeroCov.DATE_FORMAT)

    expected_zero_coverage_functions = [
        {
            'funcs': 1,
            'name': 'mozglue/build/dummy.cpp',
            'uncovered': True,
            'size': 1,
            'commits': 2,
            'first_push_date': today,
            'last_push_date': today
        },
        {
            'funcs': 2,
            'name': 'toolkit/components/osfile/osfile.jsm',
            'uncovered': False,
            'size': 2,
            'commits': 2,
            'first_push_date': today,
            'last_push_date': today
        },
        {
            'funcs': 1,
            'name': 'js/src/jit/JIT.cpp',
            'uncovered': False,
            'size': 3,
            'commits': 2,
            'first_push_date': today,
            'last_push_date': today
        },
        {
            'funcs': 1,
            'name': 'toolkit/components/osfile/osfile-win.jsm',
            'uncovered': True,
            'size': 4,
            'commits': 2,
            'first_push_date': today,
            'last_push_date': today
        },
    ]
    assert len(zero_coverage_functions) == len(expected_zero_coverage_functions)
    while len(expected_zero_coverage_functions):
        exp_item = expected_zero_coverage_functions.pop()
        found = False
        for found_item in zero_coverage_functions:
            if found_item['name'] == exp_item['name']:
                found = True
                break
        assert found
        assert found_item['funcs'] == exp_item['funcs']
        assert found_item['first_push_date'] == exp_item['first_push_date']
        assert found_item['last_push_date'] == exp_item['last_push_date']
        assert found_item['size'] == exp_item['size']
        assert found_item['commits'] == exp_item['commits']
        assert found_item['uncovered'] == exp_item['uncovered']
Example #6
def test_zero_coverage(
    tmpdir,
    grcov_artifact,
    grcov_uncovered_artifact,
    jsvm_artifact,
    jsvm_uncovered_artifact,
    grcov_uncovered_function_artifact,
    jsvm_uncovered_function_artifact,
    fake_hg_repo_with_contents,
):
    tmp_path = tmpdir.strpath

    hgrev = "314159265358"
    ZeroCov(fake_hg_repo_with_contents).generate(
        [
            grcov_artifact,
            grcov_uncovered_artifact,
            jsvm_artifact,
            jsvm_uncovered_artifact,
            grcov_uncovered_function_artifact,
            jsvm_uncovered_function_artifact,
        ],
        hgrev,
        out_dir=tmp_path,
    )

    with open(os.path.join(tmp_path, "zero_coverage_report.json"), "r") as f:
        zero_coverage_report = json.load(f)

    assert (
        "hg_revision" in zero_coverage_report
        and zero_coverage_report["hg_revision"] == hgrev
    )
    assert "files" in zero_coverage_report
    zero_coverage_functions = zero_coverage_report["files"]

    today = datetime.utcnow()
    today = pytz.utc.localize(today)
    today = today.strftime(ZeroCov.DATE_FORMAT)

    expected_zero_coverage_functions = [
        {
            "funcs": 1,
            "name": "mozglue/build/dummy.cpp",
            "uncovered": True,
            "size": 1,
            "commits": 2,
            "first_push_date": today,
            "last_push_date": today,
        },
        {
            "funcs": 2,
            "name": "toolkit/components/osfile/osfile.jsm",
            "uncovered": False,
            "size": 2,
            "commits": 2,
            "first_push_date": today,
            "last_push_date": today,
        },
        {
            "funcs": 1,
            "name": "js/src/jit/JIT.cpp",
            "uncovered": False,
            "size": 3,
            "commits": 2,
            "first_push_date": today,
            "last_push_date": today,
        },
        {
            "funcs": 1,
            "name": "toolkit/components/osfile/osfile-win.jsm",
            "uncovered": True,
            "size": 4,
            "commits": 2,
            "first_push_date": today,
            "last_push_date": today,
        },
    ]
    assert len(zero_coverage_functions) == len(expected_zero_coverage_functions)
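    # Every expected file entry must be present in the report with matching metadata.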
    while len(expected_zero_coverage_functions):
        exp_item = expected_zero_coverage_functions.pop()
        found = False
        for found_item in zero_coverage_functions:
            if found_item["name"] == exp_item["name"]:
                found = True
                break
        assert found
        assert found_item["funcs"] == exp_item["funcs"]
        assert found_item["first_push_date"] == exp_item["first_push_date"]
        assert found_item["last_push_date"] == exp_item["last_push_date"]
        assert found_item["size"] == exp_item["size"]
        assert found_item["commits"] == exp_item["commits"]
        assert found_item["uncovered"] == exp_item["uncovered"]
Example #7
    def go(self):
        if self.from_pulse:
            commit_sha = self.githubUtils.mercurial_to_git(self.revision)
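            # Skip the run if Codecov has already ingested a report for this commit.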
            try:
                uploader.get_codecov(commit_sha)
                logger.warn('Build was already ingested')
                return
            except requests.exceptions.HTTPError:
                pass

        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone mozilla-central.
            executor.submit(self.clone_mozilla_central, self.revision)

        if self.from_pulse:
            self.githubUtils.update_geckodev_repo()

            logger.info('GitHub revision', revision=commit_sha)

            self.githubUtils.post_github_status(commit_sha)

            r = requests.get(
                'https://hg.mozilla.org/mozilla-central/json-rev/%s' %
                self.revision)
            r.raise_for_status()
            push_id = r.json()['pushid']

            output = grcov.report(self.artifactsHandler.get(),
                                  source_dir=self.repo_dir,
                                  service_number=push_id,
                                  commit_sha=commit_sha,
                                  token=secrets[secrets.COVERALLS_TOKEN])
            logger.info('Report generated successfully')

            logger.info('Upload changeset coverage data to Phabricator')
            phabricatorUploader = PhabricatorUploader(self.repo_dir,
                                                      self.revision)
            phabricatorUploader.upload(json.loads(output))

            with ThreadPoolExecutorResult(max_workers=2) as executor:
                executor.submit(uploader.coveralls, output)
                executor.submit(uploader.codecov, output, commit_sha)

            logger.info('Waiting for build to be ingested by Codecov...')
            # Wait until the build has been ingested by Codecov.
            if uploader.codecov_wait(commit_sha):
                logger.info('Build ingested by codecov.io')
                self.notifier.notify()
            else:
                logger.error('codecov.io took too much time to ingest data.')
        else:
            logger.info('Generating suite reports')
            os.makedirs(self.ccov_reports_dir, exist_ok=True)
            suite_reports.generate(self.suites, self.artifactsHandler,
                                   self.ccov_reports_dir, self.repo_dir)

            logger.info('Generating zero coverage reports')
            zc = ZeroCov(self.repo_dir)
            zc.generate(self.artifactsHandler.get(), self.revision,
                        self.github_revision)

            logger.info('Generating chunk mapping')
            chunk_mapping.generate(self.repo_dir, self.revision,
                                   self.artifactsHandler)

            # Index the task in the TaskCluster index at the given revision and as "latest".
            # Given that all tasks have the same rank, the latest task that finishes will
            # overwrite the "latest" entry.
            namespaces = [
                'project.releng.services.project.{}.code_coverage_bot.{}'.format(
                    secrets[secrets.APP_CHANNEL], self.revision),
                'project.releng.services.project.{}.code_coverage_bot.latest'.format(
                    secrets[secrets.APP_CHANNEL]),
            ]

            for namespace in namespaces:
                self.index_service.insertTask(
                    namespace,
                    {
                        'taskId': os.environ['TASK_ID'],
                        'rank': 0,
                        'data': {},
                        'expires': (datetime.utcnow() + timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                    },
                )

            os.chdir(self.ccov_reports_dir)
            self.githubUtils.update_codecoveragereports_repo()