Ejemplo n.º 1
0
def test_get_coverage_artifacts():
    """Check ArtifactsHandler.get filtering by suite, chunk and platform.

    Creates the expected artifact fixture files on disk, verifies the
    various filter combinations return the right subsets, and removes the
    files afterwards.
    """
    utils.mkdir('ccov-artifacts')

    # Create empty fixture files. Close the handles immediately (the bare
    # `open(f, 'w')` left them open, leaking file descriptors and making the
    # later os.remove fail on Windows).
    for f in FILES:
        with open(f, 'w'):
            pass

    a = ArtifactsHandler([], [])
    assert set(a.get()) == set(FILES)
    assert set(a.get(suite='mochitest')) == set([
        'ccov-artifacts/windows_mochitest-1_code-coverage-jsvm.info',
        'ccov-artifacts/linux_mochitest-2_code-coverage-grcov.zip'
    ])
    assert set(a.get(chunk='xpcshell-7')) == set([
        'ccov-artifacts/windows_xpcshell-7_code-coverage-jsvm.info',
        'ccov-artifacts/linux_xpcshell-7_code-coverage-grcov.zip'
    ])
    assert set(a.get(chunk='cppunit')) == set(
        ['ccov-artifacts/windows_cppunit_code-coverage-grcov.zip'])
    assert set(a.get(platform='windows')) == set([
        'ccov-artifacts/windows_mochitest-1_code-coverage-jsvm.info',
        'ccov-artifacts/windows_xpcshell-7_code-coverage-jsvm.info',
        'ccov-artifacts/windows_cppunit_code-coverage-grcov.zip',
    ])
    assert set(a.get(platform='linux', chunk='xpcshell-7')) == set(
        ['ccov-artifacts/linux_xpcshell-7_code-coverage-grcov.zip'])

    # Passing both chunk and suite is an invalid combination and must raise.
    try:
        a.get(chunk='xpcshell-7', suite='mochitest')
        assert False, 'An exception should have been thrown'
    except Exception:
        pass

    for f in FILES:
        os.remove(f)
Ejemplo n.º 2
0
    def generate_zero_coverage_report(self):
        """Generate JSON reports of files and functions with zero coverage.

        Writes three artifacts under code-coverage-reports/:
          - zero_coverage_files.json: list of completely uncovered files.
          - zero_coverage_functions/<file>.json: per-file list of uncovered
            function names.
          - zero_coverage_functions.json: per-file counts of uncovered
            functions.
        """
        report = self.generate_info(self.revision, out_format='coveralls+')
        report = json.loads(report.decode(
            'utf-8'))  # Decoding is only necessary until Python 3.6.

        zero_coverage_files = []
        zero_coverage_functions = {}
        for sf in report['source_files']:
            name = sf['name']

            # For C/C++ source files, we can consider a file as being uncovered
            # when all its source lines are uncovered.
            all_lines_uncovered = all(c is None or c == 0
                                      for c in sf['coverage'])
            # For JavaScript files, we can't do the same, as the top-level is always
            # executed, even if it just contains declarations. So, we need to check if
            # all its functions, except the top-level, are uncovered.
            all_functions_uncovered = True
            for func in sf['functions']:
                f_name = func['name']
                if f_name == 'top-level':
                    continue

                if not func['exec']:
                    # setdefault replaces the explicit membership check and
                    # reuses the already-extracted f_name.
                    zero_coverage_functions.setdefault(name, []).append(f_name)
                else:
                    all_functions_uncovered = False

            # len > 1 excludes files whose only "function" is the top-level.
            if all_lines_uncovered or (len(sf['functions']) > 1
                                       and all_functions_uncovered):
                zero_coverage_files.append(name)

        with open('code-coverage-reports/zero_coverage_files.json', 'w') as f:
            json.dump(zero_coverage_files, f)

        mkdir('code-coverage-reports/zero_coverage_functions')

        zero_coverage_function_counts = []
        for fname, functions in zero_coverage_functions.items():
            zero_coverage_function_counts.append({
                'name': fname,
                'funcs': len(functions),
            })
            # '/' is not valid in a file name, so flatten the path.
            with open(
                    'code-coverage-reports/zero_coverage_functions/%s.json' %
                    fname.replace('/', '_'), 'w') as f:
                json.dump(functions, f)

        with open('code-coverage-reports/zero_coverage_functions.json',
                  'w') as f:
            json.dump(zero_coverage_function_counts, f)
Ejemplo n.º 3
0
    def go(self):
        """Main entry point for one coverage run.

        Downloads coverage artifacts and clones mozilla-central in parallel,
        then either uploads a grcov report to Coveralls/Codecov (pulse-triggered
        runs) or generates static reports for the code-coverage-reports repo.
        """
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone mozilla-central.
            executor.submit(self.clone_mozilla_central, self.revision)

        if self.from_pulse:
            self.githubUtils.update_geckodev_repo()

            # Map the hg revision to the mirrored git commit so the coverage
            # services can link to GitHub.
            commit_sha = self.githubUtils.get_commit(self.revision)
            logger.info('GitHub revision', revision=commit_sha)

            self.githubUtils.post_github_status(commit_sha)

            # Look up the push id for this revision; used as the Coveralls
            # service_number.
            r = requests.get(
                'https://hg.mozilla.org/mozilla-central/json-rev/%s' %
                self.revision)
            r.raise_for_status()
            push_id = r.json()['pushid']

            output = grcov.report(self.artifactsHandler.get(),
                                  source_dir=self.repo_dir,
                                  service_number=push_id,
                                  commit_sha=commit_sha,
                                  token=secrets[secrets.COVERALLS_TOKEN])
            logger.info('Report generated successfully')

            # Upload the same report to both services in parallel.
            with ThreadPoolExecutorResult(max_workers=2) as executor:
                executor.submit(uploader.coveralls, output)
                executor.submit(uploader.codecov, output, commit_sha)

            logger.info('Waiting for build to be ingested by Codecov...')
            # Wait until the build has been ingested by Codecov.
            if uploader.codecov_wait(commit_sha):
                logger.info('Build ingested by codecov.io')
                # Only notify once Codecov has the data, so links in the
                # notification resolve.
                self.notifier.notify()
            else:
                logger.info('codecov.io took too much time to ingest data.')
        else:
            mkdir('code-coverage-reports')

            # XXX: Disabled as it is unused for now.
            # self.generate_suite_reports()

            report_generators.zero_coverage(self.artifactsHandler.get())

            self.generate_chunk_mapping()

            # update_codecoveragereports_repo operates on the current working
            # directory, so chdir into the output directory first.
            os.chdir('code-coverage-reports')
            self.githubUtils.update_codecoveragereports_repo()
Ejemplo n.º 4
0
    def download_all(self):
        """Download the best coverage artifact set for each (chunk, platform).

        Collects all coverage test tasks from the build tasks' groups, selects
        the task with the best status for every chunk/platform combination,
        and downloads their artifacts in parallel into 'ccov-artifacts'.
        """
        mkdir('ccov-artifacts')

        # The test tasks for the Linux and Windows builds are in the same group,
        # but the following code is generic and supports build tasks split in
        # separate groups.
        groups = {
            taskcluster.get_task_details(build_task_id)['taskGroupId']
            for build_task_id in self.task_ids.values()
        }
        test_tasks = [
            task for group in groups
            for task in taskcluster.get_tasks_in_group(group)
            if taskcluster.is_coverage_task(task)
        ]

        # Choose best tasks to download (e.g. 'completed' is better than 'failed')
        download_tasks = {}
        for test_task in test_tasks:
            status = test_task['status']['state']
            assert status in ALL_STATUSES

            task_name = test_task['task']['metadata']['name']
            chunk_name = taskcluster.get_chunk(task_name)
            platform_name = taskcluster.get_platform(task_name)

            # Ignore awsy and talos as they aren't actually suites of tests.
            if any(to_ignore in chunk_name
                   for to_ignore in self.suites_to_ignore):
                continue

            if (chunk_name, platform_name) not in download_tasks:
                # If the chunk hasn't been downloaded before, this is obviously the best task
                # to download it from.
                download_tasks[(chunk_name, platform_name)] = test_task
            else:
                # Otherwise, compare the status of this task with the previously selected task.
                prev_task = download_tasks[(chunk_name, platform_name)]

                if STATUS_VALUE[status] > STATUS_VALUE[prev_task['status']
                                                       ['state']]:
                    download_tasks[(chunk_name, platform_name)] = test_task

        # Download only the selected best tasks. The original iterated
        # test_tasks here, which downloaded every task and made the
        # per-(chunk, platform) selection above dead code.
        with ThreadPoolExecutorResult() as executor:
            for test_task in download_tasks.values():
                executor.submit(self.download, test_task)

        logger.info('Code coverage artifacts downloaded')
Ejemplo n.º 5
0
def test_get_chunks():
    """Check ArtifactsHandler.get_chunks returns all chunk names from FILES.

    Creates the artifact fixture files, asserts the derived chunk set, and
    removes the files afterwards.
    """
    utils.mkdir('ccov-artifacts')

    # Create empty fixture files. Close the handles immediately (the bare
    # `open(f, 'w')` left them open, leaking file descriptors and making the
    # later os.remove fail on Windows).
    for f in FILES:
        with open(f, 'w'):
            pass

    a = ArtifactsHandler([], [])
    assert set(a.get_chunks()) == {
        'mochitest-1',
        'mochitest-2',
        'xpcshell-3',
        'xpcshell-7',
        'cppunit',
        'firefox-ui-functional-remote',
    }

    for f in FILES:
        os.remove(f)
Ejemplo n.º 6
0
    def go(self):
        """Main entry point for one coverage run.

        Downloads coverage artifacts and clones mozilla-central in parallel,
        then either uploads the generated report to Coveralls/Codecov
        (pulse-triggered runs) or generates static reports for the
        code-coverage-reports repo.
        """
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            # Thread 1 - Download coverage artifacts.
            executor.submit(self.artifactsHandler.download_all)

            # Thread 2 - Clone mozilla-central.
            executor.submit(self.clone_mozilla_central, self.revision)

        if self.from_pulse:
            self.githubUtils.update_geckodev_repo()

            # Map the hg revision to the mirrored git commit so the coverage
            # services can link to GitHub.
            commit_sha = self.githubUtils.get_commit(self.revision)
            logger.info('GitHub revision', revision=commit_sha)

            self.githubUtils.post_github_status(commit_sha)

            output = self.generate_info(commit_sha)
            logger.info('Report generated successfully')

            # Upload the same report to both services in parallel.
            with ThreadPoolExecutorResult(max_workers=2) as executor:
                executor.submit(uploader.coveralls, output)
                executor.submit(uploader.codecov, output, commit_sha)

            logger.info('Waiting for build to be ingested by Codecov...')
            # Wait until the build has been ingested by Codecov.
            if uploader.codecov_wait(commit_sha):
                logger.info('Build ingested by codecov.io')
                # Only notify once Codecov has the data, so links in the
                # notification resolve.
                self.notifier.notify()
            else:
                logger.info('codecov.io took too much time to ingest data.')
        else:
            mkdir('code-coverage-reports')

            self.generate_per_suite_reports()

            self.generate_zero_coverage_report()

            self.generate_chunk_mapping()

            # update_codecoveragereports_repo operates on the current working
            # directory, so chdir into the output directory first.
            os.chdir('code-coverage-reports')
            self.githubUtils.update_codecoveragereports_repo()