def generate_suite_report(self, suite):
    """Build an HTML coverage report for a single test suite and archive it.

    Steps: dump the suite's artifacts to an lcov ``.info`` file, render it
    with ``genhtml``, pack the rendered directory into a ``.tar.xz`` under
    ``code-coverage-reports/``, and clean up the intermediates.

    :param suite: name of the test suite whose artifacts are reported.
    """
    # lcov-format report built from every artifact belonging to this suite.
    output = grcov.report(self.artifactsHandler.get(suite=suite), out_format='lcov')

    info_file = '%s.info' % suite

    # grcov.report returns bytes, hence the binary mode.
    with open(info_file, 'wb') as f:
        f.write(output)

    # Render the lcov data to HTML under ./<suite>.
    run_check([
        'genhtml',
        '-o', os.path.join(os.getcwd(), suite),
        '--show-details', '--highlight', '--ignore-errors', 'source',
        '--legend', os.path.join(os.getcwd(), info_file),
        '--prefix', self.repo_dir
    ], cwd=self.repo_dir)

    # Fix: reuse info_file instead of rebuilding the same '%s.info' string.
    os.remove(info_file)

    # Archive the rendered report, then drop the uncompressed copy.
    with tarfile.open('code-coverage-reports/%s.tar.xz' % suite, 'w:xz') as tar:
        tar.add(suite)
    shutil.rmtree(os.path.join(os.getcwd(), suite))

    logger.info('Suite report generated', suite=suite)
def test_report_options(grcov_artifact, jsvm_artifact):
    """Extra grcov options are honored: --ignore-dir filters out toolkit files."""
    raw = grcov.report(
        [grcov_artifact, jsvm_artifact],
        out_format='coveralls',
        options=['--ignore-dir', 'toolkit'],
    )
    parsed = json.loads(raw.decode('utf-8'))
    source_files = parsed['source_files']
    # Only the non-toolkit file should survive the filter.
    assert len(source_files) == 1
    assert source_files[0]['name'] == 'js/src/jit/BitSet.cpp'
def test_report_source_dir(grcov_artifact, grcov_existing_file_artifact):
    """With a source directory set, grcov drops files that don't exist on disk.

    NOTE(review): a second function with this exact name appears later in the
    file; if both live in one module the later definition shadows this one —
    confirm they come from different files/revisions.
    """
    raw = grcov.report(
        [grcov_existing_file_artifact],
        source_dir=os.getcwd(),
        out_format='coveralls',
    )
    parsed = json.loads(raw.decode('utf-8'))
    files = parsed['source_files']
    # When we pass the source directory to the report function, grcov
    # ignores not-existing files, so only one entry remains.
    assert len(files) == 1
    only = files[0]
    assert only['name'] == 'shipit_code_coverage/cli.py'
    # The file exists under source_dir, so grcov can calculate its hash.
    assert only['source_digest'] == '8972cbe76ff3dffe12cc04e1e0ad10a7'
def test_report_source_dir(grcov_existing_file_artifact):
    """With a source directory set, an existing file's digest is computed."""
    raw = grcov.report(
        [grcov_existing_file_artifact],
        source_dir=os.getcwd(),
        out_format='coveralls',
    )
    parsed = json.loads(raw.decode('utf-8'))
    files = parsed['source_files']
    assert len(files) == 1
    only = files[0]
    assert only['name'] == 'shipit_code_coverage/cli.py'
    # When we pass the source directory to grcov and the file exists,
    # grcov can calculate its hash.
    assert only['source_digest'] == '1a1c4cac2d925795713415a7a00cec40'
def test_report_multiple_artifacts(grcov_artifact, jsvm_artifact):
    """Merging a grcov and a jsvm artifact yields both source files."""
    raw = grcov.report([grcov_artifact, jsvm_artifact], out_format='coveralls')
    parsed = json.loads(raw.decode('utf-8'))

    # Report-level metadata is fixed regardless of the input artifacts.
    for key, expected in (
        ('repo_token', 'unused'),
        ('service_name', 'TaskCluster'),
        ('service_job_number', '1'),
        ('service_number', ''),
    ):
        assert parsed[key] == expected
    assert parsed['git']['branch'] == 'master'
    assert parsed['git']['head']['id'] == 'unused'

    # Both inputs contribute exactly one source file each.
    assert len(parsed['source_files']) == 2
    names = {sf['name'] for sf in parsed['source_files']}
    assert names == {'toolkit/components/osfile/osfile.jsm', 'js/src/jit/BitSet.cpp'}
def zero_coverage(artifacts, out_dir='code-coverage-reports'):
    """Write JSON reports listing files and functions with zero coverage.

    Produces, under ``out_dir``:
      - ``zero_coverage_files.json``: names of fully uncovered source files;
      - ``zero_coverage_functions.json``: per-file counts of uncovered functions;
      - ``zero_coverage_functions/<name>.json``: the uncovered function names
        for each file ('/' in file names is flattened to '_').

    :param artifacts: coverage artifacts to feed to grcov.
    :param out_dir: destination directory for the JSON reports.
    """
    report = grcov.report(artifacts, out_format='coveralls+')
    report = json.loads(report.decode('utf-8'))  # Decoding is only necessary until Python 3.6.

    zero_coverage_files = []
    zero_coverage_functions = {}
    for sf in report['source_files']:
        name = sf['name']

        # For C/C++ source files, we can consider a file as being uncovered
        # when all its source lines are uncovered.
        all_lines_uncovered = all(c is None or c == 0 for c in sf['coverage'])

        # For JavaScript files, we can't do the same, as the top-level is always
        # executed, even if it just contains declarations. So, we need to check if
        # all its functions, except the top-level, are uncovered.
        all_functions_uncovered = True
        for func in sf['functions']:
            if func['name'] == 'top-level':
                continue
            if not func['exec']:
                # setdefault replaces the manual "key present?" branch.
                zero_coverage_functions.setdefault(name, []).append(func['name'])
            else:
                all_functions_uncovered = False

        # len > 1 guards against files whose only function is the top-level.
        if all_lines_uncovered or (len(sf['functions']) > 1 and all_functions_uncovered):
            zero_coverage_files.append(name)

    with open(os.path.join(out_dir, 'zero_coverage_files.json'), 'w') as f:
        json.dump(zero_coverage_files, f)

    mkdir(os.path.join(out_dir, 'zero_coverage_functions'))

    zero_coverage_function_counts = []
    for fname, functions in zero_coverage_functions.items():
        zero_coverage_function_counts.append({
            'name': fname,
            'funcs': len(functions),
        })
        # Use os.path.join instead of embedding '/' in the format string.
        path = os.path.join(out_dir, 'zero_coverage_functions',
                            '%s.json' % fname.replace('/', '_'))
        with open(path, 'w') as f:
            json.dump(functions, f)

    with open(os.path.join(out_dir, 'zero_coverage_functions.json'), 'w') as f:
        json.dump(zero_coverage_function_counts, f)
def go(self):
    """Run a full coverage cycle: fetch inputs in parallel, then either
    upload reports (pulse-triggered) or generate local report files.

    Pulse path: sync the gecko-dev mirror, resolve the GitHub commit for
    self.revision, post a GitHub status, look up the hg push id, build a
    coveralls report, and upload it to Coveralls and Codecov in parallel,
    notifying once Codecov has ingested the build.
    Non-pulse path: generate zero-coverage reports and the chunk mapping,
    then push them to the code-coverage-reports repo.
    """
    # Both downloads are independent; run them concurrently and let
    # ThreadPoolExecutorResult surface any worker exception on exit.
    with ThreadPoolExecutorResult(max_workers=2) as executor:
        # Thread 1 - Download coverage artifacts.
        executor.submit(self.artifactsHandler.download_all)
        # Thread 2 - Clone mozilla-central.
        executor.submit(self.clone_mozilla_central, self.revision)

    if self.from_pulse:
        self.githubUtils.update_geckodev_repo()

        commit_sha = self.githubUtils.get_commit(self.revision)
        logger.info('GitHub revision', revision=commit_sha)
        self.githubUtils.post_github_status(commit_sha)

        # Resolve the push id for this revision from hg.mozilla.org; it is
        # used as the coveralls service_number.
        r = requests.get(
            'https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
        r.raise_for_status()
        push_id = r.json()['pushid']

        output = grcov.report(self.artifactsHandler.get(),
                              source_dir=self.repo_dir,
                              service_number=push_id,
                              commit_sha=commit_sha,
                              token=secrets[secrets.COVERALLS_TOKEN])
        logger.info('Report generated successfully')

        # The two uploads are independent of each other.
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(uploader.coveralls, output)
            executor.submit(uploader.codecov, output, commit_sha)

        logger.info('Waiting for build to be ingested by Codecov...')

        # Wait until the build has been ingested by Codecov.
        if uploader.codecov_wait(commit_sha):
            logger.info('Build ingested by codecov.io')
            self.notifier.notify()
        else:
            logger.info('codecov.io took too much time to ingest data.')
    else:
        mkdir('code-coverage-reports')

        # XXX: Disabled as it is unused for now.
        # self.generate_suite_reports()

        report_generators.zero_coverage(self.artifactsHandler.get())

        self.generate_chunk_mapping()

        # The update helper expects to run from inside the reports checkout.
        os.chdir('code-coverage-reports')
        self.githubUtils.update_codecoveragereports_repo()
def test_report_grcov_artifact(grcov_artifact):
    """A single grcov artifact produces a well-formed coveralls report."""
    raw = grcov.report([grcov_artifact], out_format='coveralls')
    parsed = json.loads(raw.decode('utf-8'))

    # Fixed report metadata.
    assert parsed['repo_token'] == 'unused'
    assert parsed['service_name'] == 'TaskCluster'
    assert parsed['service_job_number'] == '1'
    assert parsed['git']['branch'] == 'master'
    assert parsed['git']['head']['id'] == 'unused'
    assert parsed['service_number'] == ''

    # Exactly one source file, with line coverage but no branch/function data.
    files = parsed['source_files']
    assert len(files) == 1
    entry = files[0]
    assert entry['name'] == 'js/src/jit/BitSet.cpp'
    assert entry['coverage'] == [42, 42]
    assert entry['branches'] == []
    assert 'source_digest' in entry
    assert 'functions' not in entry
def test_report_jsvm_artifact(jsvm_artifact):
    """A single jsvm artifact produces a well-formed coveralls report."""
    raw = grcov.report([jsvm_artifact], out_format='coveralls')
    parsed = json.loads(raw.decode('utf-8'))

    # Fixed report metadata.
    assert parsed['repo_token'] == 'unused'
    assert parsed['service_name'] == 'TaskCluster'
    assert parsed['service_job_number'] == '1'
    assert parsed['git']['branch'] == 'master'
    assert parsed['git']['head']['id'] == 'unused'
    assert parsed['service_number'] == ''

    # Exactly one source file, with line coverage but no branch/function data.
    files = parsed['source_files']
    assert len(files) == 1
    entry = files[0]
    assert entry['name'] == 'toolkit/components/osfile/osfile.jsm'
    assert entry['coverage'] == [42, 42]
    assert entry['branches'] == []
    assert 'source_digest' in entry
    assert 'functions' not in entry
def test_report_service_number(grcov_artifact):
    """A custom service_number is propagated into the coveralls report."""
    raw = grcov.report([grcov_artifact], service_number='test',
                       out_format='coveralls')
    parsed = json.loads(raw.decode('utf-8'))
    assert parsed['service_number'] == 'test'
def test_report_invalid_output_format(grcov_artifact):
    """grcov exits non-zero for an unknown output format and we surface it.

    Bug fix: pytest.raises' old ``message=`` argument never checked the raised
    exception's text — it only customized the failure message shown when NO
    exception was raised, and it was removed in pytest 5.0. ``match=`` (a
    regex searched against str(exception)) is the correct way to assert on
    the error message.
    """
    with pytest.raises(click.exceptions.ClickException,
                       match=r'`grcov` failed with code: 1\.'):
        grcov.report([grcov_artifact], out_format='UNSUPPORTED')
def test_report_token(grcov_artifact):
    """A custom token is propagated into the report's repo_token field."""
    raw = grcov.report([grcov_artifact], token='test', out_format='coveralls')
    parsed = json.loads(raw.decode('utf-8'))
    assert parsed['repo_token'] == 'test'
def test_report_commit_sha(grcov_artifact):
    """A custom commit_sha is propagated into the report's git head id."""
    raw = grcov.report([grcov_artifact], commit_sha='test',
                       out_format='coveralls')
    parsed = json.loads(raw.decode('utf-8'))
    assert parsed['git']['head']['id'] == 'test'
def go(self):
    """Run a full coverage cycle: fetch inputs in parallel, then either
    upload reports (pulse-triggered) or generate local report files.

    Pulse path: sync the gecko-dev mirror, resolve the GitHub commit for
    self.revision, post a GitHub status, look up the hg push id, build a
    coveralls report, and upload it to Coveralls and Codecov in parallel,
    notifying once Codecov has ingested the build.
    Non-pulse path: generate suite/zero-coverage reports and the chunk
    mapping, index the task in TaskCluster, then push the results to the
    code-coverage-reports repo.
    """
    # Both downloads are independent; run them concurrently and let
    # ThreadPoolExecutorResult surface any worker exception on exit.
    with ThreadPoolExecutorResult(max_workers=2) as executor:
        # Thread 1 - Download coverage artifacts.
        executor.submit(self.artifactsHandler.download_all)
        # Thread 2 - Clone mozilla-central.
        executor.submit(self.clone_mozilla_central, self.revision)

    if self.from_pulse:
        self.githubUtils.update_geckodev_repo()

        commit_sha = self.githubUtils.get_commit(self.revision)
        logger.info('GitHub revision', revision=commit_sha)
        self.githubUtils.post_github_status(commit_sha)

        # Resolve the push id for this revision from hg.mozilla.org; it is
        # used as the coveralls service_number.
        r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
        r.raise_for_status()
        push_id = r.json()['pushid']

        output = grcov.report(
            self.artifactsHandler.get(),
            source_dir=self.repo_dir,
            service_number=push_id,
            commit_sha=commit_sha,
            token=secrets[secrets.COVERALLS_TOKEN]
        )
        logger.info('Report generated successfully')

        # The two uploads are independent of each other.
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(uploader.coveralls, output)
            executor.submit(uploader.codecov, output, commit_sha)

        logger.info('Waiting for build to be ingested by Codecov...')

        # Wait until the build has been ingested by Codecov.
        if uploader.codecov_wait(commit_sha):
            logger.info('Build ingested by codecov.io')
            self.notifier.notify()
        else:
            logger.error('codecov.io took too much time to ingest data.')
    else:
        os.makedirs('code-coverage-reports', exist_ok=True)

        self.generate_suite_reports()

        report_generators.zero_coverage(self.artifactsHandler.get())

        self.generate_chunk_mapping()

        # Index the task in the TaskCluster index.
        self.index_service.insertTask(
            'project.releng.services.project.{}.shipit_code_coverage.{}'.format(secrets[secrets.APP_CHANNEL], self.revision),
            {
                'taskId': os.environ['TASK_ID'],
                'rank': 0,
                'data': {},
                # Index entry lives for 180 days.
                'expires': (datetime.utcnow() + timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
            }
        )

        # The update helper expects to run from inside the reports checkout.
        os.chdir('code-coverage-reports')
        self.githubUtils.update_codecoveragereports_repo()