def build_reports(self, only=None):
    """
    Build all the possible covdir reports using current artifacts
    """
    os.makedirs(self.reports_dir, exist_ok=True)

    reports = {}
    for (
        (platform, suite),
        artifacts,
    ) in self.artifactsHandler.get_combinations().items():

        if only is not None and (platform, suite) not in only:
            continue

        # Generate covdir report for that suite & platform
        logger.info(
            "Building covdir suite report",
            suite=suite,
            platform=platform,
            artifacts=len(artifacts),
        )
        output = grcov.report(artifacts, source_dir=self.repo_dir, out_format="covdir")

        # Write output on FS
        path = os.path.join(self.reports_dir, f"{platform}.{suite}.json")
        with open(path, "wb") as f:
            f.write(output)

        reports[(platform, suite)] = path

    return reports
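
# Usage sketch for build_reports() above. The bot instance and the
# (platform, suite) pair are hypothetical; the point is that `only` filters
# which combinations get built, and the result maps (platform, suite) to the
# path of the covdir JSON written under self.reports_dir.
def build_linux_mochitest_report(bot):
    reports = bot.build_reports(only=[("linux", "mochitest")])
    return reports[("linux", "mochitest")]  # e.g. ".../linux.mochitest.json"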
def generate(self, artifacts, hgrev, out_dir="."):
    report = grcov.report(artifacts, out_format="coveralls+", source_dir=self.repo_dir)
    report = json.loads(report.decode("utf-8"))  # Decoding is only necessary until Python 3.6.

    zero_coverage_files = set()
    zero_coverage_functions = {}
    for sf in report["source_files"]:
        name = sf["name"]

        # For C/C++ source files, we can consider a file as being uncovered
        # when all its source lines are uncovered.
        all_lines_uncovered = all(c is None or c == 0 for c in sf["coverage"])

        # For JavaScript files, we can't do the same, as the top-level is always
        # executed, even if it just contains declarations. So, we need to check if
        # all its functions, except the top-level, are uncovered.
        all_functions_uncovered = True
        for f in sf["functions"]:
            f_name = f["name"]
            if f_name == "top-level":
                continue

            if not f["exec"]:
                if name in zero_coverage_functions:
                    zero_coverage_functions[name].append(f["name"])
                else:
                    zero_coverage_functions[name] = [f["name"]]
            else:
                all_functions_uncovered = False

        if all_lines_uncovered or (len(sf["functions"]) > 1 and all_functions_uncovered):
            zero_coverage_files.add(name)

    os.makedirs(os.path.join(out_dir, "zero_coverage_functions"), exist_ok=True)

    filesinfo = self.get_fileinfo(zero_coverage_functions.keys())

    zero_coverage_info = []
    for fname, functions in zero_coverage_functions.items():
        info = filesinfo[fname]
        info.update({
            "name": fname,
            "funcs": len(functions),
            "uncovered": fname in zero_coverage_files,
        })
        zero_coverage_info.append(info)

    zero_coverage_report = {
        "hg_revision": hgrev,
        "files": zero_coverage_info,
    }

    with open(os.path.join(out_dir, "zero_coverage_report.json"), "w") as f:
        json.dump(zero_coverage_report, f)
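
# A hedged sketch of reading back the file written by generate() above. Each
# entry in "files" carries at least "name", "funcs" and "uncovered", plus
# whatever extra metadata get_fileinfo() contributed (not shown in these
# snippets).
import json


def load_uncovered_files(path="zero_coverage_report.json"):
    with open(path) as f:
        report = json.load(f)
    # report["hg_revision"] identifies the revision the report was built from.
    return [entry["name"] for entry in report["files"] if entry["uncovered"]]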
def generate_suite_reports(self):
    for suite in self.suites:
        output = grcov.report(self.artifactsHandler.get(suite=suite), out_format='lcov')

        info_file = '%s.info' % suite

        with open(info_file, 'wb') as f:
            f.write(output)

        run_check([
            'genhtml',
            '-o', os.path.join(os.getcwd(), suite),
            '--show-details', '--highlight', '--ignore-errors', 'source', '--legend',
            os.path.join(os.getcwd(), info_file),
            '--prefix', self.repo_dir,
        ], cwd=self.repo_dir)

        os.remove('%s.info' % suite)

        with tarfile.open('code-coverage-reports/%s.tar.xz' % suite, 'w:xz') as tar:
            tar.add(suite)

        shutil.rmtree(os.path.join(os.getcwd(), suite))

        logger.info('Suite report generated', suite=suite)
def go_from_trigger_try(self):
    phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)

    with hgmo.HGMO(server_address=TRY_REPOSITORY) as hgmo_server:
        changesets = hgmo_server.get_automation_relevance_changesets(self.revision)['changesets']

    if not any(phabricatorUploader.parse_revision_id(changeset['desc']) is not None
               for changeset in changesets):
        logger.info('None of the commits in the try push are linked to a Phabricator revision')
        return

    self.retrieve_source_and_artifacts()

    output = grcov.report(
        self.artifactsHandler.get(),
        source_dir=self.repo_dir,
        service_number='SERVICE_NUMBER',
        commit_sha='COMMIT_SHA',
        token='TOKEN',
    )
    logger.info('Report generated successfully')

    logger.info('Upload changeset coverage data to Phabricator')
    phabricatorUploader.upload(json.loads(output), changesets)
def test_report_source_dir(
    fake_source_dir, grcov_artifact, grcov_existing_file_artifact
):
    output = grcov.report(
        [grcov_existing_file_artifact], source_dir=fake_source_dir, out_format="covdir"
    )
    report = json.loads(output.decode("utf-8"))
    assert report == {
        "children": {
            "code_coverage_bot": {
                "children": {
                    "cli.py": {
                        "coverage": [42, 42],
                        "coveragePercent": 100.0,
                        "linesCovered": 2,
                        "linesMissed": 0,
                        "linesTotal": 2,
                        "name": "cli.py",
                    }
                },
                "coveragePercent": 100.0,
                "linesCovered": 2,
                "linesMissed": 0,
                "linesTotal": 2,
                "name": "code_coverage_bot",
            }
        },
        "coveragePercent": 100.0,
        "linesCovered": 2,
        "linesMissed": 0,
        "linesTotal": 2,
        "name": "",
    }
def test_report_options(grcov_artifact, jsvm_artifact):
    output = grcov.report([grcov_artifact, jsvm_artifact],
                          out_format='coveralls',
                          options=['--ignore-dir', 'toolkit/*'])
    report = json.loads(output.decode('utf-8'))
    assert len(report['source_files']) == 1
    assert report['source_files'][0]['name'] == 'js/src/jit/BitSet.cpp'
def generate(suites, artifactsHandler, ccov_reports_dir, repo_dir):
    for suite in suites:
        output = grcov.report(artifactsHandler.get(suite=suite), out_format='lcov')

        info_file = os.path.join(ccov_reports_dir, '%s.info' % suite)

        with open(info_file, 'wb') as f:
            f.write(output)

        suite_dir = os.path.join(ccov_reports_dir, suite)

        run_check([
            'genhtml',
            '-o', suite_dir,
            '--show-details', '--highlight', '--ignore-errors', 'source', '--legend',
            info_file,
            '--prefix', repo_dir,
        ], cwd=repo_dir)

        os.remove(info_file)

        with tarfile.open(os.path.join(ccov_reports_dir, '%s.tar.xz' % suite), 'w:xz') as tar:
            tar.add(suite_dir, arcname=suite)

        shutil.rmtree(suite_dir)

        logger.info('Suite report generated', suite=suite)
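
# Usage sketch for generate() above -- all arguments are hypothetical. After
# it runs, ccov_reports_dir contains one <suite>.tar.xz archive of the
# genhtml HTML report per suite; the intermediate .info files are removed.
generate(
    suites=['mochitest'],
    artifactsHandler=artifacts_handler,  # assumed to expose .get(suite=...)
    ccov_reports_dir='/tmp/ccov-reports',
    repo_dir='/path/to/mozilla-central',
)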
def test_report_source_dir(grcov_artifact, grcov_existing_file_artifact):
    output = grcov.report([grcov_existing_file_artifact],
                          source_dir=os.getcwd(),
                          out_format='coveralls')
    report = json.loads(output.decode('utf-8'))
    # When we pass the source directory to the report function, grcov ignores non-existent files.
    assert len(report['source_files']) == 1
    assert report['source_files'][0]['name'] == 'code_coverage_bot/cli.py'
    # When we pass the source directory to grcov and the file exists, grcov can calculate its hash.
    assert report['source_files'][0]['source_digest'] == 'b53ea39de2095ba8dd0a6e4e6e52173d'
def test_report_source_dir(fake_source_dir, grcov_artifact, grcov_existing_file_artifact):
    output = grcov.report([grcov_existing_file_artifact],
                          source_dir=fake_source_dir,
                          out_format='coveralls')
    report = json.loads(output.decode('utf-8'))
    # When we pass the source directory to the report function, grcov ignores non-existent files.
    assert len(report['source_files']) == 1
    assert report['source_files'][0]['name'] == 'code_coverage_bot/cli.py'
    # When we pass the source directory to grcov and the file exists, grcov can calculate its hash.
    assert report['source_files'][0]['source_digest'] == '6ddb4095eb719e2a9f0a3f95677d24e0'
def generate_covdir(self):
    """
    Build the covdir report using current artifacts
    """
    output = grcov.report(
        self.artifactsHandler.get(), source_dir=self.repo_dir, out_format="covdir"
    )
    logger.info("Covdir report generated successfully")
    return json.loads(output)
def test_report_multiple_artifacts(grcov_artifact, jsvm_artifact):
    output = grcov.report([grcov_artifact, jsvm_artifact], out_format='coveralls')
    report = json.loads(output.decode('utf-8'))
    assert report['repo_token'] == 'unused'
    assert report['service_name'] == 'TaskCluster'
    assert report['service_job_number'] == '1'
    assert report['git']['branch'] == 'master'
    assert report['git']['head']['id'] == 'unused'
    assert report['service_number'] == ''
    assert len(report['source_files']) == 2
    assert set(['toolkit/components/osfile/osfile.jsm', 'js/src/jit/BitSet.cpp']) == set(
        [sf['name'] for sf in report['source_files']]
    )
def generate_covdir(self):
    '''
    Build the covdir report using current artifacts
    '''
    output = grcov.report(
        self.artifactsHandler.get(),
        source_dir=self.repo_dir,
        out_format='covdir',
    )
    logger.info('Covdir report generated successfully')
    return output
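
# The snippets above all go through grcov.report(); the wrapper itself is not
# shown here. Below is a minimal, hedged sketch of what such a wrapper could
# look like, assuming the `grcov` binary is on PATH. Flag names vary across
# grcov versions, so treat this as an illustration, not the real module.
import subprocess


def report(artifacts, source_dir=None, out_format="covdir", options=None,
           service_number=None, commit_sha=None, token=None):
    # Build the grcov command line; callers above expect raw bytes back.
    cmd = ["grcov", "-t", out_format]
    if source_dir is not None:
        cmd.extend(["-s", source_dir])
    if service_number is not None:
        cmd.extend(["--service-number", str(service_number)])
    if commit_sha is not None:
        cmd.extend(["--commit-sha", commit_sha])
    if token is not None:
        cmd.extend(["--token", token])
    if options:
        cmd.extend(options)
    cmd.extend(artifacts)
    return subprocess.run(cmd, check=True, stdout=subprocess.PIPE).stdout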
def test_report_source_dir(fake_source_dir, grcov_artifact, grcov_existing_file_artifact):
    output = grcov.report([grcov_existing_file_artifact],
                          source_dir=fake_source_dir,
                          out_format='coveralls')
    report = json.loads(output.decode('utf-8'))
    # When we pass the source directory to the report function, grcov ignores non-existent files.
    assert len(report['source_files']) == 1
    assert report['source_files'][0]['name'] == 'code_coverage_bot/cli.py'
    # When we pass the source directory to grcov and the file exists, grcov can calculate its hash.
    assert report['source_files'][0]['source_digest'] == '6ddb4095eb719e2a9f0a3f95677d24e0'
def go_from_trigger_mozilla_central(self):
    commit_sha = self.githubUtils.mercurial_to_git(self.revision)
    try:
        uploader.get_codecov(commit_sha)
        logger.warn('Build was already ingested')
        return
    except requests.exceptions.HTTPError:
        pass

    self.retrieve_source_and_artifacts()

    self.githubUtils.update_geckodev_repo()

    logger.info('GitHub revision', revision=commit_sha)

    self.githubUtils.post_github_status(commit_sha)

    r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
    r.raise_for_status()
    push_id = r.json()['pushid']

    output = grcov.report(self.artifactsHandler.get(),
                          source_dir=self.repo_dir,
                          service_number=push_id,
                          commit_sha=commit_sha,
                          token=secrets[secrets.COVERALLS_TOKEN])
    logger.info('Report generated successfully')

    report = json.loads(output)
    expected_extensions = ['.js', '.cpp']
    for extension in expected_extensions:
        assert any(
            f['name'].endswith(extension) for f in report['source_files']
        ), 'No {} file in the generated report'.format(extension)

    logger.info('Upload changeset coverage data to Phabricator')
    phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
    phabricatorUploader.upload(report)

    with ThreadPoolExecutorResult(max_workers=2) as executor:
        executor.submit(uploader.coveralls, output)
        executor.submit(uploader.codecov, output, commit_sha)

    logger.info('Waiting for build to be ingested by Codecov...')

    # Wait until the build has been ingested by Codecov.
    if uploader.codecov_wait(commit_sha):
        logger.info('Build ingested by codecov.io')
        notifier = Notifier(self.repo_dir, self.revision, self.client_id, self.access_token)
        notifier.notify()
    else:
        logger.error('codecov.io took too much time to ingest data.')
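
# uploader.codecov_wait() is used above but not defined in these snippets.
# A hedged sketch of what such a poll could look like, built only on the
# behaviour visible above: get_codecov() raises an HTTPError until Codecov
# has ingested the build. The attempt count and delay are made up.
import time

import requests


def codecov_wait(commit_sha, attempts=30, delay=60):
    for _ in range(attempts):
        try:
            uploader.get_codecov(commit_sha)  # assumed to raise until ingested
            return True
        except requests.exceptions.HTTPError:
            time.sleep(delay)
    return False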
def test_report_options(grcov_artifact, jsvm_artifact):
    output = grcov.report(
        [grcov_artifact, jsvm_artifact],
        out_format="covdir",
        options=["--ignore", "toolkit/*"],
    )
    report = json.loads(output.decode("utf-8"))
    assert report == {
        "children": {
            "js": {
                "children": {
                    "src": {
                        "children": {
                            "jit": {
                                "children": {
                                    "BitSet.cpp": {
                                        "coverage": [42, 42],
                                        "coveragePercent": 100.0,
                                        "linesCovered": 2,
                                        "linesMissed": 0,
                                        "linesTotal": 2,
                                        "name": "BitSet.cpp",
                                    }
                                },
                                "coveragePercent": 100.0,
                                "linesCovered": 2,
                                "linesMissed": 0,
                                "linesTotal": 2,
                                "name": "jit",
                            }
                        },
                        "coveragePercent": 100.0,
                        "linesCovered": 2,
                        "linesMissed": 0,
                        "linesTotal": 2,
                        "name": "src",
                    }
                },
                "coveragePercent": 100.0,
                "linesCovered": 2,
                "linesMissed": 0,
                "linesTotal": 2,
                "name": "js",
            }
        },
        "coveragePercent": 100.0,
        "linesCovered": 2,
        "linesMissed": 0,
        "linesTotal": 2,
        "name": "",
    }
def test_report_grcov_artifact(grcov_artifact):
    output = grcov.report([grcov_artifact], out_format='coveralls')
    report = json.loads(output.decode('utf-8'))
    assert report['repo_token'] == 'unused'
    assert report['service_name'] == 'TaskCluster'
    assert report['service_job_number'] == '1'
    assert report['git']['branch'] == 'master'
    assert report['git']['head']['id'] == 'unused'
    assert report['service_number'] == ''
    assert len(report['source_files']) == 1
    assert report['source_files'][0]['name'] == 'js/src/jit/BitSet.cpp'
    assert report['source_files'][0]['coverage'] == [42, 42]
    assert report['source_files'][0]['branches'] == []
    assert 'source_digest' in report['source_files'][0]
    assert 'functions' not in report['source_files'][0]
def test_report_jsvm_artifact(jsvm_artifact):
    output = grcov.report([jsvm_artifact], out_format='coveralls')
    report = json.loads(output.decode('utf-8'))
    assert report['repo_token'] == 'unused'
    assert report['service_name'] == 'TaskCluster'
    assert report['service_job_number'] == '1'
    assert report['git']['branch'] == 'master'
    assert report['git']['head']['id'] == 'unused'
    assert report['service_number'] == ''
    assert len(report['source_files']) == 1
    assert report['source_files'][0]['name'] == 'toolkit/components/osfile/osfile.jsm'
    assert report['source_files'][0]['coverage'] == [42, 42]
    assert report['source_files'][0]['branches'] == []
    assert 'source_digest' in report['source_files'][0]
    assert 'functions' not in report['source_files'][0]
def test_report_grcov_artifact_coverallsplus(grcov_artifact):
    output = grcov.report([grcov_artifact], out_format="coveralls+")
    report = json.loads(output.decode("utf-8"))
    assert report["repo_token"] == "unused"
    assert report["git"]["branch"] == "master"
    assert report["service_number"] == ""
    assert len(report["source_files"]) == 1
    assert report["source_files"][0]["name"] == "js/src/jit/BitSet.cpp"
    assert report["source_files"][0]["coverage"] == [42, 42]
    assert report["source_files"][0]["branches"] == []
    assert "source_digest" in report["source_files"][0]
    assert len(report["source_files"][0]["functions"]) == 1
    assert report["source_files"][0]["functions"][0]["exec"]
    assert report["source_files"][0]["functions"][0]["name"] == "js::jit::BitSet::empty"
    assert report["source_files"][0]["functions"][0]["start"] == 1
def test_report_jsvm_artifact(jsvm_artifact):
    output = grcov.report([jsvm_artifact], out_format="covdir")
    report = json.loads(output.decode("utf-8"))
    assert report == {
        "children": {
            "toolkit": {
                "children": {
                    "components": {
                        "children": {
                            "osfile": {
                                "children": {
                                    "osfile.jsm": {
                                        "coverage": [42, 42],
                                        "coveragePercent": 100.0,
                                        "linesCovered": 2,
                                        "linesMissed": 0,
                                        "linesTotal": 2,
                                        "name": "osfile.jsm",
                                    }
                                },
                                "coveragePercent": 100.0,
                                "linesCovered": 2,
                                "linesMissed": 0,
                                "linesTotal": 2,
                                "name": "osfile",
                            }
                        },
                        "coveragePercent": 100.0,
                        "linesCovered": 2,
                        "linesMissed": 0,
                        "linesTotal": 2,
                        "name": "components",
                    }
                },
                "coveragePercent": 100.0,
                "linesCovered": 2,
                "linesMissed": 0,
                "linesTotal": 2,
                "name": "toolkit",
            }
        },
        "coveragePercent": 100.0,
        "linesCovered": 2,
        "linesMissed": 0,
        "linesTotal": 2,
        "name": "",
    }
def go_from_trigger_try(self):
    phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)

    with hgmo.HGMO(server_address=TRY_REPOSITORY) as hgmo_server:
        changesets = hgmo_server.get_automation_relevance_changesets(self.revision)

    if not any(phabricatorUploader.parse_revision_id(changeset['desc']) is not None
               for changeset in changesets):
        logger.info('None of the commits in the try push are linked to a Phabricator revision')
        return

    self.retrieve_source_and_artifacts()

    output = grcov.report(
        self.artifactsHandler.get(),
        source_dir=self.repo_dir,
        service_number='SERVICE_NUMBER',
        commit_sha='COMMIT_SHA',
        token='TOKEN',
    )
    logger.info('Report generated successfully')

    logger.info('Upload changeset coverage data to Phabricator')
    phabricatorUploader.upload(json.loads(output), changesets)
def test_report_multiple_artifacts(grcov_artifact, jsvm_artifact):
    output = grcov.report([grcov_artifact, jsvm_artifact], out_format="covdir")
    report = json.loads(output.decode("utf-8"))
    assert report["linesTotal"] == 4
    assert report["linesCovered"] == 4
    assert report["coveragePercent"] == 100.0
    assert covdir_get(report, "toolkit/components/osfile/osfile.jsm") == {
        "coverage": [42, 42],
        "coveragePercent": 100.0,
        "linesCovered": 2,
        "linesMissed": 0,
        "linesTotal": 2,
        "name": "osfile.jsm",
    }
    assert covdir_get(report, "js/src/jit/BitSet.cpp") == {
        "coverage": [42, 42],
        "coveragePercent": 100.0,
        "linesCovered": 2,
        "linesMissed": 0,
        "linesTotal": 2,
        "name": "BitSet.cpp",
    }
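
# `covdir_get` is a test helper used above but not defined in these snippets.
# A minimal sketch consistent with the covdir trees asserted elsewhere: walk
# the nested "children" dicts along a slash-separated path and return the
# node for the final component.
def covdir_get(report, path):
    node = report
    for part in path.split("/"):
        node = node["children"][part]
    return node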
def test_report_service_number(grcov_artifact):
    output = grcov.report([grcov_artifact], service_number='test', out_format='coveralls')
    report = json.loads(output.decode('utf-8'))
    assert report['service_number'] == 'test'
def test_report_token(grcov_artifact):
    output = grcov.report([grcov_artifact], token='test', out_format='coveralls')
    report = json.loads(output.decode('utf-8'))
    assert report['repo_token'] == 'test'
def test_report_invalid_output_format(grcov_artifact):
    with pytest.raises(AssertionError, match="Unsupported output format"):
        grcov.report([grcov_artifact], out_format="UNSUPPORTED")
    with pytest.raises(AssertionError, match="Unsupported output format"):
        grcov.report([grcov_artifact], out_format="coveralls")
def generate(self, artifacts, hgrev, gitrev, out_dir='.'):
    report = grcov.report(artifacts, out_format='coveralls+', source_dir=self.repo_dir)
    report = json.loads(report.decode('utf-8'))  # Decoding is only necessary until Python 3.6.

    zero_coverage_files = set()
    zero_coverage_functions = {}
    for sf in report['source_files']:
        name = sf['name']

        # For C/C++ source files, we can consider a file as being uncovered
        # when all its source lines are uncovered.
        all_lines_uncovered = all(c is None or c == 0 for c in sf['coverage'])

        # For JavaScript files, we can't do the same, as the top-level is always
        # executed, even if it just contains declarations. So, we need to check if
        # all its functions, except the top-level, are uncovered.
        all_functions_uncovered = True
        for f in sf['functions']:
            f_name = f['name']
            if f_name == 'top-level':
                continue

            if not f['exec']:
                if name in zero_coverage_functions:
                    zero_coverage_functions[name].append(f['name'])
                else:
                    zero_coverage_functions[name] = [f['name']]
            else:
                all_functions_uncovered = False

        if all_lines_uncovered or (len(sf['functions']) > 1 and all_functions_uncovered):
            zero_coverage_files.add(name)

    os.makedirs(os.path.join(out_dir, 'zero_coverage_functions'), exist_ok=True)

    filesinfo = self.get_fileinfo(zero_coverage_functions.keys())

    zero_coverage_info = []
    for fname, functions in zero_coverage_functions.items():
        info = filesinfo[fname]
        info.update({
            'name': fname,
            'funcs': len(functions),
            'uncovered': fname in zero_coverage_files,
        })
        zero_coverage_info.append(info)

    zero_coverage_report = {
        'github_revision': gitrev,
        'hg_revision': hgrev,
        'files': zero_coverage_info,
    }

    with open(os.path.join(out_dir, 'zero_coverage_report.json'), 'w') as f:
        json.dump(zero_coverage_report, f)
def test_report_invalid_output_format(grcov_artifact):
    with pytest.raises(click.exceptions.ClickException,
                       message='`grcov` failed with code: 1.'):
        grcov.report([grcov_artifact], out_format='UNSUPPORTED')
def test_report_commit_sha(grcov_artifact):
    output = grcov.report([grcov_artifact], commit_sha='test', out_format='coveralls')
    report = json.loads(output.decode('utf-8'))
    assert report['git']['head']['id'] == 'test'
def go_from_trigger_mozilla_central(self):
    commit_sha = self.githubUtils.mercurial_to_git(self.revision)
    try:
        uploader.get_codecov(commit_sha)
        logger.warn('Build was already ingested')
        return
    except requests.exceptions.HTTPError:
        pass

    self.retrieve_source_and_artifacts()

    self.githubUtils.update_geckodev_repo()

    logger.info('GitHub revision', revision=commit_sha)

    self.githubUtils.post_github_status(commit_sha)

    r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
    r.raise_for_status()
    push_id = r.json()['pushid']

    # Check that all JavaScript files present in the coverage artifacts actually exist.
    # If they don't, there might be a bug in the LCOV rewriter.
    for artifact in self.artifactsHandler.get():
        if 'jsvm' not in artifact:
            continue

        with zipfile.ZipFile(artifact, 'r') as zf:
            for file_name in zf.namelist():
                with zf.open(file_name, 'r') as fl:
                    source_files = [line[3:].decode('utf-8').rstrip() for line in fl if line.startswith(b'SF:')]
                    missing_files = [f for f in source_files if not os.path.exists(os.path.join(self.repo_dir, f))]
                    if len(missing_files) != 0:
                        logger.warn(f'{missing_files} are present in coverage reports, but missing from the repository')

    output = grcov.report(
        self.artifactsHandler.get(),
        source_dir=self.repo_dir,
        service_number=push_id,
        commit_sha=commit_sha,
        token=secrets[secrets.COVERALLS_TOKEN]
    )
    logger.info('Report generated successfully')

    report = json.loads(output)
    expected_extensions = ['.js', '.cpp']
    for extension in expected_extensions:
        assert any(f['name'].endswith(extension) for f in report['source_files']), 'No {} file in the generated report'.format(extension)

    logger.info('Upload changeset coverage data to Phabricator')
    phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
    phabricatorUploader.upload(report)

    with ThreadPoolExecutorResult(max_workers=2) as executor:
        executor.submit(uploader.coveralls, output)
        executor.submit(uploader.codecov, output, commit_sha)

    logger.info('Waiting for build to be ingested by Codecov...')

    # Wait until the build has been ingested by Codecov.
    if uploader.codecov_wait(commit_sha):
        logger.info('Build ingested by codecov.io')
        notifier = Notifier(self.repo_dir, self.revision, self.client_id, self.access_token)
        notifier.notify()
    else:
        logger.error('codecov.io took too much time to ingest data.')
def go_from_trigger_mozilla_central(self):
    commit_sha = self.githubUtils.mercurial_to_git(self.revision)
    try:
        uploader.get_codecov(commit_sha)
        logger.warn('Build was already ingested')
        return
    except requests.exceptions.HTTPError:
        pass

    self.retrieve_source_and_artifacts()

    self.githubUtils.update_geckodev_repo()

    logger.info('GitHub revision', revision=commit_sha)

    self.githubUtils.post_github_status(commit_sha)

    r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
    r.raise_for_status()
    push_id = r.json()['pushid']

    # Check that all JavaScript files present in the coverage artifacts actually exist.
    # If they don't, there might be a bug in the LCOV rewriter.
    for artifact in self.artifactsHandler.get():
        if 'jsvm' not in artifact:
            continue

        with zipfile.ZipFile(artifact, 'r') as zf:
            for file_name in zf.namelist():
                with zf.open(file_name, 'r') as fl:
                    source_files = [
                        line[3:].decode('utf-8').rstrip()
                        for line in fl
                        if line.startswith(b'SF:')
                    ]
                    missing_files = [
                        f for f in source_files
                        if not os.path.exists(os.path.join(self.repo_dir, f))
                    ]
                    if len(missing_files) != 0:
                        logger.warn(
                            f'{missing_files} are present in coverage reports, but missing from the repository'
                        )

    output = grcov.report(self.artifactsHandler.get(),
                          source_dir=self.repo_dir,
                          service_number=push_id,
                          commit_sha=commit_sha,
                          token=secrets[secrets.COVERALLS_TOKEN])
    logger.info('Report generated successfully')

    report = json.loads(output)
    expected_extensions = ['.js', '.cpp']
    for extension in expected_extensions:
        assert any(
            f['name'].endswith(extension) for f in report['source_files']
        ), 'No {} file in the generated report'.format(extension)

    logger.info('Upload changeset coverage data to Phabricator')
    phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
    phabricatorUploader.upload(report)

    with ThreadPoolExecutorResult(max_workers=2) as executor:
        executor.submit(uploader.coveralls, output)
        executor.submit(uploader.codecov, output, commit_sha)

    logger.info('Waiting for build to be ingested by Codecov...')

    # Wait until the build has been ingested by Codecov.
    if uploader.codecov_wait(commit_sha):
        logger.info('Build ingested by codecov.io')
        notifier = Notifier(self.repo_dir, self.revision, self.client_id, self.access_token)
        notifier.notify()
    else:
        logger.error('codecov.io took too much time to ingest data.')
def go(self):
    if self.from_pulse:
        commit_sha = self.githubUtils.mercurial_to_git(self.revision)
        try:
            uploader.get_codecov(commit_sha)
            logger.warn('Build was already ingested')
            return
        except requests.exceptions.HTTPError:
            pass

    with ThreadPoolExecutorResult(max_workers=2) as executor:
        # Thread 1 - Download coverage artifacts.
        executor.submit(self.artifactsHandler.download_all)

        # Thread 2 - Clone mozilla-central.
        executor.submit(self.clone_mozilla_central, self.revision)

    if self.from_pulse:
        self.githubUtils.update_geckodev_repo()

        logger.info('GitHub revision', revision=commit_sha)

        self.githubUtils.post_github_status(commit_sha)

        r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
        r.raise_for_status()
        push_id = r.json()['pushid']

        output = grcov.report(self.artifactsHandler.get(),
                              source_dir=self.repo_dir,
                              service_number=push_id,
                              commit_sha=commit_sha,
                              token=secrets[secrets.COVERALLS_TOKEN])
        logger.info('Report generated successfully')

        logger.info('Upload changeset coverage data to Phabricator')
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
        phabricatorUploader.upload(json.loads(output))

        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(uploader.coveralls, output)
            executor.submit(uploader.codecov, output, commit_sha)

        logger.info('Waiting for build to be ingested by Codecov...')

        # Wait until the build has been ingested by Codecov.
        if uploader.codecov_wait(commit_sha):
            logger.info('Build ingested by codecov.io')
            self.notifier.notify()
        else:
            logger.error('codecov.io took too much time to ingest data.')
    else:
        logger.info('Generating suite reports')
        os.makedirs(self.ccov_reports_dir, exist_ok=True)
        suite_reports.generate(self.suites, self.artifactsHandler, self.ccov_reports_dir, self.repo_dir)

        logger.info('Generating zero coverage reports')
        zc = ZeroCov(self.repo_dir)
        zc.generate(self.artifactsHandler.get(), self.revision, self.github_revision)

        logger.info('Generating chunk mapping')
        chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as "latest".
        # Given that all tasks have the same rank, the latest task that finishes will
        # overwrite the "latest" entry.
        namespaces = [
            'project.releng.services.project.{}.code_coverage_bot.{}'.format(
                secrets[secrets.APP_CHANNEL], self.revision),
            'project.releng.services.project.{}.code_coverage_bot.latest'.format(
                secrets[secrets.APP_CHANNEL]),
        ]
        for namespace in namespaces:
            self.index_service.insertTask(namespace, {
                'taskId': os.environ['TASK_ID'],
                'rank': 0,
                'data': {},
                'expires': (datetime.utcnow() + timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
            })

        os.chdir(self.ccov_reports_dir)
        self.githubUtils.update_codecoveragereports_repo()