def go(self):
    with ThreadPoolExecutorResult(max_workers=2) as executor:
        # Thread 1 - Download coverage artifacts.
        executor.submit(self.artifactsHandler.download_all)

        # Thread 2 - Clone mozilla-central.
        executor.submit(self.clone_mozilla_central, self.revision)

    if self.from_pulse:
        self.githubUtils.update_geckodev_repo()

        commit_sha = self.githubUtils.get_commit(self.revision)
        logger.info('GitHub revision', revision=commit_sha)

        self.githubUtils.post_github_status(commit_sha)

        r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
        r.raise_for_status()
        push_id = r.json()['pushid']

        output = grcov.report(
            self.artifactsHandler.get(),
            source_dir=self.repo_dir,
            service_number=push_id,
            commit_sha=commit_sha,
            token=secrets[secrets.COVERALLS_TOKEN]
        )
        logger.info('Report generated successfully')

        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(uploader.coveralls, output)
            executor.submit(uploader.codecov, output, commit_sha)

        logger.info('Waiting for build to be ingested by Codecov...')

        # Wait until the build has been ingested by Codecov.
        if uploader.codecov_wait(commit_sha):
            logger.info('Build ingested by codecov.io')
            self.notifier.notify()
        else:
            logger.info('codecov.io took too much time to ingest data.')
    else:
        mkdir('code-coverage-reports')

        # XXX: Disabled as it is unused for now.
        # self.generate_suite_reports()

        report_generators.zero_coverage(self.artifactsHandler.get())

        self.generate_chunk_mapping()

        os.chdir('code-coverage-reports')
        self.githubUtils.update_codecoveragereports_repo()
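
# Illustration (not part of the original module): `ThreadPoolExecutorResult` is
# assumed to behave like concurrent.futures.ThreadPoolExecutor, except that
# exceptions raised by submitted tasks are re-raised when the `with` block
# exits, so a failed download or clone above aborts the run instead of being
# silently dropped. A minimal sketch under that assumption:
import concurrent.futures


class ThreadPoolExecutorResult(concurrent.futures.ThreadPoolExecutor):
    def __init__(self, *args, **kwargs):
        self.futures = []
        super().__init__(*args, **kwargs)

    def submit(self, *args, **kwargs):
        # Keep track of every future so __exit__ can surface its outcome.
        future = super().submit(*args, **kwargs)
        self.futures.append(future)
        return future

    def __exit__(self, *exc_info):
        try:
            # result() blocks until the task finishes and re-raises any
            # exception the task raised.
            for future in self.futures:
                future.result()
        finally:
            super().__exit__(*exc_info)
        return False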
def test_zero_coverage(tmpdir, grcov_artifact, grcov_uncovered_artifact,
                       jsvm_artifact, jsvm_uncovered_artifact,
                       grcov_uncovered_function_artifact, jsvm_uncovered_function_artifact):
    tmp_path = tmpdir.strpath

    report_generators.zero_coverage([
        grcov_artifact, grcov_uncovered_artifact,
        jsvm_artifact, jsvm_uncovered_artifact,
        grcov_uncovered_function_artifact, jsvm_uncovered_function_artifact
    ], out_dir=tmp_path)

    with open(os.path.join(tmp_path, 'zero_coverage_functions/mozglue_build_dummy.cpp.json'), 'r') as f:
        assert set(json.load(f)) == set(['main'])

    with open(os.path.join(tmp_path, 'zero_coverage_functions/js_src_jit_JIT.cpp.json'), 'r') as f:
        assert set(json.load(f)) == set(['anUncoveredFunction'])

    with open(os.path.join(tmp_path, 'zero_coverage_functions/toolkit_components_osfile_osfile.jsm.json'), 'r') as f:
        assert set(json.load(f)) == set(['read', 'write'])

    with open(os.path.join(tmp_path, 'zero_coverage_report.json'), 'r') as f:
        zero_coverage_functions = json.load(f)

    expected_zero_coverage_functions = [
        {'funcs': 1, 'name': 'mozglue/build/dummy.cpp', 'uncovered': True},
        {'funcs': 2, 'name': 'toolkit/components/osfile/osfile.jsm', 'uncovered': False},
        {'funcs': 1, 'name': 'js/src/jit/JIT.cpp', 'uncovered': False},
        {'funcs': 1, 'name': 'toolkit/components/osfile/osfile-win.jsm', 'uncovered': True},
    ]
    assert len(zero_coverage_functions) == len(expected_zero_coverage_functions)

    while len(expected_zero_coverage_functions):
        exp_item = expected_zero_coverage_functions.pop()
        found = False
        for found_item in zero_coverage_functions:
            if found_item['name'] == exp_item['name']:
                found = True
                break
        assert found
        assert found_item['funcs'] == exp_item['funcs']
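
# Optional sketch (an assumption, not part of the original test): the same
# order-insensitive comparison can be expressed with a lookup dict keyed by
# file name, which drops the nested loop and also verifies the 'uncovered'
# flag carried by the expected entries above.
def assert_zero_coverage_matches(actual, expected):
    actual_by_name = {item['name']: item for item in actual}
    # Every expected file must be present, with a matching function count and
    # uncovered flag.
    assert set(actual_by_name) == set(item['name'] for item in expected)
    for exp_item in expected:
        found_item = actual_by_name[exp_item['name']]
        assert found_item['funcs'] == exp_item['funcs']
        assert found_item['uncovered'] == exp_item['uncovered']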
def go(self):
    with ThreadPoolExecutorResult(max_workers=2) as executor:
        # Thread 1 - Download coverage artifacts.
        executor.submit(self.artifactsHandler.download_all)

        # Thread 2 - Clone mozilla-central.
        executor.submit(self.clone_mozilla_central, self.revision)

    if self.from_pulse:
        self.githubUtils.update_geckodev_repo()

        commit_sha = self.githubUtils.get_commit(self.revision)
        logger.info('GitHub revision', revision=commit_sha)

        self.githubUtils.post_github_status(commit_sha)

        r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
        r.raise_for_status()
        push_id = r.json()['pushid']

        output = grcov.report(
            self.artifactsHandler.get(),
            source_dir=self.repo_dir,
            service_number=push_id,
            commit_sha=commit_sha,
            token=secrets[secrets.COVERALLS_TOKEN]
        )
        logger.info('Report generated successfully')

        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(uploader.coveralls, output)
            executor.submit(uploader.codecov, output, commit_sha)

        logger.info('Waiting for build to be ingested by Codecov...')

        # Wait until the build has been ingested by Codecov.
        if uploader.codecov_wait(commit_sha):
            logger.info('Build ingested by codecov.io')
            self.notifier.notify()
        else:
            logger.error('codecov.io took too much time to ingest data.')
    else:
        os.makedirs('code-coverage-reports', exist_ok=True)

        self.generate_suite_reports()

        report_generators.zero_coverage(self.artifactsHandler.get())

        self.generate_chunk_mapping()

        # Index the task in the TaskCluster index.
        self.index_service.insertTask(
            'project.releng.services.project.{}.shipit_code_coverage.{}'.format(secrets[secrets.APP_CHANNEL], self.revision),
            {
                'taskId': os.environ['TASK_ID'],
                'rank': 0,
                'data': {},
                'expires': (datetime.utcnow() + timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
            }
        )

        os.chdir('code-coverage-reports')
        self.githubUtils.update_codecoveragereports_repo()
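
# Illustration (an assumption about behaviour, not the real implementation):
# `uploader.codecov_wait` above is assumed to poll Codecov until the uploaded
# report has been processed, returning False once a timeout is hit. A generic
# sketch of that kind of wait loop, with hypothetical timeout values:
import time


def wait_until(check, timeout=3600, interval=60):
    # Call `check` every `interval` seconds until it returns True or `timeout`
    # seconds have elapsed; return True on success, False on timeout.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if check():
            return True
        time.sleep(interval)
    return False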