def go_from_trigger_mozilla_central(self):
    """Run the full coverage pipeline for a mozilla-central push.

    Maps the mercurial revision to its git SHA, bails out early if Codecov
    has already ingested this build, then generates a grcov report,
    sanity-checks it, uploads it to Phabricator, Coveralls and Codecov,
    and sends notifications once Codecov has ingested the data.
    """
    commit_sha = self.githubUtils.mercurial_to_git(self.revision)
    try:
        # A successful lookup means the build was already processed upstream.
        uploader.get_codecov(commit_sha)
        logger.warning('Build was already ingested')
        return
    except requests.exceptions.HTTPError:
        # Not known to Codecov yet: proceed with the pipeline.
        pass

    self.retrieve_source_and_artifacts()

    self.githubUtils.update_geckodev_repo()

    logger.info('GitHub revision', revision=commit_sha)

    self.githubUtils.post_github_status(commit_sha)

    # The push id is required by the Coveralls upload below.
    r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
    r.raise_for_status()
    push_id = r.json()['pushid']

    output = grcov.report(
        self.artifactsHandler.get(),
        source_dir=self.repo_dir,
        service_number=push_id,
        commit_sha=commit_sha,
        token=secrets[secrets.COVERALLS_TOKEN],
    )
    logger.info('Report generated successfully')

    report = json.loads(output)
    # Guard against empty or truncated reports: both JS and C++ coverage
    # must be present, otherwise something upstream is broken.
    expected_extensions = ['.js', '.cpp']
    for extension in expected_extensions:
        assert any(
            f['name'].endswith(extension) for f in report['source_files']
        ), 'No {} file in the generated report'.format(extension)

    logger.info('Upload changeset coverage data to Phabricator')
    phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
    phabricatorUploader.upload(report)

    # Upload to Coveralls and Codecov concurrently.
    with ThreadPoolExecutorResult(max_workers=2) as executor:
        executor.submit(uploader.coveralls, output)
        executor.submit(uploader.codecov, output, commit_sha)

    logger.info('Waiting for build to be ingested by Codecov...')

    # Wait until the build has been ingested by Codecov.
    if uploader.codecov_wait(commit_sha):
        logger.info('Build ingested by codecov.io')
        notifier = Notifier(self.repo_dir, self.revision, self.client_id, self.access_token)
        notifier.notify()
    else:
        logger.error('codecov.io took too much time to ingest data.')
def go_from_trigger_mozilla_central(self):
    """Run the full coverage pipeline for a mozilla-central push.

    Maps the mercurial revision to its git SHA, bails out early if Codecov
    has already ingested this build, verifies the jsvm coverage artifacts,
    then generates a grcov report, sanity-checks it, uploads it to
    Phabricator, Coveralls and Codecov, and sends notifications once
    Codecov has ingested the data.
    """
    commit_sha = self.githubUtils.mercurial_to_git(self.revision)
    try:
        # A successful lookup means the build was already processed upstream.
        uploader.get_codecov(commit_sha)
        logger.warning('Build was already ingested')
        return
    except requests.exceptions.HTTPError:
        # Not known to Codecov yet: proceed with the pipeline.
        pass

    self.retrieve_source_and_artifacts()

    self.githubUtils.update_geckodev_repo()

    logger.info('GitHub revision', revision=commit_sha)

    self.githubUtils.post_github_status(commit_sha)

    # The push id is required by the Coveralls upload below.
    r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
    r.raise_for_status()
    push_id = r.json()['pushid']

    # Check that all JavaScript files present in the coverage artifacts actually exist.
    # If they don't, there might be a bug in the LCOV rewriter.
    for artifact in self.artifactsHandler.get():
        if 'jsvm' not in artifact:
            continue

        with zipfile.ZipFile(artifact, 'r') as zf:
            for file_name in zf.namelist():
                with zf.open(file_name, 'r') as fl:
                    # LCOV "SF:<path>" records name every covered source file.
                    source_files = [
                        line[3:].decode('utf-8').rstrip()
                        for line in fl
                        if line.startswith(b'SF:')
                    ]
                    missing_files = [
                        f for f in source_files
                        if not os.path.exists(os.path.join(self.repo_dir, f))
                    ]
                    if missing_files:
                        logger.warning(
                            f'{missing_files} are present in coverage reports, but missing from the repository'
                        )

    output = grcov.report(
        self.artifactsHandler.get(),
        source_dir=self.repo_dir,
        service_number=push_id,
        commit_sha=commit_sha,
        token=secrets[secrets.COVERALLS_TOKEN],
    )
    logger.info('Report generated successfully')

    report = json.loads(output)
    # Guard against empty or truncated reports: both JS and C++ coverage
    # must be present, otherwise something upstream is broken.
    expected_extensions = ['.js', '.cpp']
    for extension in expected_extensions:
        assert any(
            f['name'].endswith(extension) for f in report['source_files']
        ), 'No {} file in the generated report'.format(extension)

    logger.info('Upload changeset coverage data to Phabricator')
    phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
    phabricatorUploader.upload(report)

    # Upload to Coveralls and Codecov concurrently.
    with ThreadPoolExecutorResult(max_workers=2) as executor:
        executor.submit(uploader.coveralls, output)
        executor.submit(uploader.codecov, output, commit_sha)

    logger.info('Waiting for build to be ingested by Codecov...')

    # Wait until the build has been ingested by Codecov.
    if uploader.codecov_wait(commit_sha):
        logger.info('Build ingested by codecov.io')
        notifier = Notifier(self.repo_dir, self.revision, self.client_id, self.access_token)
        notifier.notify()
    else:
        logger.error('codecov.io took too much time to ingest data.')
def go_from_trigger_mozilla_central(self):
    """Run the full coverage pipeline for a mozilla-central push.

    Maps the mercurial revision to its git SHA, bails out early if Codecov
    has already ingested this build, verifies the jsvm coverage artifacts,
    then generates a grcov report, sanity-checks it, uploads it to
    Phabricator, Coveralls and Codecov, and sends notifications once
    Codecov has ingested the data.
    """
    commit_sha = self.githubUtils.mercurial_to_git(self.revision)
    try:
        # A successful lookup means the build was already processed upstream.
        uploader.get_codecov(commit_sha)
        logger.warning('Build was already ingested')
        return
    except requests.exceptions.HTTPError:
        # Not known to Codecov yet: proceed with the pipeline.
        pass

    self.retrieve_source_and_artifacts()

    self.githubUtils.update_geckodev_repo()

    logger.info('GitHub revision', revision=commit_sha)

    self.githubUtils.post_github_status(commit_sha)

    # The push id is required by the Coveralls upload below.
    r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
    r.raise_for_status()
    push_id = r.json()['pushid']

    # Check that all JavaScript files present in the coverage artifacts actually exist.
    # If they don't, there might be a bug in the LCOV rewriter.
    for artifact in self.artifactsHandler.get():
        if 'jsvm' not in artifact:
            continue

        with zipfile.ZipFile(artifact, 'r') as zf:
            for file_name in zf.namelist():
                with zf.open(file_name, 'r') as fl:
                    # LCOV "SF:<path>" records name every covered source file.
                    source_files = [
                        line[3:].decode('utf-8').rstrip()
                        for line in fl
                        if line.startswith(b'SF:')
                    ]
                    missing_files = [
                        f for f in source_files
                        if not os.path.exists(os.path.join(self.repo_dir, f))
                    ]
                    if missing_files:
                        logger.warning(
                            f'{missing_files} are present in coverage reports, but missing from the repository'
                        )

    output = grcov.report(
        self.artifactsHandler.get(),
        source_dir=self.repo_dir,
        service_number=push_id,
        commit_sha=commit_sha,
        token=secrets[secrets.COVERALLS_TOKEN],
    )
    logger.info('Report generated successfully')

    report = json.loads(output)
    # Guard against empty or truncated reports: both JS and C++ coverage
    # must be present, otherwise something upstream is broken.
    expected_extensions = ['.js', '.cpp']
    for extension in expected_extensions:
        assert any(
            f['name'].endswith(extension) for f in report['source_files']
        ), 'No {} file in the generated report'.format(extension)

    logger.info('Upload changeset coverage data to Phabricator')
    phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
    phabricatorUploader.upload(report)

    # Upload to Coveralls and Codecov concurrently.
    with ThreadPoolExecutorResult(max_workers=2) as executor:
        executor.submit(uploader.coveralls, output)
        executor.submit(uploader.codecov, output, commit_sha)

    logger.info('Waiting for build to be ingested by Codecov...')

    # Wait until the build has been ingested by Codecov.
    if uploader.codecov_wait(commit_sha):
        logger.info('Build ingested by codecov.io')
        notifier = Notifier(self.repo_dir, self.revision, self.client_id, self.access_token)
        notifier.notify()
    else:
        logger.error('codecov.io took too much time to ingest data.')
def go(self):
    """Entry point of the bot.

    Pulse-triggered runs (self.from_pulse) upload coverage for a single
    revision to Phabricator, Coveralls and Codecov and notify once
    ingested; cron-style runs instead generate suite, zero-coverage and
    chunk-mapping reports, index the task in TaskCluster and push the
    reports to the code-coverage-reports repository.
    """
    if self.from_pulse:
        commit_sha = self.githubUtils.mercurial_to_git(self.revision)
        try:
            # A successful lookup means the build was already processed upstream.
            uploader.get_codecov(commit_sha)
            logger.warning('Build was already ingested')
            return
        except requests.exceptions.HTTPError:
            # Not known to Codecov yet: proceed with the pipeline.
            pass

    with ThreadPoolExecutorResult(max_workers=2) as executor:
        # Thread 1 - Download coverage artifacts.
        executor.submit(self.artifactsHandler.download_all)

        # Thread 2 - Clone mozilla-central.
        executor.submit(self.clone_mozilla_central, self.revision)

    if self.from_pulse:
        self.githubUtils.update_geckodev_repo()

        logger.info('GitHub revision', revision=commit_sha)

        self.githubUtils.post_github_status(commit_sha)

        # The push id is required by the Coveralls upload below.
        r = requests.get('https://hg.mozilla.org/mozilla-central/json-rev/%s' % self.revision)
        r.raise_for_status()
        push_id = r.json()['pushid']

        output = grcov.report(
            self.artifactsHandler.get(),
            source_dir=self.repo_dir,
            service_number=push_id,
            commit_sha=commit_sha,
            token=secrets[secrets.COVERALLS_TOKEN],
        )
        logger.info('Report generated successfully')

        logger.info('Upload changeset coverage data to Phabricator')
        phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
        phabricatorUploader.upload(json.loads(output))

        # Upload to Coveralls and Codecov concurrently.
        with ThreadPoolExecutorResult(max_workers=2) as executor:
            executor.submit(uploader.coveralls, output)
            executor.submit(uploader.codecov, output, commit_sha)

        logger.info('Waiting for build to be ingested by Codecov...')

        # Wait until the build has been ingested by Codecov.
        if uploader.codecov_wait(commit_sha):
            logger.info('Build ingested by codecov.io')
            self.notifier.notify()
        else:
            logger.error('codecov.io took too much time to ingest data.')
    else:
        logger.info('Generating suite reports')
        os.makedirs(self.ccov_reports_dir, exist_ok=True)
        suite_reports.generate(self.suites, self.artifactsHandler, self.ccov_reports_dir, self.repo_dir)

        logger.info('Generating zero coverage reports')
        zc = ZeroCov(self.repo_dir)
        zc.generate(self.artifactsHandler.get(), self.revision, self.github_revision)

        logger.info('Generating chunk mapping')
        chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler)

        # Index the task in the TaskCluster index at the given revision and as "latest".
        # Given that all tasks have the same rank, the latest task that finishes will
        # overwrite the "latest" entry.
        namespaces = [
            'project.releng.services.project.{}.code_coverage_bot.{}'.format(secrets[secrets.APP_CHANNEL], self.revision),
            'project.releng.services.project.{}.code_coverage_bot.latest'.format(secrets[secrets.APP_CHANNEL]),
        ]
        for namespace in namespaces:
            self.index_service.insertTask(
                namespace,
                {
                    'taskId': os.environ['TASK_ID'],
                    'rank': 0,
                    'data': {},
                    'expires': (datetime.utcnow() + timedelta(180)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                })

        os.chdir(self.ccov_reports_dir)
        self.githubUtils.update_codecoveragereports_repo()