def _CreateSampleDirectoryCoverageData():
  """Returns a sample directory SummaryCoverageData for testing purposes.

  Note: only use this method if the exact values don't matter.
  """
  return SummaryCoverageData.Create(
      server_host='chromium.googlesource.com',
      project='chromium/src',
      ref='refs/heads/master',
      revision='aaaaa',
      data_type='dirs',
      path='//dir/',
      bucket='coverage',
      builder='linux-code-coverage',
      data={
          'dirs': [],
          'path': '//dir/',
          'summaries': _CreateSampleCoverageSummaryMetric(),
          'files': [{
              'path': '//dir/test.cc',
              'name': 'test.cc',
              'summaries': _CreateSampleCoverageSummaryMetric()
          }]
      })
def _CreateSampleRootComponentCoverageData():
  """Returns a sample SummaryCoverageData for the root component '>>' for
  testing purposes.

  Note: only use this method if the exact values don't matter.
  """
  return SummaryCoverageData.Create(
      server_host='chromium.googlesource.com',
      project='chromium/src',
      ref='refs/heads/master',
      revision='aaaaa',
      data_type='components',
      path='>>',
      bucket='coverage',
      builder='linux-code-coverage',
      data={
          'dirs': [{
              'path': 'Component>Test',
              'name': 'Component>Test',
              'summaries': _CreateSampleCoverageSummaryMetric()
          }],
          'path': '>>'
      })
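# A minimal usage sketch (an assumption, not part of the original file): the
# sample-data helpers above would typically be exercised inside a test case
# that provides an in-memory datastore stub, by storing the entity with put()
# and reading it back via SummaryCoverageData.Get() with the same identifying
# fields. The test method name below is hypothetical.
def testSampleDirectoryCoverageDataRoundTrip(self):
  dir_coverage_data = _CreateSampleDirectoryCoverageData()
  dir_coverage_data.put()
  self.assertEqual(
      dir_coverage_data,
      SummaryCoverageData.Get(
          server_host='chromium.googlesource.com',
          project='chromium/src',
          ref='refs/heads/master',
          revision='aaaaa',
          data_type='dirs',
          path='//dir/',
          bucket='coverage',
          builder='linux-code-coverage'))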
def _ProcessFullRepositoryData(self, commit, data, full_gs_metadata_dir,
                               builder, build_id):
  # Load the commit log first so that we can fail fast before doing all the
  # other work.
  repo_url = 'https://%s/%s.git' % (commit.host, commit.project)
  change_log = CachedGitilesRepository(FinditHttpClient(),
                                       repo_url).GetChangeLog(commit.id)
  assert change_log is not None, 'Failed to retrieve the commit log'

  # Load the manifest based on the DEPS file.
  # TODO(crbug.com/921714): output the manifest as a build output property.
  manifest = _RetrieveManifest(repo_url, commit.id, 'unix')

  report = PostsubmitReport.Create(
      server_host=commit.host,
      project=commit.project,
      ref=commit.ref,
      revision=commit.id,
      bucket=builder.bucket,
      builder=builder.builder,
      commit_timestamp=change_log.committer.time,
      manifest=manifest,
      summary_metrics=data.get('summaries'),
      build_id=build_id,
      visible=False)
  report.put()

  # Save the directory-level, component-level and file-level coverage data.
  for data_type in ('dirs', 'components', 'files', 'file_shards'):
    sub_data = data.get(data_type)
    if not sub_data:
      continue

    logging.info('Processing %d entries for %s', len(sub_data), data_type)

    actual_data_type = data_type
    if data_type == 'file_shards':
      actual_data_type = 'files'

    def FlushEntries(entries, total, last=False):
      # Flush the data in a batch and release memory.
      if len(entries) < 100 and not (last and entries):
        return entries, total

      ndb.put_multi(entries)
      total += len(entries)
      logging.info('Dumped %d coverage data entries of type %s', total,
                   actual_data_type)

      return [], total

    def IterateOverFileShards(file_shards):
      for file_path in file_shards:
        url = '%s/%s' % (full_gs_metadata_dir, file_path)
        # Download data one by one.
        yield _GetValidatedData(url).get('files', [])

    if data_type == 'file_shards':
      data_iterator = IterateOverFileShards(sub_data)
    else:
      data_iterator = [sub_data]

    entities = []
    total = 0
    component_summaries = []
    for dataset in data_iterator:
      for group_data in dataset:
        if actual_data_type == 'components':
          component_summaries.append({
              'name': group_data['path'],
              'path': group_data['path'],
              'summaries': group_data['summaries'],
          })

        if actual_data_type == 'files' and 'revision' in group_data:
          self._FetchAndSaveFileIfNecessary(report, group_data['path'],
                                            group_data['revision'])

        if actual_data_type == 'files':
          coverage_data = FileCoverageData.Create(
              server_host=commit.host,
              project=commit.project,
              ref=commit.ref,
              revision=commit.id,
              path=group_data['path'],
              bucket=builder.bucket,
              builder=builder.builder,
              data=group_data)
        else:
          coverage_data = SummaryCoverageData.Create(
              server_host=commit.host,
              project=commit.project,
              ref=commit.ref,
              revision=commit.id,
              data_type=actual_data_type,
              path=group_data['path'],
              bucket=builder.bucket,
              builder=builder.builder,
              data=group_data)
        entities.append(coverage_data)
        entities, total = FlushEntries(entities, total, last=False)
      del dataset  # Explicitly release memory.
    FlushEntries(entities, total, last=True)

    if component_summaries:
      component_summaries.sort(key=lambda x: x['path'])
      SummaryCoverageData.Create(
          server_host=commit.host,
          project=commit.project,
          ref=commit.ref,
          revision=commit.id,
          data_type='components',
          path='>>',
          bucket=builder.bucket,
          builder=builder.builder,
          data={
              'dirs': component_summaries,
              'path': '>>'
          }).put()
      component_summaries = []
      logging.info('Summary of all components is saved to datastore.')

  if not _IsReportSuspicious(report):
    report.visible = True
    report.put()

    monitoring.code_coverage_full_reports.increment({
        'host': commit.host,
        'project': commit.project,
        'ref': commit.ref or 'refs/heads/master',
        'builder': '%s/%s/%s' % (builder.project, builder.bucket,
                                 builder.builder),
    })

  monitoring.code_coverage_report_timestamp.set(
      int(time.time()),
      fields={
          'host': commit.host,
          'project': commit.project,
          'ref': commit.ref or 'refs/heads/master',
          'builder': '%s/%s/%s' % (builder.project, builder.bucket,
                                   builder.builder),
          'is_success': report.visible,
      })
def testCreateAndGetComponentCoverageData(self):
  server_host = 'chromium.googlesource.com'
  project = 'chromium/src'
  ref = 'refs/heads/master'
  revision = '99999'
  data_type = 'components'
  path = 'Test>Component'
  bucket = 'coverage'
  builder = 'linux-code-coverage'
  data = {
      'dirs': [],
      'files': [],
      'summaries': [{
          'covered': 1,
          'total': 1,
          'name': 'region'
      }, {
          'covered': 1,
          'total': 1,
          'name': 'function'
      }, {
          'covered': 1,
          'total': 1,
          'name': 'line'
      }],
      'path': 'Test>Component',
  }

  component_coverage_data = SummaryCoverageData.Create(
      server_host=server_host,
      project=project,
      ref=ref,
      revision=revision,
      data_type=data_type,
      path=path,
      bucket=bucket,
      builder=builder,
      data=data)
  component_coverage_data.put()

  # Test key.
  self.assertEqual(
      'chromium.googlesource.com$chromium/src$refs/heads/master$99999$'
      'components$Test>Component$coverage$linux-code-coverage',
      component_coverage_data.key.id())

  # Test Create.
  fetched_component_coverage_data = SummaryCoverageData.query().fetch()
  self.assertEqual(1, len(fetched_component_coverage_data))
  self.assertEqual(component_coverage_data,
                   fetched_component_coverage_data[0])

  # Test Get.
  self.assertEqual(
      component_coverage_data,
      SummaryCoverageData.Get(
          server_host=server_host,
          project=project,
          ref=ref,
          revision=revision,
          data_type=data_type,
          path=path,
          bucket=bucket,
          builder=builder))