Example #1
def _CreateSampleDirectoryCoverageData():
  """Returns a sample directory SummaryCoverageData for testing purpose.

  Note: only use this method if the exact values don't matter.
  """
  return SummaryCoverageData.Create(
      server_host='chromium.googlesource.com',
      project='chromium/src',
      ref='refs/heads/master',
      revision='aaaaa',
      data_type='dirs',
      path='//dir/',
      bucket='coverage',
      builder='linux-code-coverage',
      data={
          'dirs': [],
          'path':
              '//dir/',
          'summaries':
              _CreateSampleCoverageSummaryMetric(),
          'files': [{
              'path': '//dir/test.cc',
              'name': 'test.cc',
              'summaries': _CreateSampleCoverageSummaryMetric()
          }]
      })
Example #2
def _CreateSampleRootComponentCoverageData():
  """Returns a sample component SummaryCoverageData for >> for testing purpose.

  Note: only use this method if the exact values don't matter.
  """
  return SummaryCoverageData.Create(
      server_host='chromium.googlesource.com',
      project='chromium/src',
      ref='refs/heads/master',
      revision='aaaaa',
      data_type='components',
      path='>>',
      bucket='coverage',
      builder='linux-code-coverage',
      data={
          'dirs': [{
              'path': 'Component>Test',
              'name': 'Component>Test',
              'summaries': _CreateSampleCoverageSummaryMetric()
          }],
          'path':
              '>>'
      })
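
Examples #1 and #2 both call _CreateSampleCoverageSummaryMetric(), which is not reproduced in this section. A minimal sketch of its likely return value, with the shape inferred from the 'summaries' payload in Example #6 (the exact numbers are assumptions):

def _CreateSampleCoverageSummaryMetric():
  """Returns a sample coverage summary metric for testing purposes.

  Hypothetical sketch; only the shape is inferred from Example #6.
  """
  return [{
      'covered': 1,
      'total': 2,
      'name': 'region'
  }, {
      'covered': 1,
      'total': 2,
      'name': 'function'
  }, {
      'covered': 1,
      'total': 2,
      'name': 'line'
  }]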
Example #3
  def testProcessFullRepoData(self, mocked_is_request_from_appself,
                              mocked_get_build, mocked_get_validated_data,
                              mocked_get_change_log, mocked_retrieve_manifest,
                              mocked_fetch_file):
    # Mock buildbucket v2 API.
    build = mock.Mock()
    build.builder.project = 'chrome'
    build.builder.bucket = 'coverage'
    build.builder.builder = 'linux-code-coverage'
    build.output.properties.items.return_value = [
        ('coverage_gs_bucket', 'code-coverage-data'),
        ('coverage_metadata_gs_path',
         ('postsubmit/chromium.googlesource.com/chromium/src/'
          'aaaaa/coverage/linux-code-coverage/123456789/metadata'))
    ]
    build.input.gitiles_commit = mock.Mock(
        host='chromium.googlesource.com',
        project='chromium/src',
        ref='refs/heads/master',
        id='aaaaa')
    mocked_get_build.return_value = build

    # Mock Gitiles API to get change log.
    change_log = mock.Mock()
    change_log.committer.time = datetime.datetime(2018, 1, 1)
    mocked_get_change_log.return_value = change_log

    # Mock retrieve manifest.
    manifest = _CreateSampleManifest()
    mocked_retrieve_manifest.return_value = manifest

    # Mock the validated data fetched from Cloud Storage, for both all.json
    # and the file-shard json.
    all_coverage_data = {
        'dirs': [{
            'path':
                '//dir/',
            'dirs': [],
            'files': [{
                'path': '//dir/test.cc',
                'name': 'test.cc',
                'summaries': _CreateSampleCoverageSummaryMetric()
            }],
            'summaries':
                _CreateSampleCoverageSummaryMetric()
        }],
        'file_shards': ['file_coverage/files1.json.gz'],
        'summaries':
            _CreateSampleCoverageSummaryMetric(),
        'components': [{
            'path':
                'Component>Test',
            'dirs': [{
                'path': '//dir/',
                'name': 'dir/',
                'summaries': _CreateSampleCoverageSummaryMetric()
            }],
            'summaries':
                _CreateSampleCoverageSummaryMetric()
        }],
    }

    file_shard_coverage_data = {
        'files': [{
            'path':
                '//dir/test.cc',
            'revision':
                'bbbbb',
            'lines': [{
                'count': 100,
                'last': 2,
                'first': 1
            }],
            'timestamp':
                '140000',
            'uncovered_blocks': [{
                'line': 1,
                'ranges': [{
                    'first': 1,
                    'last': 2
                }]
            }]
        }]
    }

    mocked_get_validated_data.side_effect = [
        all_coverage_data, file_shard_coverage_data
    ]

    request_url = '/coverage/task/process-data/build/123456789'
    response = self.test_app.post(request_url)
    self.assertEqual(200, response.status_int)
    mocked_is_request_from_appself.assert_called()

    fetched_reports = PostsubmitReport.query().fetch()
    self.assertEqual(1, len(fetched_reports))
    self.assertEqual(_CreateSamplePostsubmitReport(), fetched_reports[0])
    mocked_fetch_file.assert_called_with(_CreateSamplePostsubmitReport(),
                                         '//dir/test.cc', 'bbbbb')

    fetched_file_coverage_data = FileCoverageData.query().fetch()
    self.assertEqual(1, len(fetched_file_coverage_data))
    self.assertEqual(_CreateSampleFileCoverageData(),
                     fetched_file_coverage_data[0])

    fetched_summary_coverage_data = SummaryCoverageData.query().fetch()
    self.assertListEqual([
        _CreateSampleRootComponentCoverageData(),
        _CreateSampleComponentCoverageData(),
        _CreateSampleDirectoryCoverageData()
    ], fetched_summary_coverage_data)
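
The two mocked output properties above, coverage_gs_bucket and coverage_metadata_gs_path, are presumably joined into the full_gs_metadata_dir that Example #4 consumes. A minimal sketch of that assumed composition (the function name is hypothetical):

def _ComposeFullGsMetadataDir(gs_bucket, gs_metadata_path):
  # Hypothetical sketch: joins the two mocked properties into the directory
  # that Example #4 receives as full_gs_metadata_dir. The exact prefix
  # ('/bucket/...' vs. 'gs://bucket/...') is an assumption.
  return '/%s/%s' % (gs_bucket, gs_metadata_path)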
Example #4
  def _ProcessFullRepositoryData(self, commit, data, full_gs_metadata_dir,
                                 builder, build_id):

    # Load the commit log first so that we can fail fast instead of
    # redoing all the work below.
    repo_url = 'https://%s/%s.git' % (commit.host, commit.project)
    change_log = CachedGitilesRepository(FinditHttpClient(),
                                         repo_url).GetChangeLog(commit.id)
    assert change_log is not None, 'Failed to retrieve the commit log'

    # Load the manifest based on the DEPS file.
    # TODO(crbug.com/921714): output the manifest as a build output property.
    manifest = _RetrieveManifest(repo_url, commit.id, 'unix')

    report = PostsubmitReport.Create(
        server_host=commit.host,
        project=commit.project,
        ref=commit.ref,
        revision=commit.id,
        bucket=builder.bucket,
        builder=builder.builder,
        commit_timestamp=change_log.committer.time,
        manifest=manifest,
        summary_metrics=data.get('summaries'),
        build_id=build_id,
        visible=False)
    report.put()

    # Save the file-level, directory-level and line-level coverage data.
    for data_type in ('dirs', 'components', 'files', 'file_shards'):
      sub_data = data.get(data_type)
      if not sub_data:
        continue

      logging.info('Processing %d entries for %s', len(sub_data), data_type)

      actual_data_type = data_type
      if data_type == 'file_shards':
        actual_data_type = 'files'

      def FlushEntries(entries, total, last=False):
        # Flush the data in a batch and release memory.
        if len(entries) < 100 and not (last and entries):
          return entries, total

        ndb.put_multi(entries)
        total += len(entries)
        logging.info('Dumped %d coverage data entries of type %s', total,
                     actual_data_type)

        return [], total

      def IterateOverFileShards(file_shards):
        for file_path in file_shards:
          url = '%s/%s' % (full_gs_metadata_dir, file_path)
          # Download data one by one.
          yield _GetValidatedData(url).get('files', [])

      if data_type == 'file_shards':
        data_iterator = IterateOverFileShards(sub_data)
      else:
        data_iterator = [sub_data]

      entities = []
      total = 0

      component_summaries = []
      for dataset in data_iterator:
        for group_data in dataset:
          if actual_data_type == 'components':
            component_summaries.append({
                'name': group_data['path'],
                'path': group_data['path'],
                'summaries': group_data['summaries'],
            })

          if actual_data_type == 'files' and 'revision' in group_data:
            self._FetchAndSaveFileIfNecessary(report, group_data['path'],
                                              group_data['revision'])

          if actual_data_type == 'files':
            coverage_data = FileCoverageData.Create(
                server_host=commit.host,
                project=commit.project,
                ref=commit.ref,
                revision=commit.id,
                path=group_data['path'],
                bucket=builder.bucket,
                builder=builder.builder,
                data=group_data)
          else:
            coverage_data = SummaryCoverageData.Create(
                server_host=commit.host,
                project=commit.project,
                ref=commit.ref,
                revision=commit.id,
                data_type=actual_data_type,
                path=group_data['path'],
                bucket=builder.bucket,
                builder=builder.builder,
                data=group_data)

          entities.append(coverage_data)
          entities, total = FlushEntries(entities, total, last=False)
        del dataset  # Explicitly release memory.
      FlushEntries(entities, total, last=True)

      if component_summaries:
        component_summaries.sort(key=lambda x: x['path'])
        SummaryCoverageData.Create(
            server_host=commit.host,
            project=commit.project,
            ref=commit.ref,
            revision=commit.id,
            data_type='components',
            path='>>',
            bucket=builder.bucket,
            builder=builder.builder,
            data={
                'dirs': component_summaries,
                'path': '>>'
            }).put()
        component_summaries = []
        logging.info('Summary of all components is saved to datastore.')

    if not _IsReportSuspicious(report):
      report.visible = True
      report.put()

      monitoring.code_coverage_full_reports.increment({
          'host':
              commit.host,
          'project':
              commit.project,
          'ref':
              commit.ref or 'refs/heads/master',
          'builder':
              '%s/%s/%s' % (builder.project, builder.bucket, builder.builder),
      })

    monitoring.code_coverage_report_timestamp.set(
        int(time.time()),
        fields={
            'host':
                commit.host,
            'project':
                commit.project,
            'ref':
                commit.ref or 'refs/heads/master',
            'builder':
                '%s/%s/%s' % (builder.project, builder.bucket, builder.builder),
            'is_success':
                report.visible,
        })
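
FlushEntries above batches ndb.put_multi calls to cap peak memory while dumping many coverage entities. The same pattern, extracted into a standalone helper for clarity (a sketch only; the name BatchPut and its signature are assumptions, not part of the original code):

from google.appengine.ext import ndb


def BatchPut(entity_iterator, batch_size=100):
  """Persists entities in batches to bound peak memory usage."""
  pending = []
  total = 0
  for entity in entity_iterator:
    pending.append(entity)
    if len(pending) >= batch_size:
      ndb.put_multi(pending)  # Flush a full batch and release memory.
      total += len(pending)
      pending = []
  if pending:
    ndb.put_multi(pending)  # Flush the final partial batch.
    total += len(pending)
  return total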
Example #5
  def HandleGet(self):
    if self.request.path == '/coverage/api/coverage-data':
      return self._ServePerCLCoverageData()

    match = _LUCI_PROJECT_REGEX.match(self.request.path)
    if not match:
      return BaseHandler.CreateError('Invalid url path %s' % self.request.path,
                                     400)
    luci_project = match.group(1)

    host = self.request.get('host', 'chromium.googlesource.com')
    project = self.request.get('project', 'chromium/src')
    ref = self.request.get('ref', 'refs/heads/master')

    revision = self.request.get('revision')
    path = self.request.get('path')
    data_type = self.request.get('data_type')
    platform = self.request.get('platform', 'linux')
    list_reports = self.request.get('list_reports', False)
    if isinstance(list_reports, basestring):
      list_reports = (list_reports.lower() == 'true')

    if not data_type and path:
      if path.endswith('/'):
        data_type = 'dirs'
      elif '>' in path:
        data_type = 'components'
      else:
        data_type = 'files'

    logging.info('host=%s', host)
    logging.info('project=%s', project)
    logging.info('ref=%s', ref)
    logging.info('revision=%s', revision)
    logging.info('data_type=%s', data_type)
    logging.info('path=%s', path)
    logging.info('platform=%s', platform)

    if not project:
      return BaseHandler.CreateError('Invalid request', 400)

    logging.info('Servicing coverage data for postsubmit')
    if platform not in _POSTSUBMIT_PLATFORM_INFO_MAP:
      return BaseHandler.CreateError('Platform: %s is not supported' % platform,
                                     404)
    bucket = _POSTSUBMIT_PLATFORM_INFO_MAP[platform]['bucket']
    builder = _POSTSUBMIT_PLATFORM_INFO_MAP[platform]['builder']

    if list_reports:
      return self._ServeProjectViewCoverageData(
          luci_project, host, project, ref, revision, platform, bucket, builder)

    template = None
    warning = None
    if not data_type:
      data_type = 'dirs'
    if not revision:
      query = PostsubmitReport.query(
          PostsubmitReport.gitiles_commit.server_host == host,
          PostsubmitReport.gitiles_commit.project == project,
          PostsubmitReport.bucket == bucket,
          PostsubmitReport.builder == builder, PostsubmitReport.visible ==
          True).order(-PostsubmitReport.commit_timestamp)
      entities = query.fetch(limit=1)
      if not entities:
        return BaseHandler.CreateError('No visible reports found', 404)
      report = entities[0]
      revision = report.gitiles_commit.revision
    else:
      report = PostsubmitReport.Get(
          server_host=host,
          project=project,
          ref=ref,
          revision=revision,
          bucket=bucket,
          builder=builder)
      if not report:
        return BaseHandler.CreateError('Report record not found', 404)

    template = 'coverage/summary_view.html'
    if data_type == 'dirs':
      default_path = '//'
    elif data_type == 'components':
      default_path = '>>'
    else:
      if data_type != 'files':
        return BaseHandler.CreateError(
            'Expected data_type to be "files", but got "%s"' % data_type, 400)

      # Fall back to the root directory view if no path is given.
      default_path = '//'
      template = 'coverage/file_view.html'

    path = path or default_path

    if data_type == 'files':
      entity = FileCoverageData.Get(
          server_host=host,
          project=project,
          ref=ref,
          revision=revision,
          path=path,
          bucket=bucket,
          builder=builder)
      if not entity:
        warning = ('File "%s" does not exist in this report, defaulting to root'
                   % path)
        logging.warning(warning)
        path = '//'
        data_type = 'dirs'
        template = 'coverage/summary_view.html'
    if data_type != 'files':
      entity = SummaryCoverageData.Get(
          server_host=host,
          project=project,
          ref=ref,
          revision=revision,
          data_type=data_type,
          path=path,
          bucket=bucket,
          builder=builder)
      if not entity:
        warning = ('Path "%s" does not exist in this report, defaulting to root'
                   % path)
        logging.warning(warning)
        path = default_path
        entity = SummaryCoverageData.Get(
            server_host=host,
            project=project,
            ref=ref,
            revision=revision,
            data_type=data_type,
            path=path,
            bucket=bucket,
            builder=builder)

    metadata = entity.data
    data = {
        'metadata': metadata,
    }

    line_to_data = None
    if data_type == 'files':
      line_to_data = collections.defaultdict(dict)

      if 'revision' in metadata:
        gs_path = _ComposeSourceFileGsPath(report, path, metadata['revision'])
        file_content = _GetFileContentFromGs(gs_path)
        if not file_content:
          # Fetching files from Gitiles is slow; only use it as a backup.
          file_content = _GetFileContentFromGitiles(report, path,
                                                    metadata['revision'])
      else:
        # If metadata lacks a 'revision', the file is not a source file.
        file_content = None

      if not file_content:
        line_to_data[1]['line'] = '!!!!No source code available!!!!'
        line_to_data[1]['count'] = 0
      else:
        file_lines = file_content.splitlines()
        for i, line in enumerate(file_lines):
          # Per http://jinja.pocoo.org/docs/2.10/api/#unicode, Jinja requires
          # unicode objects or ASCII-only bytestrings. Source files may
          # contain non-ASCII characters, so convert each line to unicode.
          line_to_data[i + 1]['line'] = unicode(line, 'utf8')
          line_to_data[i + 1]['count'] = -1

        uncovered_blocks = {}
        if 'uncovered_blocks' in metadata:
          for line_data in metadata['uncovered_blocks']:
            uncovered_blocks[line_data['line']] = line_data['ranges']

        for line in metadata['lines']:
          for line_num in range(line['first'], line['last'] + 1):
            line_to_data[line_num]['count'] = line['count']
            if line_num in uncovered_blocks:
              text = line_to_data[line_num]['line']
              regions = _SplitLineIntoRegions(text, uncovered_blocks[line_num])
              line_to_data[line_num]['regions'] = regions
              line_to_data[line_num]['is_partially_covered'] = True
            else:
              line_to_data[line_num]['is_partially_covered'] = False

      line_to_data = list(line_to_data.iteritems())
      line_to_data.sort(key=lambda x: x[0])
      data['line_to_data'] = line_to_data

    # Compute the ordered list of name->path mappings.
    path_parts = _GetNameToPathSeparator(path, data_type)
    path_root, _ = _GetPathRootAndSeparatorFromDataType(data_type)
    return {
        'data': {
            'luci_project':
                luci_project,
            'gitiles_commit': {
                'host': host,
                'project': project,
                'ref': ref,
                'revision': revision,
            },
            'path':
                path,
            'platform':
                platform,
            'platform_ui_name':
                _POSTSUBMIT_PLATFORM_INFO_MAP[platform]['ui_name'],
            'path_root':
                path_root,
            'metrics':
                code_coverage_util.GetMetricsBasedOnCoverageTool(
                    _POSTSUBMIT_PLATFORM_INFO_MAP[platform]['coverage_tool']),
            'data':
                data,
            'data_type':
                data_type,
            'path_parts':
                path_parts,
            'platform_select':
                _MakePlatformSelect(host, project, ref, revision, path,
                                    platform),
            'banner':
                _GetBanner(project),
            'warning':
                warning,
        },
        'template': template,
    }
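
Example #5 relies on _SplitLineIntoRegions to render partially covered lines, but the helper is not reproduced in this section. A plausible sketch, assuming each range dict uses 1-based, inclusive 'first'/'last' column indices and that ranges arrive sorted and non-overlapping (all assumptions; the real helper may differ):

def _SplitLineIntoRegions(line, uncovered_ranges):
  """Splits a source line into alternating covered/uncovered text regions."""
  regions = []
  pos = 0
  for block in uncovered_ranges:
    start = block['first'] - 1  # Convert 1-based column to 0-based index.
    end = block['last']  # 'last' is inclusive, so it equals the slice end.
    if start > pos:
      regions.append({'text': line[pos:start], 'is_covered': True})
    regions.append({'text': line[start:end], 'is_covered': False})
    pos = end
  if pos < len(line):
    regions.append({'text': line[pos:], 'is_covered': True})
  return regions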
Example #6
  def testCreateAndGetComponentCoverageData(self):
    server_host = 'chromium.googlesource.com'
    project = 'chromium/src'
    ref = 'refs/heads/master'
    revision = '99999'
    data_type = 'components'
    path = 'Test>Component'
    bucket = 'coverage'
    builder = 'linux-code-coverage'
    data = {
        'dirs': [],
        'files': [],
        'summaries': [{
            'covered': 1,
            'total': 1,
            'name': 'region'
        }, {
            'covered': 1,
            'total': 1,
            'name': 'function'
        }, {
            'covered': 1,
            'total': 1,
            'name': 'line'
        }],
        'path':
            'Test>Component',
    }

    component_coverage_data = SummaryCoverageData.Create(
        server_host=server_host,
        project=project,
        ref=ref,
        revision=revision,
        data_type=data_type,
        path=path,
        bucket=bucket,
        builder=builder,
        data=data)
    component_coverage_data.put()

    # Test key.
    self.assertEqual(
        'chromium.googlesource.com$chromium/src$refs/heads/master$99999$'
        'components$Test>Component$coverage$linux-code-coverage',
        component_coverage_data.key.id())

    # Test Create.
    fetched_component_coverage_data = SummaryCoverageData.query().fetch()
    self.assertEqual(1, len(fetched_component_coverage_data))
    self.assertEqual(component_coverage_data,
                     fetched_component_coverage_data[0])

    # Test Get.
    self.assertEqual(
        component_coverage_data,
        SummaryCoverageData.Get(
            server_host=server_host,
            project=project,
            ref=ref,
            revision=revision,
            data_type=data_type,
            path=path,
            bucket=bucket,
            builder=builder))
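
The key assertion above implies that SummaryCoverageData composes its ndb key id by joining the eight identifying fields with '$'. A minimal sketch of that convention (the helper name is hypothetical; the real implementation may differ):

def _CreateSummaryCoverageKeyId(server_host, project, ref, revision, data_type,
                                path, bucket, builder):
  # Joins the identifying fields with '$', matching the id asserted in the
  # test above.
  return '$'.join(
      [server_host, project, ref, revision, data_type, path, bucket, builder])

For the values in this test it yields 'chromium.googlesource.com$chromium/src$refs/heads/master$99999$components$Test>Component$coverage$linux-code-coverage', matching the asserted key id.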