def AddResults(self, tab, results):
  """Report smoothness results over the union of all action time ranges.

  The smoothness metric is computed for one time range that spans from
  the start of the first action to the end of the last action. For a
  per-action measurement, use measurement.TimelineBasedMeasurement.
  """
  bounds = timeline_bounds.Bounds()
  for action in self._actions:
    bounds.AddBounds(action.GetActiveRangeOnTimeline(self._timeline_model))
  # Wrap the combined range in an interaction record for this legacy
  # measurement. The results handed to the smoothness metric are not
  # wrapped, so the logical name is never consulted.
  record = tir_module.TimelineInteractionRecord(
      'smoothness_interaction', bounds.min, bounds.max)
  metric = smoothness.SmoothnessMetric()
  metric.AddResults(
      self._timeline_model,
      self._timeline_model.GetRendererThreadFromTab(tab),
      record,
      results)
  platform = tab.browser.platform
  if platform.IsRawDisplayFrameRateSupported():
    for measurement in platform.GetRawDisplayFrameRateMeasurements():
      # A missing raw frame-rate value is an error, not a silent skip.
      if measurement.value is None:
        raise MissingDisplayFrameRateError(measurement.name)
      results.Add(measurement.name, measurement.unit, measurement.value)
Example 2
 def CreateMetricsForTimelineInteractionRecord(self, interaction):
     """Map an interaction record's flags to the metrics that apply to it.

     Subclasses of TimelineBasedMeasurement override this method to
     customize the binding of an interaction's flags to metrics.
     """
     metrics = []
     if interaction.is_smooth:
         metrics.append(smoothness.SmoothnessMetric())
     return metrics
Example 3
    def WillRunActions(self, page, tab):
        """Select and start the metric named by --metric before actions run."""
        metric_name = self.options.metric
        if metric_name == 'smoothness':
            self._metric = smoothness.SmoothnessMetric()
        elif metric_name == 'timeline':
            self._metric = timeline.ThreadTimesTimelineMetric()

        self._metric.Start(page, tab)

        # Also capture raw display frame rates where the platform can.
        platform = tab.browser.platform
        if platform.IsRawDisplayFrameRateSupported():
            platform.StartRawDisplayFrameRateMeasurement()
Example 4
  def WillRunAction(self, page, tab, action):
    """Create and start the configured metric for a single page action."""
    metric_name = self.options.metric
    if metric_name == 'smoothness':
      # Smoothness is scoped to the compound action under measurement.
      actions = page_test.GetCompoundActionFromPage(
          page, self._action_name_to_run)
      self._metric = smoothness.SmoothnessMetric(actions)
    elif metric_name == 'timeline':
      self._metric = timeline.TimelineMetric(timeline.TRACING_MODE)

    self._metric.Start(page, tab)

    # Also capture raw display frame rates where the platform can.
    platform = tab.browser.platform
    if platform.IsRawDisplayFrameRateSupported():
      platform.StartRawDisplayFrameRateMeasurement()
  def testSyntheticDelayConfiguration(self):
    """Page 'synthetic_delays' attributes become DELAY() category filters."""
    delay = {'target_duration': 0.012}
    attributes = {
        'synthetic_delays': {
            'cc.BeginMainFrame': dict(delay),
            'cc.DrawAndSwap': dict(delay, mode='alternating'),
            'gpu.SwapBuffers': dict(delay),
        }
    }
    test_page = page.Page('http://dummy', None, attributes=attributes)

    tab = FakeTab()
    metric = smoothness.SmoothnessMetric()
    metric.Start(test_page, tab)

    # Unspecified delay mode defaults to 'static' in the filter string.
    actual_filters = sorted(tab.browser.category_filter.split(','))
    self.assertEquals(
        ['DELAY(cc.BeginMainFrame;0.012000;static)',
         'DELAY(cc.DrawAndSwap;0.012000;alternating)',
         'DELAY(gpu.SwapBuffers;0.012000;static)',
         'benchmark',
         'webkit.console'],
        actual_filters)
Example 6
    def testCalcResultsTraceEvents(self):
        """End-to-end check of the smoothness metric on a synthetic trace.

        Builds a fake trace containing renderer-process and synthetic-gesture
        markers wrapped around 100 mock frames, runs it through the
        smoothness metric, and verifies the frame_times, mean_frame_time,
        jank and mostly_smooth results.
        """
        # Make the test repeatable by seeding the random number generator
        # (which is used by the mock timer) with a constant number.
        random.seed(1234567)
        mock_timer = MockTimer()
        trace_events = []

        def append_marker_event(name, phase, event_id):
            # Append one async marker event stamped with the mock timer's
            # current time. Phase 'S' starts the marker, 'F' finishes it.
            trace_events.append({
                'name': name,
                'tts': mock_timer.microseconds,
                'args': {},
                'pid': 20978,
                'ts': mock_timer.microseconds,
                'cat': 'webkit',
                'tid': 11,
                'ph': phase,
                'id': event_id
            })

        total_time_seconds = 0.0
        num_frames_sent = 0.0
        first_frame = True
        previous_frame_time = None
        # This list represents time differences between frames in milliseconds.
        expected_frame_times = []

        # Append start trace events for the timeline marker and gesture marker,
        # with some amount of time in between them.
        append_marker_event(RENDERER_PROCESS_MARKER, 'S', '0x12345')
        mock_timer.Advance()
        append_marker_event(SYNTHETIC_GESTURE_MARKER, 'S', '0xabcde')

        # Generate 100 random mock frames and append their trace events.
        for _ in xrange(0, 100):
            mock_frame = MockFrame(mock_timer)
            mock_frame.AppendTraceEventForMainThreadStats(trace_events)
            mock_frame.AppendTraceEventForImplThreadStats(trace_events)
            # Exclude the first frame, because it may have started before the
            # benchmark run.
            if not first_frame:
                total_time_seconds += mock_frame.duration / 1e6
                num_frames_sent += mock_frame.main_stats['frame_count']
                num_frames_sent += mock_frame.impl_stats['frame_count']
            first_frame = False
            current_frame_time = mock_timer.microseconds / 1000.0
            # Compare against None explicitly: a previous frame time of 0.0
            # is falsy but is still a valid timestamp to diff against.
            if previous_frame_time is not None:
                difference = current_frame_time - previous_frame_time
                expected_frame_times.append(round(difference, 2))
            previous_frame_time = current_frame_time

        # Append finish trace events for the timeline and gesture markers, in the
        # reverse order from how they were added, with some time in between.
        append_marker_event(SYNTHETIC_GESTURE_MARKER, 'F', '0xabcde')
        mock_timer.Advance()
        append_marker_event(RENDERER_PROCESS_MARKER, 'F', '0x12345')

        # Create a timeline object from the trace.
        trace_result = TraceResult(ChromeRawTraceResult(trace_events))
        timeline = trace_result.AsTimelineModel()

        # Find the timeline marker and gesture marker in the timeline,
        # and create a RenderingStats object.
        renderer_process_markers = timeline.FindTimelineMarkers(
            RENDERER_PROCESS_MARKER)
        self.assertEquals(len(renderer_process_markers), 1)
        renderer_process = renderer_process_markers[0].start_thread.parent
        timeline_markers = timeline.FindTimelineMarkers(
            SYNTHETIC_GESTURE_MARKER)
        stats = rendering_stats.RenderingStats(renderer_process,
                                               timeline_markers)

        # Make a results object and add results to it from the smoothness metric.
        results = PageMeasurementResults()
        results.WillMeasurePage(page.Page('http://foo.com/', None))
        smoothness_metric = smoothness.SmoothnessMetric(None)
        smoothness_metric.SetStats(stats)
        smoothness_metric.AddResults(None, results)
        results.DidMeasurePage()

        self.assertEquals(expected_frame_times,
                          results.page_results[0]['frame_times'].value)
        self.assertAlmostEquals(
            1000.0 * (total_time_seconds / num_frames_sent),
            results.page_results[0]['mean_frame_time'].value,
            places=2)

        # We don't verify the correctness of the discrepancy computation itself,
        # because we have a separate unit test for that purpose.
        self.assertAlmostEquals(statistics.FrameDiscrepancy(
            stats.frame_timestamps, True),
                                results.page_results[0]['jank'].value,
                                places=4)

        # We do not verify the correctness of Percentile here; Percentile should
        # have its own test.
        # The 17 here represents a threshold of 17 ms; this should match the value
        # in the smoothness metric.
        self.assertEquals(
            statistics.Percentile(expected_frame_times, 95.0) < 17.0,
            results.page_results[0]['mostly_smooth'].value)
Example 7
 def WillRunActions(self, page, tab):
     """Create and start the power and smoothness metrics before actions run.

     Each metric is stored on self so a later hook (presumably
     DidRunActions — not visible here) can stop it and collect results.
     """
     self._power_metric = power.PowerMetric()
     self._power_metric.Start(page, tab)
     self._smoothness_metric = smoothness.SmoothnessMetric()
     self._smoothness_metric.Start(page, tab)