Example 1
0
  def AddResults(self, tab, results):
    """Record smoothness metrics from the collected rendering stats.

    Adds the raw frame times, their arithmetic mean, the frame-timestamp
    discrepancy ('jank'), and a boolean 'mostly_smooth' flag to results.
    """
    frame_times = self._stats.frame_times

    # Raw per-frame times, in milliseconds.
    results.Add('frame_times', 'ms', frame_times)

    # Mean frame time, rounded to microsecond precision.
    results.Add('mean_frame_time', 'ms',
                round(statistics.ArithmeticMean(frame_times,
                                                len(frame_times)), 3))

    # Absolute discrepancy of the frame timestamps, used as a jank measure.
    discrepancy = statistics.FrameDiscrepancy(self._stats.frame_timestamps)
    results.Add('jank', '', round(discrepancy, 4))

    # True when the 95th-percentile frame time stays under 17 ms, i.e.
    # roughly 60 fps for 95 percent of frames.  17 ms is a slightly looser
    # threshold than the exact 1000.0 / 60.0.
    is_mostly_smooth = statistics.Percentile(frame_times, 95.0) < 17.0
    results.Add('mostly_smooth', '', is_mostly_smooth)
Example 2
0
    def AddResults(self, tab, results):
        """Record smoothness and scroll-latency metrics in results.

        Reports mean scroll latencies (only for input types that produced
        samples), the raw and mean frame times, the frame-timestamp
        discrepancy ('jank'), a 'mostly_smooth' score, and — where the
        platform supports it — raw display frame rate measurements.

        Raises:
          MissingDisplayFrameRateError: if a raw display frame rate
              measurement has no value.
        """
        # Mean input-to-scroll latencies; each metric is reported only when
        # the corresponding samples were actually collected.
        latency_metrics = (
            ('mean_mouse_wheel_scroll_latency',
             self._stats.mouse_wheel_scroll_latency),
            ('mean_touch_scroll_latency',
             self._stats.touch_scroll_latency),
            ('mean_js_touch_scroll_latency',
             self._stats.js_touch_scroll_latency),
        )
        for metric_name, samples in latency_metrics:
            if samples:
                mean_latency = statistics.ArithmeticMean(samples,
                                                         len(samples))
                results.Add(metric_name, 'ms', round(mean_latency, 3))

        # Raw frame times, flattened across all frames, in milliseconds.
        frame_times = FlattenList(self._stats.frame_times)
        results.Add('frame_times', 'ms', frame_times)

        # Mean frame time, rounded to microsecond precision.
        results.Add('mean_frame_time', 'ms',
                    round(statistics.ArithmeticMean(frame_times,
                                                    len(frame_times)), 3))

        # Absolute discrepancy of the frame timestamps, used as a jank
        # measure.
        results.Add('jank', '',
                    round(statistics.FrameDiscrepancy(
                        self._stats.frame_timestamps), 4))

        # Score 1.0 when the 95th-percentile frame time is under 19 ms
        # (a somewhat looser threshold than the exact 1000.0 / 60.0),
        # otherwise 0.0 — i.e. are we hitting ~60 fps for 95% of frames?
        percentile_95 = statistics.Percentile(frame_times, 95.0)
        results.Add('mostly_smooth', 'score',
                    1.0 if percentile_95 < 19.0 else 0.0)

        # Raw display frame rate measurements, when the platform can
        # provide them; a measurement without a value is an error.
        platform = tab.browser.platform
        if platform.IsRawDisplayFrameRateSupported():
            for measurement in platform.GetRawDisplayFrameRateMeasurements():
                if measurement.value is None:
                    raise MissingDisplayFrameRateError(measurement.name)
                results.Add(measurement.name, measurement.unit,
                            measurement.value)
Example 3
0
 def testPercentile(self):
   """Exercise statistics.Percentile on small hand-checked inputs."""
   cases = [
       # (expected, values, percentile)
       (3, [4, 5, 1, 3, 2], 50),     # 50th percentile is the median.
       (2.5, [5, 1, 3, 2], 50),      # Even-length list: mean of middle two.
       (0, [], 50),                  # Empty value list returns 0.
       (1, [2, 1, 5, 4, 3], 5),      # Very low percentage: lowest value.
       (5, [5, 2, 4, 1, 3], 95),     # Very high percentage: highest value.
       # Linear interpolation between closest ranks, using the example
       # from <http://en.wikipedia.org/wiki/Percentile>.
       (27.5, [15, 20, 35, 40, 50], 40),
   ]
   for expected, values, percent in cases:
     self.assertEquals(expected, statistics.Percentile(values, percent))
Example 4
0
    def testCalcResultsTraceEvents(self):
        """End-to-end check of the smoothness metric on a mocked trace.

        Builds a synthetic trace (renderer-process and gesture markers
        bracketing 100 random mock frames), runs it through the timeline
        model and the smoothness metric, and verifies the frame_times,
        mean_frame_time, jank and mostly_smooth results.
        """
        # Make the test repeatable by seeding the random number generator
        # (which is used by the mock timer) with a constant number.
        random.seed(1234567)
        mock_timer = MockTimer()
        trace_events = []
        total_time_seconds = 0.0
        num_frames_sent = 0.0
        first_frame = True
        previous_frame_time = None
        # This list represents time differences between frames in milliseconds.
        expected_frame_times = []

        # Append start trace events for the timeline marker and gesture marker,
        # with some amount of time in between them.
        trace_events.append({
            'name': RENDERER_PROCESS_MARKER,
            'tts': mock_timer.microseconds,
            'args': {},
            'pid': 20978,
            'ts': mock_timer.microseconds,
            'cat': 'webkit',
            'tid': 11,
            'ph': 'S',  # Phase: start.
            'id': '0x12345'
        })
        mock_timer.Advance()
        trace_events.append({
            'name': SYNTHETIC_GESTURE_MARKER,
            'tts': mock_timer.microseconds,
            'args': {},
            'pid': 20978,
            'ts': mock_timer.microseconds,
            'cat': 'webkit',
            'tid': 11,
            'ph': 'S',
            'id': '0xabcde'
        })

        # Generate 100 random mock frames and append their trace events.
        # NOTE(review): constructing each MockFrame appears to advance
        # mock_timer as a side effect — confirm against MockFrame; the
        # loop below depends on that ordering.
        for _ in xrange(0, 100):
            mock_frame = MockFrame(mock_timer)
            mock_frame.AppendTraceEventForMainThreadStats(trace_events)
            mock_frame.AppendTraceEventForImplThreadStats(trace_events)
            # Exclude the first frame, because it may have started before the
            # benchmark run.
            if not first_frame:
                total_time_seconds += mock_frame.duration / 1e6
                num_frames_sent += mock_frame.main_stats['frame_count']
                num_frames_sent += mock_frame.impl_stats['frame_count']
            first_frame = False
            # Timer is in microseconds; frame times are tracked in ms.
            current_frame_time = mock_timer.microseconds / 1000.0
            # previous_frame_time is None on the first iteration, so the
            # first frame contributes no inter-frame difference.
            if previous_frame_time:
                difference = current_frame_time - previous_frame_time
                # Round to match the metric's reported precision.
                difference = round(difference, 2)
                expected_frame_times.append(difference)
            previous_frame_time = current_frame_time

        # Append finish trace events for the timeline and gesture markers, in the
        # reverse order from how they were added, with some time in between.
        trace_events.append({
            'name': SYNTHETIC_GESTURE_MARKER,
            'tts': mock_timer.microseconds,
            'args': {},
            'pid': 20978,
            'ts': mock_timer.microseconds,
            'cat': 'webkit',
            'tid': 11,
            'ph': 'F',  # Phase: finish.
            'id': '0xabcde'
        })
        mock_timer.Advance()
        trace_events.append({
            'name': RENDERER_PROCESS_MARKER,
            'tts': mock_timer.microseconds,
            'args': {},
            'pid': 20978,
            'ts': mock_timer.microseconds,
            'cat': 'webkit',
            'tid': 11,
            'ph': 'F',
            'id': '0x12345'
        })

        # Create a timeline object from the trace.
        trace_result = TraceResult(ChromeRawTraceResult(trace_events))
        timeline = trace_result.AsTimelineModel()

        # Find the timeline marker and gesture marker in the timeline,
        # and create a RenderingStats object.
        renderer_process_markers = timeline.FindTimelineMarkers(
            RENDERER_PROCESS_MARKER)
        self.assertEquals(len(renderer_process_markers), 1)
        renderer_process = renderer_process_markers[0].start_thread.parent
        timeline_markers = timeline.FindTimelineMarkers(
            SYNTHETIC_GESTURE_MARKER)
        stats = rendering_stats.RenderingStats(renderer_process,
                                               timeline_markers)

        # Make a results object and add results to it from the smoothness metric.
        results = PageMeasurementResults()
        results.WillMeasurePage(page.Page('http://foo.com/', None))
        smoothness_metric = smoothness.SmoothnessMetric(None)
        smoothness_metric.SetStats(stats)
        smoothness_metric.AddResults(None, results)
        results.DidMeasurePage()

        self.assertEquals(expected_frame_times,
                          results.page_results[0]['frame_times'].value)
        # Mean frame time is reported in ms; total_time_seconds is in
        # seconds, hence the factor of 1000.
        self.assertAlmostEquals(
            1000.0 * (total_time_seconds / num_frames_sent),
            results.page_results[0]['mean_frame_time'].value,
            places=2)

        # We don't verify the correctness of the discrepancy computation itself,
        # because we have a separate unit test for that purpose.
        self.assertAlmostEquals(statistics.FrameDiscrepancy(
            stats.frame_timestamps, True),
                                results.page_results[0]['jank'].value,
                                places=4)

        # We do not verify the correctness of Percentile here; Percentile should
        # have its own test.
        # The 17 here represents a threshold of 17 ms; this should match the value
        # in the smoothness metric.
        self.assertEquals(
            statistics.Percentile(expected_frame_times, 95.0) < 17.0,
            results.page_results[0]['mostly_smooth'].value)