def testDiscrepancyAnalytic(self):
  """Computes discrepancy for sample sets with known statistics."""
  interval_multiplier = 100000

  samples = []
  d = statistics.Discrepancy(samples, interval_multiplier)
  self.assertEquals(d, 1.0)

  samples = [0.5]
  d = statistics.Discrepancy(samples, interval_multiplier)
  self.assertEquals(round(d), 1.0)

  samples = [0.0, 1.0]
  d = statistics.Discrepancy(samples, interval_multiplier)
  self.assertAlmostEquals(round(d, 2), 1.0)

  samples = [0.5, 0.5, 0.5]
  d = statistics.Discrepancy(samples, interval_multiplier)
  self.assertAlmostEquals(d, 1.0)

  samples = [1.0/8.0, 3.0/8.0, 5.0/8.0, 7.0/8.0]
  d = statistics.Discrepancy(samples, interval_multiplier)
  self.assertAlmostEquals(round(d, 2), 0.25)

  samples = [0.0, 1.0/3.0, 2.0/3.0, 1.0]
  d = statistics.Discrepancy(samples, interval_multiplier)
  self.assertAlmostEquals(round(d, 2), 0.5)

  samples = statistics.NormalizeSamples(samples)[0]
  d = statistics.Discrepancy(samples, interval_multiplier)
  self.assertAlmostEquals(round(d, 2), 0.25)

  time_stamps_a = [0, 1, 2, 3, 5, 6]
  time_stamps_b = [0, 1, 2, 3, 5, 7]
  time_stamps_c = [0, 2, 3, 4]
  time_stamps_d = [0, 2, 3, 4, 5]
  d_abs_a = statistics.FrameDiscrepancy(time_stamps_a, True,
                                        interval_multiplier)
  d_abs_b = statistics.FrameDiscrepancy(time_stamps_b, True,
                                        interval_multiplier)
  d_abs_c = statistics.FrameDiscrepancy(time_stamps_c, True,
                                        interval_multiplier)
  d_abs_d = statistics.FrameDiscrepancy(time_stamps_d, True,
                                        interval_multiplier)
  d_rel_a = statistics.FrameDiscrepancy(time_stamps_a, False,
                                        interval_multiplier)
  d_rel_b = statistics.FrameDiscrepancy(time_stamps_b, False,
                                        interval_multiplier)
  d_rel_c = statistics.FrameDiscrepancy(time_stamps_c, False,
                                        interval_multiplier)
  d_rel_d = statistics.FrameDiscrepancy(time_stamps_d, False,
                                        interval_multiplier)
  self.assertTrue(d_abs_a < d_abs_b)
  self.assertTrue(d_rel_a < d_rel_b)
  self.assertTrue(d_rel_d < d_rel_c)
  self.assertEquals(round(d_abs_d, 2), round(d_abs_c, 2))
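
# NOTE: a minimal, brute-force sketch of the interval discrepancy that the
# analytic cases above exercise; it is NOT the implementation under test
# (which presumably uses interval_multiplier to discretize candidate
# intervals). Discrepancy here is the maximum over sub-intervals of [0, 1]
# of |fraction of samples inside - interval length|. The helper name
# _DiscrepancySketch is hypothetical.
def _DiscrepancySketch(samples):
  if not samples:
    return 1.0
  samples = sorted(samples)
  n = len(samples)
  # Sample-free intervals hugging the domain edges.
  max_d = max(samples[0], 1.0 - samples[-1])
  for i in xrange(n):
    for j in xrange(i, n):
      length = samples[j] - samples[i]
      # Closed interval [samples[i], samples[j]] holds j - i + 1 samples.
      max_d = max(max_d, (j - i + 1) / float(n) - length)
      # Open interval (samples[i], samples[j]) holds j - i - 1 samples.
      if j > i:
        max_d = max(max_d, length - (j - i - 1) / float(n))
  return max_d

# This sketch reproduces the expectations above, e.g. 1.0 for [0.0, 1.0]
# (the open interval between the two samples is empty and has length ~1)
# and 0.25 for [1/8, 3/8, 5/8, 7/8] (the evenly spread, low-discrepancy set).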
def AddResults(self, tab, results):
  # List of raw frame times.
  results.Add('frame_times', 'ms', self._stats.frame_times)

  # Arithmetic mean of frame times.
  mean_frame_time = statistics.ArithmeticMean(self._stats.frame_times,
                                              len(self._stats.frame_times))
  results.Add('mean_frame_time', 'ms', round(mean_frame_time, 3))

  # Absolute discrepancy of frame time stamps.
  jank = statistics.FrameDiscrepancy(self._stats.frame_timestamps)
  results.Add('jank', '', round(jank, 4))

  # Are we hitting 60 fps for 95 percent of all frames? (Boolean value)
  # We use 17ms as a slightly looser threshold, instead of 1000.0/60.0.
  results.Add('mostly_smooth', '',
              statistics.Percentile(self._stats.frame_times, 95.0) < 17.0)
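
# NOTE: a minimal sketch of the statistics.Percentile call used for the
# mostly_smooth check above. The linear interpolation between closest ranks
# is an assumption, not necessarily what the Telemetry implementation does;
# the helper name _PercentileSketch is hypothetical.
import math

def _PercentileSketch(values, percentile):
  """Returns the given percentile (0-100) of values."""
  if not values:
    return 0.0
  values = sorted(values)
  rank = (percentile / 100.0) * (len(values) - 1)
  low = int(math.floor(rank))
  high = int(math.ceil(rank))
  return values[low] + (values[high] - values[low]) * (rank - low)

# Usage mirroring the metric: _PercentileSketch(frame_times, 95.0) < 17.0.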
def AddResults(self, tab, results):
  if self._stats.mouse_wheel_scroll_latency:
    mean_mouse_wheel_scroll_latency = statistics.ArithmeticMean(
        self._stats.mouse_wheel_scroll_latency,
        len(self._stats.mouse_wheel_scroll_latency))
    results.Add('mean_mouse_wheel_scroll_latency', 'ms',
                round(mean_mouse_wheel_scroll_latency, 3))

  if self._stats.touch_scroll_latency:
    mean_touch_scroll_latency = statistics.ArithmeticMean(
        self._stats.touch_scroll_latency,
        len(self._stats.touch_scroll_latency))
    results.Add('mean_touch_scroll_latency', 'ms',
                round(mean_touch_scroll_latency, 3))

  if self._stats.js_touch_scroll_latency:
    mean_js_touch_scroll_latency = statistics.ArithmeticMean(
        self._stats.js_touch_scroll_latency,
        len(self._stats.js_touch_scroll_latency))
    results.Add('mean_js_touch_scroll_latency', 'ms',
                round(mean_js_touch_scroll_latency, 3))

  # List of raw frame times.
  frame_times = FlattenList(self._stats.frame_times)
  results.Add('frame_times', 'ms', frame_times)

  # Arithmetic mean of frame times.
  mean_frame_time = statistics.ArithmeticMean(frame_times, len(frame_times))
  results.Add('mean_frame_time', 'ms', round(mean_frame_time, 3))

  # Absolute discrepancy of frame time stamps.
  jank = statistics.FrameDiscrepancy(self._stats.frame_timestamps)
  results.Add('jank', '', round(jank, 4))

  # Are we hitting 60 fps for 95 percent of all frames?
  # We use 19ms as a somewhat looser threshold, instead of 1000.0/60.0.
  percentile_95 = statistics.Percentile(frame_times, 95.0)
  results.Add('mostly_smooth', 'score', 1.0 if percentile_95 < 19.0 else 0.0)

  if tab.browser.platform.IsRawDisplayFrameRateSupported():
    for r in tab.browser.platform.GetRawDisplayFrameRateMeasurements():
      if r.value is None:
        raise MissingDisplayFrameRateError(r.name)
      results.Add(r.name, r.unit, r.value)
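
# NOTE: FlattenList is called above but not defined in this snippet. A
# one-level flatten matching its use here (self._stats.frame_times is a list
# of per-timeline-range lists) would look like this; the exact contract is
# an assumption:
def FlattenList(values):
  """Flattens a list of lists into a single flat list."""
  return [item for sublist in values for item in sublist]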
def testCalcResultsTraceEvents(self):
  # Make the test repeatable by seeding the random number generator
  # (which is used by the mock timer) with a constant number.
  random.seed(1234567)

  mock_timer = MockTimer()
  trace_events = []
  total_time_seconds = 0.0
  num_frames_sent = 0.0
  first_frame = True
  previous_frame_time = None
  # This list represents time differences between frames in milliseconds.
  expected_frame_times = []

  # Append start trace events for the timeline marker and gesture marker,
  # with some amount of time in between them.
  trace_events.append({'name': RENDERER_PROCESS_MARKER,
                       'tts': mock_timer.microseconds,
                       'args': {},
                       'pid': 20978,
                       'ts': mock_timer.microseconds,
                       'cat': 'webkit',
                       'tid': 11,
                       'ph': 'S',  # Phase: start.
                       'id': '0x12345'})
  mock_timer.Advance()
  trace_events.append({'name': SYNTHETIC_GESTURE_MARKER,
                       'tts': mock_timer.microseconds,
                       'args': {},
                       'pid': 20978,
                       'ts': mock_timer.microseconds,
                       'cat': 'webkit',
                       'tid': 11,
                       'ph': 'S',
                       'id': '0xabcde'})

  # Generate 100 random mock frames and append their trace events.
  for _ in xrange(0, 100):
    mock_frame = MockFrame(mock_timer)
    mock_frame.AppendTraceEventForMainThreadStats(trace_events)
    mock_frame.AppendTraceEventForImplThreadStats(trace_events)
    # Exclude the first frame, because it may have started before the
    # benchmark run.
    if not first_frame:
      total_time_seconds += mock_frame.duration / 1e6
      num_frames_sent += mock_frame.main_stats['frame_count']
      num_frames_sent += mock_frame.impl_stats['frame_count']
    first_frame = False
    current_frame_time = mock_timer.microseconds / 1000.0
    if previous_frame_time:
      difference = current_frame_time - previous_frame_time
      difference = round(difference, 2)
      expected_frame_times.append(difference)
    previous_frame_time = current_frame_time

  # Append finish trace events for the timeline and gesture markers, in the
  # reverse order from how they were added, with some time in between.
  trace_events.append({'name': SYNTHETIC_GESTURE_MARKER,
                       'tts': mock_timer.microseconds,
                       'args': {},
                       'pid': 20978,
                       'ts': mock_timer.microseconds,
                       'cat': 'webkit',
                       'tid': 11,
                       'ph': 'F',  # Phase: finish.
                       'id': '0xabcde'})
  mock_timer.Advance()
  trace_events.append({'name': RENDERER_PROCESS_MARKER,
                       'tts': mock_timer.microseconds,
                       'args': {},
                       'pid': 20978,
                       'ts': mock_timer.microseconds,
                       'cat': 'webkit',
                       'tid': 11,
                       'ph': 'F',
                       'id': '0x12345'})

  # Create a timeline object from the trace.
  trace_result = TraceResult(ChromeRawTraceResult(trace_events))
  timeline = trace_result.AsTimelineModel()

  # Find the timeline marker and gesture marker in the timeline,
  # and create a RenderingStats object.
  renderer_process_markers = timeline.FindTimelineMarkers(
      RENDERER_PROCESS_MARKER)
  self.assertEquals(len(renderer_process_markers), 1)
  renderer_process = renderer_process_markers[0].start_thread.parent
  timeline_markers = timeline.FindTimelineMarkers(SYNTHETIC_GESTURE_MARKER)
  stats = rendering_stats.RenderingStats(renderer_process, timeline_markers)

  # Make a results object and add results to it from the smoothness metric.
  results = PageMeasurementResults()
  results.WillMeasurePage(page.Page('http://foo.com/', None))
  smoothness_metric = smoothness.SmoothnessMetric(None)
  smoothness_metric.SetStats(stats)
  smoothness_metric.AddResults(None, results)
  results.DidMeasurePage()

  self.assertEquals(expected_frame_times,
                    results.page_results[0]['frame_times'].value)
  self.assertAlmostEquals(
      1000.0 * (total_time_seconds / num_frames_sent),
      results.page_results[0]['mean_frame_time'].value,
      places=2)
  # We don't verify the correctness of the discrepancy computation itself,
  # because we have a separate unit test for that purpose.
  self.assertAlmostEquals(
      statistics.FrameDiscrepancy(stats.frame_timestamps, True),
      results.page_results[0]['jank'].value,
      places=4)
  # We do not verify the correctness of Percentile here; Percentile should
  # have its own test.
  # The 17 here represents a threshold of 17 ms; this should match the value
  # in the smoothness metric.
  self.assertEquals(
      statistics.Percentile(expected_frame_times, 95.0) < 17.0,
      results.page_results[0]['mostly_smooth'].value)
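
# NOTE: both the metric and the mean_frame_time assertion above rely on the
# two-argument statistics.ArithmeticMean(values, count). A minimal sketch of
# that contract follows; the explicit count parameter lets callers divide by
# a total other than len(values), and the zero-count guard is an assumption.
def _ArithmeticMeanSketch(values, count):
  """Returns sum(values) / count, or 0.0 for an empty population."""
  if count == 0:
    return 0.0
  return sum(values) / float(count)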