def _ComputeMeanPixelsCheckerboarded(self, page, stats):
  """Add the mean percentage of pixels checkerboarded.

  This looks at tiles which are only missing. It does not take into
  consideration tiles which are of low or non-ideal resolution.

  Args:
    page: the page the stats were collected on.
    stats: a rendering stats object with frame_timestamps, errors and
        checkerboarded_pixel_percentages attributes.

  Returns:
    A ScalarValue named 'mean_pixels_checkerboarded' (percent); its value
    is None with an explanatory none_value_reason when there are not
    enough frames or a checkerboarding measurement error was recorded.
  """
  mean_pixels_checkerboarded = None
  none_value_reason = None
  if self._HasEnoughFrames(stats.frame_timestamps):
    # A recorded measurement error takes precedence over computing a mean.
    if rendering_stats.CHECKERBOARDED_PIXEL_ERROR in stats.errors:
      none_value_reason = stats.errors[
          rendering_stats.CHECKERBOARDED_PIXEL_ERROR]
    else:
      mean_pixels_checkerboarded = round(
          statistics.ArithmeticMean(
              perf_tests_helper.FlattenList(
                  stats.checkerboarded_pixel_percentages)), 3)
  else:
    none_value_reason = NOT_ENOUGH_FRAMES_MESSAGE
  return scalar.ScalarValue(
      page, 'mean_pixels_checkerboarded', 'percent',
      mean_pixels_checkerboarded,
      description='Percentage of pixels that were checkerboarded.',
      none_value_reason=none_value_reason,
      # Fewer checkerboarded pixels is better; added for consistency with
      # _ComputeMeanPixelsApproximated.
      improvement_direction=improvement_direction.DOWN)
def _ComputeQueueingDuration(self, page, stats):
  """Returns a Value for the frame queueing durations.

  Args:
    page: the page the stats were collected on.
    stats: a rendering stats object with frame_timestamps, errors and
        frame_queueing_durations attributes.

  Returns:
    A ListOfScalarValues named 'queueing_durations' (ms); its values are
    None with an explanatory none_value_reason when a measurement error
    was recorded, there are not enough frames, or no durations were
    recorded at all.
  """
  queueing_durations = None
  none_value_reason = None
  if 'frame_queueing_durations' in stats.errors:
    none_value_reason = stats.errors['frame_queueing_durations']
  elif self._HasEnoughFrames(stats.frame_timestamps):
    queueing_durations = perf_tests_helper.FlattenList(
        stats.frame_queueing_durations)
    # An empty list is reported as a None value with a reason, not as [].
    if not queueing_durations:
      queueing_durations = None
      none_value_reason = 'No frame queueing durations recorded.'
  else:
    none_value_reason = NOT_ENOUGH_FRAMES_MESSAGE
  return list_of_scalar_values.ListOfScalarValues(
      page, 'queueing_durations', 'ms', queueing_durations,
      description='The frame queueing duration quantifies how out of sync '
                  'the compositor and renderer threads are. It is the amount '
                  'of wall time that elapses between a '
                  'ScheduledActionSendBeginMainFrame event in the compositor '
                  'thread and the corresponding BeginMainFrame event in the '
                  'main thread.',
      none_value_reason=none_value_reason,
      # Shorter queueing durations are better; added for consistency with
      # the other ListOfScalarValues metrics in this file.
      improvement_direction=improvement_direction.DOWN)
def _ComputeFrameTimeMetric(
    self, prefix, page, frame_timestamps, frame_times):
  """Returns Values for the frame time metrics.

  This includes the raw and mean frame times, as well as the percentage
  of frames that were hitting 60 fps.
  """
  flatten_frame_times = None
  percentage_smooth = None
  none_value_reason = None
  if not self._HasEnoughFrames(frame_timestamps):
    none_value_reason = NOT_ENOUGH_FRAMES_MESSAGE
  else:
    flatten_frame_times = perf_tests_helper.FlattenList(frame_times)
    # We use 17ms as a somewhat looser threshold, instead of 1000.0/60.0.
    smooth_threshold = 17.0
    smooth_count = len(
        [t for t in flatten_frame_times if t < smooth_threshold])
    percentage_smooth = float(smooth_count) / len(flatten_frame_times) * 100.0
  raw_times_value = list_of_scalar_values.ListOfScalarValues(
      page, '%sframe_times' % prefix, 'ms', flatten_frame_times,
      description='List of raw frame times, helpful to understand the '
                  'other metrics.',
      none_value_reason=none_value_reason,
      improvement_direction=improvement_direction.DOWN)
  smoothness_value = scalar.ScalarValue(
      page, '%spercentage_smooth' % prefix, 'score', percentage_smooth,
      description='Percentage of frames that were hitting 60 fps.',
      none_value_reason=none_value_reason,
      improvement_direction=improvement_direction.UP)
  return (raw_times_value, smoothness_value)
def _ComputeLatencyMetric(self, page, stats, name, list_of_latency_lists):
  """Returns Values for the mean and discrepancy for given latency stats."""
  mean_latency = None
  latency_discrepancy = None
  none_value_reason = None
  if not self._HasEnoughFrames(stats.frame_timestamps):
    none_value_reason = NOT_ENOUGH_FRAMES_MESSAGE
  else:
    latency_values = perf_tests_helper.FlattenList(list_of_latency_lists)
    # No samples at all: emit no values rather than None-valued ones.
    if not latency_values:
      return ()
    mean_latency = round(statistics.ArithmeticMean(latency_values), 3)
    latency_discrepancy = round(
        statistics.DurationsDiscrepancy(latency_values), 4)
  mean_value = scalar.ScalarValue(
      page, 'mean_%s' % name, 'ms', mean_latency,
      description='Arithmetic mean of the raw %s values' % name,
      none_value_reason=none_value_reason)
  discrepancy_value = scalar.ScalarValue(
      page, '%s_discrepancy' % name, 'ms', latency_discrepancy,
      description='Discrepancy of the raw %s values' % name,
      none_value_reason=none_value_reason)
  return (mean_value, discrepancy_value)
def _ComputeLatencyMetric(self, page, name, list_of_latency_lists):
  """Returns Values for given latency stats.

  Args:
    page: the page the latencies were collected on.
    name: name of the latency metric; used as the value's name.
    list_of_latency_lists: nested lists of raw latency samples in ms.

  Returns:
    A ListOfScalarValues of the flattened latency samples, or None when
    no samples were recorded.
  """
  # Dead `latency_list = None` initialization removed: it was immediately
  # overwritten by the FlattenList result.
  latency_list = perf_tests_helper.FlattenList(list_of_latency_lists)
  if not latency_list:
    return None
  return list_of_scalar_values.ListOfScalarValues(
      page, name, 'ms', latency_list,
      description='Raw %s values' % name,
      improvement_direction=improvement_direction.DOWN)
def _ComputeLatencyMetric(self, page, stats, name, list_of_latency_lists):
  """Returns Values for given latency stats."""
  none_value_reason = None
  latency_list = None
  if not self._HasEnoughFrames(stats.frame_timestamps):
    none_value_reason = NOT_ENOUGH_FRAMES_MESSAGE
  else:
    latency_list = perf_tests_helper.FlattenList(list_of_latency_lists)
    # Nothing recorded at all: emit no value for this metric.
    if not latency_list:
      return None
  return list_of_scalar_values.ListOfScalarValues(
      page, name, 'ms', latency_list,
      description='Raw %s values' % name,
      none_value_reason=none_value_reason,
      improvement_direction=improvement_direction.DOWN)
def _ComputeMeanPixelsApproximated(self, page, stats):
  """Add the mean percentage of pixels approximated.

  This looks at tiles which are missing or of low or non-ideal resolution.
  """
  mean_pixels_approximated = None
  none_value_reason = None
  if not self._HasEnoughFrames(stats.frame_timestamps):
    none_value_reason = NOT_ENOUGH_FRAMES_MESSAGE
  else:
    percentages = perf_tests_helper.FlattenList(
        stats.approximated_pixel_percentages)
    mean_pixels_approximated = round(
        statistics.ArithmeticMean(percentages), 3)
  return scalar.ScalarValue(
      page, 'mean_pixels_approximated', 'percent', mean_pixels_approximated,
      description='Percentage of pixels that were approximated '
                  '(checkerboarding, low-resolution tiles, etc.).',
      none_value_reason=none_value_reason,
      improvement_direction=improvement_direction.DOWN)
def _ComputeFrameTimeMetric(self, page, stats):
  """Returns Values for the frame time metrics.

  This includes the raw and mean frame times, as well as the percentage
  of frames that were hitting 60 fps.
  """
  frame_times = None
  mean_frame_time = None
  percentage_smooth = None
  none_value_reason = None
  if not self._HasEnoughFrames(stats.frame_timestamps):
    none_value_reason = NOT_ENOUGH_FRAMES_MESSAGE
  else:
    frame_times = perf_tests_helper.FlattenList(stats.frame_times)
    mean_frame_time = round(statistics.ArithmeticMean(frame_times), 3)
    # We use 17ms as a somewhat looser threshold, instead of 1000.0/60.0.
    smooth_threshold = 17.0
    smooth_count = len([t for t in frame_times if t < smooth_threshold])
    percentage_smooth = float(smooth_count) / len(frame_times) * 100.0
  raw_times_value = list_of_scalar_values.ListOfScalarValues(
      page, 'frame_times', 'ms', frame_times,
      description='List of raw frame times, helpful to understand the '
                  'other metrics.',
      none_value_reason=none_value_reason)
  mean_value = scalar.ScalarValue(
      page, 'mean_frame_time', 'ms', mean_frame_time,
      description='Arithmetic mean of frame times.',
      none_value_reason=none_value_reason)
  smoothness_value = scalar.ScalarValue(
      page, 'percentage_smooth', 'score', percentage_smooth,
      description='Percentage of frames that were hitting 60 fps.',
      none_value_reason=none_value_reason)
  return (raw_times_value, mean_value, smoothness_value)
def _ComputeFirstGestureScrollUpdateLatency(self, page, stats):
  """Returns a Value for the first gesture scroll update latency."""
  first_gesture_scroll_update_latency = None
  none_value_reason = None
  if not self._HasEnoughFrames(stats.frame_timestamps):
    none_value_reason = NOT_ENOUGH_FRAMES_MESSAGE
  else:
    latency_list = perf_tests_helper.FlattenList(
        stats.gesture_scroll_update_latency)
    # No gesture scroll updates recorded: emit no value at all.
    if not latency_list:
      return ()
    first_gesture_scroll_update_latency = round(latency_list[0], 4)
  return (scalar.ScalarValue(
      page, 'first_gesture_scroll_update_latency', 'ms',
      first_gesture_scroll_update_latency,
      description='First gesture scroll update latency measures the time it '
                  'takes to process the very first gesture scroll update '
                  'input event. The first scroll gesture can often get '
                  'delayed by work related to page loading.',
      none_value_reason=none_value_reason),)
def testInputLatencyFromTimeline(self):
  """Checks that latency stats are extracted only from marked action ranges.

  Builds a synthetic timeline with three console actions (A, B, A) that
  each contain 10 latency events, plus 5 latency events outside any
  action, then verifies GetLatencyEvents / ComputeEventLatencies /
  RenderingStats against the reference stats accumulated while building.
  """
  timeline = model.TimelineModel()
  # Create a browser process and a renderer process.
  browser = timeline.GetOrCreateProcess(pid=1)
  browser_main = browser.GetOrCreateThread(tid=11)
  renderer = timeline.GetOrCreateProcess(pid=2)
  renderer_main = renderer.GetOrCreateThread(tid=21)
  timer = MockTimer()
  # Reference stats collected alongside the events fed into the timeline;
  # only events added inside actions pass a non-None ref_latency.
  ref_latency = ReferenceInputLatencyStats()
  # Create 10 input latency stats events for Action A.
  renderer_main.BeginSlice('webkit.console', 'ActionA',
                           timer.AdvanceAndGet(2, 4), '')
  for _ in xrange(0, 10):
    AddInputLatencyStats(timer, browser_main, renderer_main, ref_latency)
  renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
  # Create 5 input latency stats events not within any action.
  timer.Advance(2, 4)
  for _ in xrange(0, 5):
    AddInputLatencyStats(timer, browser_main, renderer_main, None)
  # Create 10 input latency stats events for Action B.
  renderer_main.BeginSlice('webkit.console', 'ActionB',
                           timer.AdvanceAndGet(2, 4), '')
  for _ in xrange(0, 10):
    AddInputLatencyStats(timer, browser_main, renderer_main, ref_latency)
  renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
  # Create 10 input latency stats events for Action A.
  renderer_main.BeginSlice('webkit.console', 'ActionA',
                           timer.AdvanceAndGet(2, 4), '')
  for _ in xrange(0, 10):
    AddInputLatencyStats(timer, browser_main, renderer_main, ref_latency)
  renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
  browser.FinalizeImport()
  renderer.FinalizeImport()
  # Collect latency events only from the non-empty marked action ranges.
  latency_events = []
  timeline_markers = timeline.FindTimelineMarkers(
      ['ActionA', 'ActionB', 'ActionA'])
  timeline_ranges = [bounds.Bounds.CreateFromEvent(marker)
                     for marker in timeline_markers]
  for timeline_range in timeline_ranges:
    if timeline_range.is_empty:
      continue
    latency_events.extend(rendering_stats.GetLatencyEvents(
        browser, timeline_range))
  self.assertEquals(latency_events, ref_latency.input_event)
  event_latency_result = rendering_stats.ComputeEventLatencies(latency_events)
  self.assertEquals(event_latency_result, ref_latency.input_event_latency)
  stats = rendering_stats.RenderingStats(
      renderer, browser, None, timeline_ranges)
  # input_event_latency excludes scroll updates; scroll_update_latency and
  # gesture_scroll_update_latency each hold only their own event kind.
  self.assertEquals(
      perf_tests_helper.FlattenList(stats.input_event_latency),
      [latency for name, latency in ref_latency.input_event_latency
       if name != rendering_stats.SCROLL_UPDATE_EVENT_NAME])
  self.assertEquals(
      perf_tests_helper.FlattenList(stats.scroll_update_latency),
      [latency for name, latency in ref_latency.input_event_latency
       if name == rendering_stats.SCROLL_UPDATE_EVENT_NAME])
  self.assertEquals(
      perf_tests_helper.FlattenList(stats.gesture_scroll_update_latency),
      [latency for name, latency in ref_latency.input_event_latency
       if name == rendering_stats.GESTURE_SCROLL_UPDATE_EVENT_NAME])