Example #1
    def AddResults(self, model, renderer_thread, interaction_record, results):
        renderer_process = renderer_thread.parent
        time_bounds = bounds.Bounds()
        time_bounds.AddValue(interaction_record.start)
        time_bounds.AddValue(interaction_record.end)
        stats = rendering_stats.RenderingStats(renderer_process,
                                               model.browser_process,
                                               [time_bounds])
        if stats.mouse_wheel_scroll_latency:
            mean_mouse_wheel_scroll_latency = statistics.ArithmeticMean(
                stats.mouse_wheel_scroll_latency)
            mouse_wheel_scroll_latency_discrepancy = statistics.DurationsDiscrepancy(
                stats.mouse_wheel_scroll_latency)
            results.Add('mean_mouse_wheel_scroll_latency', 'ms',
                        round(mean_mouse_wheel_scroll_latency, 3))
            results.Add('mouse_wheel_scroll_latency_discrepancy', '',
                        round(mouse_wheel_scroll_latency_discrepancy, 4))

        if stats.touch_scroll_latency:
            mean_touch_scroll_latency = statistics.ArithmeticMean(
                stats.touch_scroll_latency)
            touch_scroll_latency_discrepancy = statistics.DurationsDiscrepancy(
                stats.touch_scroll_latency)
            results.Add('mean_touch_scroll_latency', 'ms',
                        round(mean_touch_scroll_latency, 3))
            results.Add('touch_scroll_latency_discrepancy', '',
                        round(touch_scroll_latency_discrepancy, 4))

        if stats.js_touch_scroll_latency:
            mean_js_touch_scroll_latency = statistics.ArithmeticMean(
                stats.js_touch_scroll_latency)
            js_touch_scroll_latency_discrepancy = statistics.DurationsDiscrepancy(
                stats.js_touch_scroll_latency)
            results.Add('mean_js_touch_scroll_latency', 'ms',
                        round(mean_js_touch_scroll_latency, 3))
            results.Add('js_touch_scroll_latency_discrepancy', '',
                        round(js_touch_scroll_latency_discrepancy, 4))

        # List of raw frame times.
        frame_times = FlattenList(stats.frame_times)
        results.Add('frame_times', 'ms', frame_times)

        # Arithmetic mean of frame times.
        mean_frame_time = statistics.ArithmeticMean(frame_times)
        results.Add('mean_frame_time', 'ms', round(mean_frame_time, 3))

        # Absolute discrepancy of frame time stamps.
        frame_discrepancy = statistics.TimestampsDiscrepancy(
            stats.frame_timestamps)
        results.Add('jank', 'ms', round(frame_discrepancy, 4))

        # Are we hitting 60 fps for 95 percent of all frames?
        # We use 19ms as a somewhat looser threshold, instead of 1000.0/60.0.
        percentile_95 = statistics.Percentile(frame_times, 95.0)
        results.Add('mostly_smooth', 'score',
                    1.0 if percentile_95 < 19.0 else 0.0)
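
FlattenList is used throughout these examples, but its definition is not part of the excerpts. A minimal sketch, assuming it simply flattens the per-interaction sub-lists (e.g. stats.frame_times holds one list of frame times per interaction record) into a single flat list, could be:

    def FlattenList(values):
        # Flatten a list of lists, e.g. [[16.7, 17.0], [18.2]] -> [16.7, 17.0, 18.2],
        # so that means, percentiles and discrepancies can be computed over all
        # interaction records at once.
        return [item for sublist in values for item in sublist]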
Example #2
    def AddResults(self, model, renderer_thread, interaction_records, results):
        self.VerifyNonOverlappedRecords(interaction_records)
        renderer_process = renderer_thread.parent
        stats = rendering_stats.RenderingStats(
            renderer_process, model.browser_process,
            [r.GetBounds() for r in interaction_records])

        input_event_latency = FlattenList(stats.input_event_latency)
        if input_event_latency:
            mean_input_event_latency = statistics.ArithmeticMean(
                input_event_latency)
            input_event_latency_discrepancy = statistics.DurationsDiscrepancy(
                input_event_latency)
            results.Add('mean_input_event_latency', 'ms',
                        round(mean_input_event_latency, 3))
            results.Add('input_event_latency_discrepancy', 'ms',
                        round(input_event_latency_discrepancy, 4))

        # List of raw frame times.
        frame_times = FlattenList(stats.frame_times)
        results.Add('frame_times', 'ms', frame_times)

        # Arithmetic mean of frame times.
        mean_frame_time = statistics.ArithmeticMean(frame_times)
        results.Add('mean_frame_time', 'ms', round(mean_frame_time, 3))

        # Absolute discrepancy of frame time stamps.
        frame_discrepancy = statistics.TimestampsDiscrepancy(
            stats.frame_timestamps)
        results.Add('jank', 'ms', round(frame_discrepancy, 4))

        # Are we hitting 60 fps for 95 percent of all frames?
        # We use 19ms as a somewhat looser threshold, instead of 1000.0/60.0.
        percentile_95 = statistics.Percentile(frame_times, 95.0)
        results.Add('mostly_smooth', 'score',
                    1.0 if percentile_95 < 19.0 else 0.0)

        # Mean percentage of pixels approximated (missing tiles, low resolution
        # tiles, non-ideal resolution tiles).
        results.Add(
            'mean_pixels_approximated', 'percent',
            round(
                statistics.ArithmeticMean(
                    FlattenList(stats.approximated_pixel_percentages)), 3))
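
statistics.ArithmeticMean is called above with frame_times unconditionally, so it presumably tolerates an empty list. A minimal sketch of such a helper, assuming it returns 0 for empty input rather than raising, could be:

    def ArithmeticMean(values):
        # Mean of a list of numbers; returns 0 for an empty list (assumed
        # behaviour, since frame_times is passed without an emptiness check).
        if not values:
            return 0.0
        return sum(values) / float(len(values))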
Example #3
    def testPercentile(self):
        # The 50th percentile is the median value.
        self.assertEquals(3, statistics.Percentile([4, 5, 1, 3, 2], 50))
        self.assertEquals(2.5, statistics.Percentile([5, 1, 3, 2], 50))
        # When the list of values is empty, 0 is returned.
        self.assertEquals(0, statistics.Percentile([], 50))
        # When the given percentage is very low, the lowest value is given.
        self.assertEquals(1, statistics.Percentile([2, 1, 5, 4, 3], 5))
        # When the given percentage is very high, the highest value is given.
        self.assertEquals(5, statistics.Percentile([5, 2, 4, 1, 3], 95))
        # Linear interpolation between closest ranks is used. Using the example
        # from <http://en.wikipedia.org/wiki/Percentile>:
        self.assertEquals(27.5, statistics.Percentile([15, 20, 35, 40, 50], 40))
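
The assertions above constrain Percentile fairly tightly: 0 for an empty list, clamping to the minimum or maximum at extreme percentages, and linear interpolation between closest ranks otherwise. One sketch that satisfies every test case above (not necessarily the exact implementation under test) is:

    import math

    def Percentile(values, percentile):
        # Returns 0 for an empty list; otherwise interpolates linearly between
        # closest ranks, clamping to the extremes for very low/high percentages.
        if not values:
            return 0.0
        sorted_values = sorted(values)
        n = len(sorted_values)
        fraction = percentile / 100.0
        if fraction <= 0.5 / n:
            return sorted_values[0]
        if fraction >= (n - 0.5) / n:
            return sorted_values[-1]
        floor_index = int(math.floor(n * fraction - 0.5))
        alpha = n * fraction - 0.5 - floor_index
        return (sorted_values[floor_index] * (1 - alpha) +
                sorted_values[floor_index + 1] * alpha)

With this sketch, Percentile([15, 20, 35, 40, 50], 40) interpolates halfway between 20 and 35, reproducing the 27.5 from the Wikipedia example cited in the test.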
Example #4
    def _ComputeFrameTimeMetric(self, page, stats):
        """Returns Values for the frame time metrics.

        This includes the raw and mean frame times, as well as the
        mostly_smooth metric which tracks whether we hit 60 fps for 95% of the
        frames.
        """
        frame_times = None
        mean_frame_time = None
        mostly_smooth = None
        none_value_reason = None
        if self._HasEnoughFrames(stats.frame_timestamps):
            frame_times = FlattenList(stats.frame_times)
            mean_frame_time = round(statistics.ArithmeticMean(frame_times), 3)
            # We use 19ms as a somewhat looser threshold, instead of 1000.0/60.0.
            percentile_95 = statistics.Percentile(frame_times, 95.0)
            mostly_smooth = 1.0 if percentile_95 < 19.0 else 0.0
        else:
            none_value_reason = NOT_ENOUGH_FRAMES_MESSAGE
        return (list_of_scalar_values.ListOfScalarValues(
            page,
            'frame_times',
            'ms',
            frame_times,
            description='List of raw frame times, helpful to understand the '
            'other metrics.',
            none_value_reason=none_value_reason),
                scalar.ScalarValue(
                    page,
                    'mean_frame_time',
                    'ms',
                    mean_frame_time,
                    description='Arithmetic mean of frame times.',
                    none_value_reason=none_value_reason),
                scalar.ScalarValue(
                    page,
                    'mostly_smooth',
                    'score',
                    mostly_smooth,
                    description='Were 95 percent of the frames hitting 60 fps? '
                    'Boolean value (1/0).',
                    none_value_reason=none_value_reason))
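
self._HasEnoughFrames and NOT_ENOUGH_FRAMES_MESSAGE are referenced but not shown in this excerpt. Since stats.frame_timestamps is a list of per-interaction timestamp lists, and a single timestamp yields no frame time, a plausible sketch of the guard (the message wording is an assumption) is:

    # Assumed wording; the real constant's value is not shown in the excerpt.
    NOT_ENOUGH_FRAMES_MESSAGE = 'Not enough frames to compute smoothness metrics.'

    def _HasEnoughFrames(self, list_of_frame_timestamp_lists):
        # Every interaction record must have produced at least two frame
        # timestamps, i.e. at least one frame interval.
        return all(len(timestamps) >= 2
                   for timestamps in list_of_frame_timestamp_lists)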
Example #5
    def AddResults(self, model, renderer_thread, interaction_records, results):
        self.VerifyNonOverlappedRecords(interaction_records)
        renderer_process = renderer_thread.parent
        stats = rendering_stats.RenderingStats(
            renderer_process, model.browser_process,
            [r.GetBounds() for r in interaction_records])

        input_event_latency = FlattenList(stats.input_event_latency)
        if input_event_latency:
            mean_input_event_latency = statistics.ArithmeticMean(
                input_event_latency)
            input_event_latency_discrepancy = statistics.DurationsDiscrepancy(
                input_event_latency)
            results.AddValue(
                scalar.ScalarValue(results.current_page,
                                   'mean_input_event_latency', 'ms',
                                   round(mean_input_event_latency, 3)))
            results.AddValue(
                scalar.ScalarValue(results.current_page,
                                   'input_event_latency_discrepancy', 'ms',
                                   round(input_event_latency_discrepancy, 4)))
        scroll_update_latency = FlattenList(stats.scroll_update_latency)
        if scroll_update_latency:
            mean_scroll_update_latency = statistics.ArithmeticMean(
                scroll_update_latency)
            scroll_update_latency_discrepancy = statistics.DurationsDiscrepancy(
                scroll_update_latency)
            results.AddValue(
                scalar.ScalarValue(results.current_page,
                                   'mean_scroll_update_latency', 'ms',
                                   round(mean_scroll_update_latency, 3)))
            results.AddValue(
                scalar.ScalarValue(results.current_page,
                                   'scroll_update_latency_discrepancy', 'ms',
                                   round(scroll_update_latency_discrepancy,
                                         4)))
        gesture_scroll_update_latency = FlattenList(
            stats.gesture_scroll_update_latency)
        if gesture_scroll_update_latency:
            results.AddValue(
                scalar.ScalarValue(results.current_page,
                                   'first_gesture_scroll_update_latency', 'ms',
                                   round(gesture_scroll_update_latency[0], 4)))

        # List of queueing durations.
        frame_queueing_durations = FlattenList(stats.frame_queueing_durations)
        if frame_queueing_durations:
            results.AddValue(
                list_of_scalar_values.ListOfScalarValues(
                    results.current_page, 'queueing_durations', 'ms',
                    frame_queueing_durations))

        # List of raw frame times.
        frame_times = FlattenList(stats.frame_times)
        results.AddValue(
            list_of_scalar_values.ListOfScalarValues(
                results.current_page,
                'frame_times',
                'ms',
                frame_times,
                description=
                'List of raw frame times, helpful to understand the other '
                'metrics.'))

        # Arithmetic mean of frame times.
        mean_frame_time = statistics.ArithmeticMean(frame_times)
        results.AddValue(
            scalar.ScalarValue(results.current_page,
                               'mean_frame_time',
                               'ms',
                               round(mean_frame_time, 3),
                               description='Arithmetic mean of frame times.'))

        # Absolute discrepancy of frame time stamps.
        frame_discrepancy = statistics.TimestampsDiscrepancy(
            stats.frame_timestamps)
        results.AddValue(
            scalar.ScalarValue(
                results.current_page,
                'jank',
                'ms',
                round(frame_discrepancy, 4),
                description='Absolute discrepancy of frame time stamps, where '
                'discrepancy is a measure of irregularity. It quantifies '
                'the worst jank. For a single pause, discrepancy '
                'corresponds to the length of this pause in milliseconds. '
                'Consecutive pauses increase the discrepancy. This metric '
                'is important because even if the mean and 95th '
                'percentile are good, one long pause in the middle of an '
                'interaction is still bad.'))

        # Are we hitting 60 fps for 95 percent of all frames?
        # We use 19ms as a somewhat looser threshold, instead of 1000.0/60.0.
        percentile_95 = statistics.Percentile(frame_times, 95.0)
        results.AddValue(
            scalar.ScalarValue(
                results.current_page,
                'mostly_smooth',
                'score',
                1.0 if percentile_95 < 19.0 else 0.0,
                description='Were 95 percent of the frames hitting 60 fps? '
                'Boolean value (1/0).'))

        # Mean percentage of pixels approximated (missing tiles, low resolution
        # tiles, non-ideal resolution tiles).
        results.AddValue(
            scalar.ScalarValue(
                results.current_page,
                'mean_pixels_approximated',
                'percent',
                round(
                    statistics.ArithmeticMean(
                        FlattenList(stats.approximated_pixel_percentages)), 3),
                description='Percentage of pixels that were approximated '
                '(checkerboarding, low-resolution tiles, etc.).'))
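
The two discrepancy helpers used in these examples differ only in their input: DurationsDiscrepancy is applied to latency durations, TimestampsDiscrepancy to frame time stamps. One plausible way to relate them, sketched here under the assumption that the durations are simply accumulated into a timestamp series and handed to the timestamp-based metric, is:

    def DurationsDiscrepancy(durations):
        # Sketch only: accumulate durations into timestamps (each duration is
        # the gap between consecutive events) and reuse the timestamp-based
        # discrepancy metric assumed above.
        if not durations:
            return 0.0
        timestamps = [0.0]
        for duration in durations:
            timestamps.append(timestamps[-1] + duration)
        return TimestampsDiscrepancy(timestamps)

Either way, the 'jank' description above captures the key intuition: for a single long pause, the discrepancy is roughly the length of that pause in milliseconds, and consecutive pauses push it higher.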