def _CalculateStatistics(self, value_list, trace_name):
    """Handles statistics generation and recording for page-cycler data.

    Sums the timings over all pages for each iteration and returns a tuple
    (mean, standard deviation) of those sums.  Also saves a data file
    <revision>_<tracename>.dat holding a line of times for each URL loaded,
    for use by humans when debugging a regression.
    """
    # If the name of the trace is one of the pages in the page list, then we
    # are dealing with the results for that page only, not the overall
    # results, so calculate the statistics like a normal GraphingLogProcessor,
    # not the GraphingPageCyclerLogProcessor.
    if trace_name in self._page_list:
        return super(GraphingPageCyclerLogProcessor,
                     self)._CalculateStatistics(value_list, trace_name)

    value_count = len(value_list)
    page_count = len(self._page_list)
    # Chunk value_list into groups, where each sub-list holds all the page
    # times of one iteration.
    iterations = [
        value_list[start:start + page_count]
        for start in xrange(0, value_count, page_count)
    ]

    # Total time per iteration, plus a per-page transpose of the same data.
    iteration_times = map(sum, iterations)
    page_times_list = map(list, zip(*iterations))
    page_times_dict = dict(zip(self._page_list, page_times_list))

    pagedata = self._SavePageData(page_times_dict, trace_name)
    val, stddev = chromium_utils.FilteredMeanAndStandardDeviation(
        iteration_times)
    return val, stddev, pagedata
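
To make the chunk-and-transpose step concrete, here is a minimal standalone sketch with made-up data (two pages loaded over two iterations, flattened into one value_list); the URLs and timings are hypothetical.

# Hypothetical inputs: two pages, two iterations, flattened row-major.
page_list = ['http://a.test/', 'http://b.test/']
value_list = [1.0, 2.0, 3.0, 4.0]

page_count = len(page_list)
iterations = [value_list[start:start + page_count]
              for start in range(0, len(value_list), page_count)]
# iterations == [[1.0, 2.0], [3.0, 4.0]]   (one sub-list per iteration)

iteration_times = [sum(times) for times in iterations]
# iteration_times == [3.0, 7.0]            (one total per iteration)

page_times = dict(zip(page_list, [list(col) for col in zip(*iterations)]))
# page_times == {'http://a.test/': [1.0, 3.0], 'http://b.test/': [2.0, 4.0]}
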
Example #2
def _CalculateStatistics(self, value_list, trace_name):
    """Handles statistics generation and recording for page-cycler data.

    Sums the timings over all pages for each iteration and returns a tuple
    (mean, standard deviation) of those sums.  Also saves a data file
    <revision>_<tracename>.dat holding a line of times for each URL loaded,
    for use by humans when debugging a regression.
    """
    sums = []
    page_times = {}
    page_count = len(self._page_list)

    # Each iteration contributes one time per page, so the iteration count
    # is the total number of values divided by the page count.
    iteration_count = len(value_list) // page_count
    for iteration in range(iteration_count):
        start = page_count * iteration
        end = start + page_count
        iteration_times = value_list[start:end]
        sums.append(sum(iteration_times))
        for page_index in range(page_count):
            page = self._page_list[page_index]
            if page not in page_times:
                page_times[page] = []
            page_times[page].append(iteration_times[page_index])
    if self._ShouldWriteResults():
        self.__SavePageData(page_times, trace_name)
    return chromium_utils.FilteredMeanAndStandardDeviation(sums)
Example #3
def _CalculateStatistics(self, value_list, trace_name):
    """Handles statistics generation and recording for page-cycler data.

    Sums the timings over all pages for each iteration and returns a tuple
    (mean, standard deviation) of those sums.  Also saves a data file
    <revision>_<tracename>.dat holding a line of times for each URL loaded,
    for use by humans when debugging a regression.
    """
    # If the name of the trace is one of the pages in the page list, then we
    # are dealing with the results for that page only, not the overall
    # results, so calculate the statistics like a normal GraphingLogProcessor,
    # not the GraphingPageCyclerLogProcessor.
    if trace_name in self._page_list:
        return super(GraphingPageCyclerLogProcessor,
                     self)._CalculateStatistics(value_list, trace_name)

    sums = []
    page_times = {}
    page_count = len(self._page_list)

    # Each iteration contributes one time per page.
    iteration_count = len(value_list) // page_count
    for iteration in range(iteration_count):
        start = page_count * iteration
        end = start + page_count
        iteration_times = value_list[start:end]
        sums.append(sum(iteration_times))
        for page_index in range(page_count):
            page = self._page_list[page_index]
            if page not in page_times:
                page_times[page] = []
            page_times[page].append(iteration_times[page_index])
    pagedata = self._SavePageData(page_times, trace_name)
    val, stddev = chromium_utils.FilteredMeanAndStandardDeviation(sums)
    return val, stddev, pagedata
Example #4
def _CalculateStatistics(self, value_list, trace_name):
    """Returns a tuple (mean, standard deviation) from a list of values.

    This method may be overridden by subclasses wanting a different standard
    deviation calculation (or some other sort of error value entirely).

    Args:
      value_list: the list of values to use in the calculation
      trace_name: the trace that produced the data (not used in the base
          implementation, but subclasses may use it)
    """
    return chromium_utils.FilteredMeanAndStandardDeviation(value_list)
Example #5
def __SavePageData(self, page_times, trace_name):
    """Saves a file holding the timing data for each page loaded.

    Args:
      page_times: a dict mapping a page URL to a list of its times
      trace_name: the trace that produced this set of times

    Returns:
      A dict with one entry, mapping filename to file contents.
    """
    file_data = []
    for page in self._page_list:
        times = page_times[page]
        mean, stddev = chromium_utils.FilteredMeanAndStandardDeviation(times)
        file_data.append('%s (%s+/-%s): %s' %
                         (page, FormatFloat(mean), FormatFloat(stddev),
                          JoinWithSpacesAndNewLine(times)))

    filename = '%s_%s.dat' % (self._revision, trace_name)
    return {filename: file_data}
Example #6
def _SavePageData(self, page_times, trace_name):
    """Saves a file holding the timing data for each page loaded.

    Args:
      page_times: a dict mapping a page URL to a list of its times
      trace_name: the trace that produced this set of times

    Returns:
      A dict with one entry, mapping filename to file contents.
    """
    file_data = []
    for page, times in sorted(page_times.iteritems()):
        mean, stddev = chromium_utils.FilteredMeanAndStandardDeviation(times)
        file_data.append('%s (%s+/-%s): %s' %
                         (page, _FormatFloat(mean), _FormatFloat(stddev),
                          _JoinWithSpacesAndNewLine(times)))

    filename = '%s_%s.dat' % (self._revision, trace_name)
    return {filename: file_data}
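
The helpers _FormatFloat and _JoinWithSpacesAndNewLine are not shown in this listing, so the sketch below stubs them with assumed behavior (two-decimal formatting; space-joined times with a trailing newline) just to show the shape of the returned dict. The page URL, revision, and timings are invented, and a plain mean/stddev stands in for chromium_utils.FilteredMeanAndStandardDeviation.

import math

# Stubs for helpers not shown in this listing; their behavior here is an
# assumption, not the real implementation.
def _FormatFloat(value):
    return '%.2f' % value

def _JoinWithSpacesAndNewLine(values):
    return ' '.join(str(v) for v in values) + '\n'

def plain_mean_and_stddev(values):
    # Simplified stand-in for chromium_utils.FilteredMeanAndStandardDeviation
    # (the real one also drops the max sample; see the tests below).
    mean = sum(values) / float(len(values))
    return mean, math.sqrt(sum((v - mean) ** 2 for v in values) / len(values))

page_times = {'http://a.test/': [10.0, 12.0]}  # hypothetical data
revision, trace_name = '1234', 't'             # hypothetical identifiers

file_data = []
for page, times in sorted(page_times.items()):
    mean, stddev = plain_mean_and_stddev(times)
    file_data.append('%s (%s+/-%s): %s' %
                     (page, _FormatFloat(mean), _FormatFloat(stddev),
                      _JoinWithSpacesAndNewLine(times)))

print({'%s_%s.dat' % (revision, trace_name): file_data})
# {'1234_t.dat': ['http://a.test/ (11.00+/-1.00): 10.0 12.0\n']}
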
Example #7
def __SavePageData(self, page_times, trace_name):
    """Saves a file holding the timing data for each page loaded.

    Args:
      page_times: a dict mapping a page URL to a list of its times
      trace_name: the trace that produced this set of times
    """
    file_data = []
    for page in self._page_list:
        times = page_times[page]
        mean, stddev = chromium_utils.FilteredMeanAndStandardDeviation(times)
        file_data.append('%s (%s+/-%s): %s' %
                         (page, FormatFloat(mean), FormatFloat(stddev),
                          JoinWithSpacesAndNewLine(times)))

    filename = os.path.join(self._output_dir,
                            '%s_%s.dat' % (self._revision, trace_name))
    # Write the lines out, then make the file world-readable.
    with open(filename, 'w') as fileobj:
        fileobj.write(''.join(file_data))
    os.chmod(filename, READABLE_FILE_PERMISSIONS)
Example #8
def testFilteredMeanAndStandardDeviationOne(self):
    sample_data = [4]  # Should not filter the max in this case.
    mean, stdd = chromium_utils.FilteredMeanAndStandardDeviation(sample_data)
    self.assertEqual(4, mean)
    self.assertEqual(0, stdd)
Example #9
def testFilteredMeanAndStandardDeviation(self):
    sample_data = [4, 4, 6, 12345, 100, 20]  # The max should be ignored.
    mean, stdd = chromium_utils.FilteredMeanAndStandardDeviation(sample_data)
    self.assertEqual(26.8, mean)
    self.assertAlmostEqual(37.08585, stdd, 5)
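
Taken together, the two tests pin down the filtering rule: with more than one sample, one occurrence of the maximum is dropped before computing the (population) mean and standard deviation, while a single sample is left untouched. Below is a minimal sketch consistent with both assertions; it is an inference from the tests, not the actual chromium_utils implementation.

import math

def filtered_mean_and_standard_deviation(values):
    # Drop one occurrence of the max, but only when there is more than one
    # sample (matching testFilteredMeanAndStandardDeviationOne above).
    if len(values) > 1:
        values = sorted(values)[:-1]
    mean = sum(values) / float(len(values))
    # Population standard deviation, which reproduces the 37.08585 assertion.
    stddev = math.sqrt(sum((v - mean) ** 2 for v in values) / len(values))
    return mean, stddev

print(filtered_mean_and_standard_deviation([4]))
# (4.0, 0.0)
print(filtered_mean_and_standard_deviation([4, 4, 6, 12345, 100, 20]))
# (26.8, 37.0858...)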