def testDifferentPageMergeSingleValueStillMerges(self):
  """A single value merged across pages still yields a merged list value.

  Even when only one page produced one value, MergeLikeValuesFromDifferentPages
  should return a ListOfScalarValues whose page is None.
  """
  page0 = self.pages[0]
  all_values = [
      scalar.ScalarValue(
          page0, 'x', 'units', 1,
          improvement_direction=improvement_direction.DOWN)
  ]
  merged_values = merge_values.MergeLikeValuesFromDifferentPages(all_values)
  # assertEquals is a deprecated alias (removed in Python 3.12); use
  # assertEqual, and assertIsInstance for a clearer failure message.
  self.assertEqual(1, len(merged_values))
  self.assertEqual((None, 'x'),
                   (merged_values[0].page, merged_values[0].name))
  self.assertIsInstance(merged_values[0],
                        list_of_scalar_values.ListOfScalarValues)
  self.assertEqual([1], merged_values[0].values)
def _ComputePerPageValues(self, all_page_specific_values):
  """Computes per-page values and cross-page summaries, interleaved.

  For each value name in alphabetical order, emits one per-page value for
  every page (pages ordered by display name), followed by the value merged
  across all pages. Cross-page merging is skipped entirely when any errors
  or failures occurred.
  """
  successful_values = all_page_specific_values

  # Count how many values were originally produced per name; a later
  # workaround uses this count to tidy the printf output.
  success_count_by_name = defaultdict(int)
  for value in successful_values:
    success_count_by_name[value.name] += 1

  # Page-repeat options can produce several same-named values for a single
  # page (e.g. one page run twice yields two 'x' values); collapse those
  # duplicates first so there is one value per name per page.
  per_page_merged = merge_values.MergeLikeValuesFromSamePage(
      successful_values)

  # Index the per-page values by name so they can be looked up while
  # walking names alphabetically below.
  per_page_by_name = defaultdict(list)
  for value in per_page_merged:
    per_page_by_name[value.name].append(value)

  # Also merge each name across all pages, again indexed by name. Left
  # empty when anything failed, so no summaries are emitted in that case.
  cross_page_by_name = {}
  if not self.had_errors_or_failures:
    for value in merge_values.MergeLikeValuesFromDifferentPages(
        successful_values):
      assert value.name not in cross_page_by_name
      cross_page_by_name[value.name] = value

  # Walk names alphabetically: first each page's value, then the summary.
  for name in sorted({v.name for v in per_page_merged}):
    ordered = sorted(per_page_by_name.get(name, []),
                     key=lambda v: v.page.display_name)
    pages_with_name = success_count_by_name[name]
    for page_value in ordered:
      self._ComputePerPageValue(page_value, pages_with_name)
    summary = cross_page_by_name.get(name)
    if summary:
      self._computed_summary_values.append(summary)
      self._interleaved_computed_per_page_values_and_summaries.append(
          summary)
def _ComputePerPageValues(self, all_page_specific_values):
  """Computes per-page values and cross-page summaries, interleaved by key.

  For each summary key in sorted order, emits one value for every page
  (pages ordered by name), followed by the value merged across all pages.
  Cross-page merging is skipped entirely when any page failed.
  """
  # Drop skipped-page markers; only real values participate.
  usable_values = [
      v for v in all_page_specific_values
      if not isinstance(v, skip.SkipValue)
  ]

  # Count how many values were originally produced per key; a later
  # workaround uses this count to tidy the printf output.
  value_count_by_key = defaultdict(int)
  for value in usable_values:
    value_count_by_key[self._key_func(value)] += 1

  # Page-repeat options can yield several same-keyed values for one page
  # (a single page run twice produces two 'x' values); merge those
  # duplicates first so there is one value per key per page.
  per_page_merged = merge_values.MergeLikeValuesFromSamePage(
      usable_values, self._key_func)

  # Index the per-page values by key so they can be looked up while
  # walking keys in sorted order below.
  per_page_by_key = defaultdict(list)
  for value in per_page_merged:
    per_page_by_key[self._key_func(value)].append(value)

  # Also merge each key across all pages, again indexed by key. Left
  # empty when anything failed, so no summaries are emitted in that case.
  cross_page_by_key = {}
  if not self._had_failures:
    for value in merge_values.MergeLikeValuesFromDifferentPages(
        per_page_merged, self._key_func):
      key = self._key_func(value)
      assert key not in cross_page_by_key
      cross_page_by_key[key] = value

  # Walk keys in sorted order: first each page's value, then the summary.
  for key in sorted({self._key_func(v) for v in per_page_merged}):
    ordered = sorted(per_page_by_key.get(key, []),
                     key=lambda v: v.page.name)
    pages_with_key = value_count_by_key[key]
    for page_value in ordered:
      self._ComputePerPageValue(page_value, pages_with_key)
    summary = cross_page_by_key.get(key)
    if summary:
      self._computed_summary_values.append(summary)
      self._interleaved_computed_per_page_values_and_summaries.append(
          summary)