    def test_add_sample(self):
        mean_data = 1.0
        count_data = 0
        sum_of_sqd_deviations = 2
        counts_per_bucket = [1, 1, 1, 1]
        bounds = [0.5, 1, 1.5]

        value = 3

        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds)

        dist_agg_data.add_sample(value, None, None)
        self.assertEqual(1, dist_agg_data.count_data)
        self.assertEqual(value, dist_agg_data.mean_data)

        count_data = 1
        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds)

        dist_agg_data.add_sample(value, None, None)
        self.assertEqual(2, dist_agg_data.count_data)
        self.assertEqual(2.0, dist_agg_data.mean_data)
        self.assertEqual(4.0, dist_agg_data.sum_of_sqd_deviations)
        self.assertIsNot(0, dist_agg_data.count_data)
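
# A minimal standalone sketch (not the library code) of the incremental,
# Welford-style update that the assertions above assume add_sample performs.
def welford_update(count, mean, sum_sqd, value):
    # Fold one new sample into the running count/mean/sum of squared deviations.
    count += 1
    old_mean = mean
    mean += (value - mean) / count
    sum_sqd += (value - old_mean) * (value - mean)
    return count, mean, sum_sqd

# Starting from count=1, mean=1.0, sum_sqd=2 and adding value=3 yields
# count=2, mean=2.0, sum_sqd=4.0, matching the expectations above.
assert welford_update(1, 1.0, 2, 3) == (2, 2.0, 4.0)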

    def test_init_bad_bounds(self):
        # Check that bounds are unique
        with self.assertRaises(AssertionError):
            aggregation_data_module.DistributionAggregationData(
                mean_data=mock.Mock(),
                count_data=mock.Mock(),
                min_=mock.Mock(),
                max_=mock.Mock(),
                sum_of_sqd_deviations=mock.Mock(),
                counts_per_bucket=[0, 0, 0, 0],
                bounds=[1, 2, 2])

        # Check that bounds are sorted
        with self.assertRaises(AssertionError):
            aggregation_data_module.DistributionAggregationData(
                mean_data=mock.Mock(),
                count_data=mock.Mock(),
                min_=mock.Mock(),
                max_=mock.Mock(),
                sum_of_sqd_deviations=mock.Mock(),
                counts_per_bucket=[0, 0, 0, 0],
                bounds=[1, 3, 2])

        # Check that all bounds are positive
        with self.assertRaises(AssertionError):
            aggregation_data_module.DistributionAggregationData(
                mean_data=mock.Mock(),
                count_data=mock.Mock(),
                min_=mock.Mock(),
                max_=mock.Mock(),
                sum_of_sqd_deviations=mock.Mock(),
                counts_per_bucket=[0, 0, 0, 0],
                bounds=[-1, 1, 2])

    def test_variance(self):
        mean_data = mock.Mock()
        count_data = 0
        _min = mock.Mock()
        _max = mock.Mock()
        sum_of_sqd_deviations = mock.Mock()
        counts_per_bucket = [1, 1, 1]
        bounds = [1.0 / 2.0, 1]
        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            min_=_min,
            max_=_max,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds)
        self.assertEqual(0, dist_agg_data.variance)

        count_data = 2
        sum_of_sqd_deviations = 2
        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            min_=_min,
            max_=_max,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds)
        self.assertEqual(2.0, dist_agg_data.variance)
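
# The sample-variance rule these assertions assume (a sketch, not the
# property's implementation): zero with fewer than two samples, otherwise
# sum_of_sqd_deviations / (count_data - 1).
def sample_variance(count_data, sum_of_sqd_deviations):
    if count_data <= 1:
        return 0
    return sum_of_sqd_deviations / (count_data - 1)

assert sample_variance(0, 2) == 0     # first construction above
assert sample_variance(2, 2) == 2.0   # second construction above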

    def test_init_bad_bucket_counts(self):
        # Check that len(counts_per_bucket) == len(bounds) + 1
        with self.assertRaises(AssertionError):
            aggregation_data_module.DistributionAggregationData(
                mean_data=mock.Mock(),
                count_data=mock.Mock(),
                min_=mock.Mock(),
                max_=mock.Mock(),
                sum_of_sqd_deviations=mock.Mock(),
                counts_per_bucket=[0, 0, 0],
                bounds=[1, 2, 3])

        # Check that counts aren't negative
        with self.assertRaises(AssertionError):
            aggregation_data_module.DistributionAggregationData(
                mean_data=mock.Mock(),
                count_data=mock.Mock(),
                min_=mock.Mock(),
                max_=mock.Mock(),
                sum_of_sqd_deviations=mock.Mock(),
                counts_per_bucket=[0, 2, -2, 0],
                bounds=[1, 2, 3])

        # And check that we don't throw given the right args
        aggregation_data_module.DistributionAggregationData(
            mean_data=mock.Mock(),
            count_data=mock.Mock(),
            min_=mock.Mock(),
            max_=mock.Mock(),
            sum_of_sqd_deviations=mock.Mock(),
            counts_per_bucket=[0, 0, 0, 0],
            bounds=[1, 2, 3])

    def test_increment_bucket_count(self):
        mean_data = mock.Mock()
        count_data = mock.Mock()
        _min = 0
        _max = 1
        sum_of_sqd_deviations = mock.Mock()
        counts_per_bucket = [0]
        bounds = []

        value = 1

        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            min_=_min,
            max_=_max,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds
        )

        dist_agg_data.increment_bucket_count(value=value)
        self.assertEqual([1], dist_agg_data.counts_per_bucket)

        counts_per_bucket = [1, 1]
        bounds = [1.0 / 4.0, 3.0 / 2.0]

        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            min_=_min,
            max_=_max,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds
        )

        dist_agg_data.increment_bucket_count(value=value)
        self.assertEqual([1, 2], dist_agg_data.counts_per_bucket)

        bounds = [1.0 / 4.0, 1.0 / 2.0]

        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            min_=_min,
            max_=_max,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds
        )

        dist_agg_data.increment_bucket_count(value=value)
        self.assertEqual([1, 3], dist_agg_data.counts_per_bucket)

    def test_add_sample_attachment(self):
        mean_data = 1.0
        count_data = 1
        _min = 0
        _max = 1
        sum_of_sqd_deviations = 2
        counts_per_bucket = [1, 1, 1, 1]
        bounds = [0.5, 1, 1.5]

        value = 3
        timestamp = time.time()
        attachments = {"One": "one", "Two": "two"}
        exemplar_1 = aggregation_data_module.Exemplar(4, timestamp,
                                                      attachments)

        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            min_=_min,
            max_=_max,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds,
            exemplars=[None, None, None, exemplar_1])

        self.assertEqual(dist_agg_data.exemplars[3], exemplar_1)

        dist_agg_data.add_sample(value, timestamp, attachments)
        self.assertEqual(0, dist_agg_data.min)
        self.assertEqual(3, dist_agg_data.max)
        self.assertEqual(2, dist_agg_data.count_data)
        self.assertEqual(2.0, dist_agg_data.mean_data)
        # Check that adding a sample overwrites the bucket's exemplar
        self.assertNotEqual(dist_agg_data.exemplars[3], exemplar_1)
        self.assertEqual(dist_agg_data.exemplars[3].value, 3)
        self.assertEqual(dist_agg_data.exemplars[3].attachments, attachments)

        count_data = 4
        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            min_=_min,
            max_=_max,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=[2, 1, 2, 1, 1, 1],
            bounds=[1, 2, 3, 4, 5])

        dist_agg_data.add_sample(value, timestamp, attachments)
        self.assertEqual(5, dist_agg_data.count_data)
        self.assertEqual(1.4, dist_agg_data.mean_data)
        self.assertEqual(5.2, dist_agg_data.sum_of_sqd_deviations)
        self.assertIsNot(0, dist_agg_data.count_data)
        self.assertEqual(3, dist_agg_data.exemplars[3].value)

    def test_constructor(self):
        mean_data = 1
        count_data = 0
        _min = 0
        _max = 1
        sum_of_sqd_deviations = mock.Mock()
        counts_per_bucket = [1, 1, 1]
        bounds = [1.0 / 2.0, 1]

        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            min_=_min,
            max_=_max,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds)

        self.assertEqual(1, dist_agg_data.mean_data)
        self.assertEqual(0, dist_agg_data.count_data)
        self.assertEqual(0, dist_agg_data.min)
        self.assertEqual(1, dist_agg_data.max)
        self.assertEqual(sum_of_sqd_deviations,
                         dist_agg_data.sum_of_sqd_deviations)
        self.assertEqual([1, 1, 1], dist_agg_data.counts_per_bucket)
        self.assertEqual([1.0 / 2.0, 1], dist_agg_data.bounds)

        self.assertIsNotNone(dist_agg_data.sum)
        self.assertEqual(0, dist_agg_data.variance)

    def test_to_point(self):
        timestamp = datetime(1970, 1, 1)
        ex_9 = aggregation_data_module.Exemplar(
            9, timestamp, {'trace_id': 'dead', 'span_id': 'beef'}
        )
        ex_99 = aggregation_data_module.Exemplar(
            99, timestamp, {'trace_id': 'dead', 'span_id': 'bef0'}
        )
        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=50,
            count_data=99,
            sum_of_sqd_deviations=80850.0,
            counts_per_bucket=[0, 9, 90, 0],
            bounds=[1, 10, 100],
            exemplars=[None, ex_9, ex_99, None],
        )
        converted_point = dist_agg_data.to_point(timestamp)
        self.assertTrue(isinstance(converted_point.value,
                                   value.ValueDistribution))
        self.assertEqual(converted_point.value.count, 99)
        self.assertEqual(converted_point.value.sum, 4950)
        self.assertEqual(converted_point.value.sum_of_squared_deviation,
                         80850.0)
        self.assertEqual([bb.count for bb in converted_point.value.buckets],
                         [0, 9, 90, 0])
        self.assertEqual(converted_point.value.bucket_options.type_.bounds,
                         [1, 10, 100])
        self.assertTrue(
            exemplars_equal(
                ex_9,
                converted_point.value.buckets[1].exemplar))
        self.assertTrue(
            exemplars_equal(
                ex_99,
                converted_point.value.buckets[2].exemplar))

    def test_constructor_with_exemplar(self):
        timestamp = time.time()
        attachments = {"One": "one", "Two": "two"}
        exemplars = [
            aggregation_data_module.Exemplar(.07, timestamp, attachments),
            aggregation_data_module.Exemplar(.7, timestamp, attachments),
            aggregation_data_module.Exemplar(7, timestamp, attachments)
        ]
        mean_data = 2.59
        count_data = 3
        sum_of_sqd_deviations = mock.Mock()
        counts_per_bucket = [1, 1, 1]
        bounds = [1.0 / 2.0, 1]

        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            exemplars=exemplars,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds)

        self.assertEqual(dist_agg_data.mean_data, mean_data)
        self.assertEqual(dist_agg_data.count_data, count_data)
        self.assertEqual(dist_agg_data.sum_of_sqd_deviations,
                         sum_of_sqd_deviations)
        self.assertEqual(dist_agg_data.counts_per_bucket, counts_per_bucket)
        self.assertEqual(dist_agg_data.bounds, bounds)
        self.assertEqual(dist_agg_data.sum, mean_data * count_data)
        for ii, ex in enumerate(exemplars):
            self.assertEqual(dist_agg_data.exemplars[ii], ex)
    def __init__(self,
                 boundaries=None,
                 distribution=None,
                 aggregation_type=Type.DISTRIBUTION):
        if boundaries:
            if not all(boundaries[ii] < boundaries[ii + 1]
                       for ii in range(len(boundaries) - 1)):
                raise ValueError("bounds must be sorted in increasing order")
            for ii, bb in enumerate(boundaries):
                if bb > 0:
                    break
            else:
                ii += 1
            if ii:
                logger.warning("Dropping %s non-positive bucket boundaries",
                               ii)
            boundaries = boundaries[ii:]

        super(DistributionAggregation,
              self).__init__(buckets=boundaries,
                             aggregation_type=aggregation_type)
        self._boundaries = bucket_boundaries.BucketBoundaries(boundaries)
        self._distribution = distribution or {}
        self.aggregation_data = aggregation_data.DistributionAggregationData(
            0, 0, 0, None, boundaries)
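
# A standalone sketch of just the boundary-cleaning step above, showing the
# for/else bookkeeping: unsorted bounds are rejected, and leading
# non-positive bounds are counted and dropped. This mirrors the logic for a
# non-empty boundary list; it is not the class itself.
def clean_boundaries(boundaries):
    if not all(boundaries[ii] < boundaries[ii + 1]
               for ii in range(len(boundaries) - 1)):
        raise ValueError("bounds must be sorted in increasing order")
    for ii, bb in enumerate(boundaries):
        if bb > 0:
            break       # ii is the index of the first positive bound
    else:
        ii += 1         # no positive bound at all; drop everything
    return boundaries[ii:]

assert clean_boundaries([-2, 0, 1, 2]) == [1, 2]  # two non-positive bounds dropped
assert clean_boundaries([1, 2, 3]) == [1, 2, 3]   # already valid, unchanged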
    def test_add_sample(self):
        mean_data = 1.0
        count_data = 0
        _min = 0
        _max = 1
        sum_of_sqd_deviations = 2
        counts_per_bucket = [1, 1, 1, 1]
        bounds = [0, 0.5, 1, 1.5]

        value = 3

        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            min_=_min,
            max_=_max,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds)

        dist_agg_data.add_sample(value=value)
        self.assertEqual(0, dist_agg_data.min)
        self.assertEqual(3, dist_agg_data.max)
        self.assertEqual(1, dist_agg_data.count_data)
        self.assertEqual(value, dist_agg_data.mean_data)

        count_data = 1
        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            min_=_min,
            max_=_max,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds)

        dist_agg_data.add_sample(value=value)
        self.assertEqual(2, dist_agg_data.count_data)
        self.assertEqual(2.0, dist_agg_data.mean_data)
        self.assertEqual(4.0, dist_agg_data.sum_of_sqd_deviations)
        self.assertIsNot(0, dist_agg_data.count_data)

        value_2 = -1
        dist_agg_data.add_sample(value=value_2)
        self.assertEqual(value_2, dist_agg_data.min)

    def test_increment_bucket_count(self):
        mean_data = mock.Mock()
        count_data = mock.Mock()
        sum_of_sqd_deviations = mock.Mock()
        counts_per_bucket = [0]
        bounds = []

        value = 1

        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds)

        dist_agg_data.increment_bucket_count(value=value)
        self.assertEqual([1], dist_agg_data.counts_per_bucket)

        counts_per_bucket = [1, 1, 1]
        bounds = [1.0 / 4.0, 3.0 / 2.0]

        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds)

        dist_agg_data.increment_bucket_count(value=value)
        self.assertEqual([1, 2, 1], dist_agg_data.counts_per_bucket)

        bounds = [1.0 / 4.0, 1.0 / 2.0]

        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds)

        dist_agg_data.increment_bucket_count(value=value)
        self.assertEqual([1, 2, 2], dist_agg_data.counts_per_bucket)
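
# A minimal sketch of the bucket-selection rule these assertions assume
# (len(counts_per_bucket) == len(bounds) + 1): a value lands in the first
# bucket whose upper bound exceeds it, and in the overflow bucket otherwise.
# This is a hand check of the expected indices, not the library code.
def bucket_index(bounds, value):
    for ii, bound in enumerate(bounds):
        if value < bound:
            return ii
    return len(bounds)  # overflow bucket

assert bucket_index([], 1) == 0                       # no bounds: single bucket
assert bucket_index([1.0 / 4.0, 3.0 / 2.0], 1) == 1   # 0.25 <= 1 < 1.5
assert bucket_index([1.0 / 4.0, 1.0 / 2.0], 1) == 2   # 1 >= 0.5: overflow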
    def __init__(self,
                 boundaries=None,
                 distribution=None,
                 aggregation_type=Type.DISTRIBUTION):
        super(DistributionAggregation,
              self).__init__(buckets=boundaries,
                             aggregation_type=aggregation_type)
        self._boundaries = bucket_boundaries.BucketBoundaries(boundaries)
        self._distribution = distribution or {}
        self.aggregation_data = aggregation_data.DistributionAggregationData(
            0, 0, float('inf'), float('-inf'), 0, None, boundaries)

    def test_init_bad_exemplars(self):
        # Check that we don't allow exemplars without bounds
        with self.assertRaises(ValueError):
            aggregation_data_module.DistributionAggregationData(
                mean_data=mock.Mock(),
                count_data=mock.Mock(),
                sum_of_sqd_deviations=mock.Mock(),
                counts_per_bucket=mock.Mock(),
                bounds=None,
                exemplars=[mock.Mock()])

        # Check that the exemplar count matches the bucket count
        with self.assertRaises(ValueError):
            aggregation_data_module.DistributionAggregationData(
                mean_data=mock.Mock(),
                count_data=mock.Mock(),
                sum_of_sqd_deviations=mock.Mock(),
                counts_per_bucket=mock.Mock(),
                bounds=[0, 1],
                exemplars=[mock.Mock(), mock.Mock()])

    def test_to_point_no_histogram(self):
        timestamp = datetime(1970, 1, 1)
        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=50,
            count_data=99,
            sum_of_sqd_deviations=80850.0,
        )
        converted_point = dist_agg_data.to_point(timestamp)
        self.assertTrue(isinstance(converted_point.value,
                                   value.ValueDistribution))
        self.assertEqual(converted_point.value.count, 99)
        self.assertEqual(converted_point.value.sum, 4950)
        self.assertEqual(converted_point.value.sum_of_squared_deviation,
                         80850.0)
        self.assertIsNone(converted_point.value.buckets)
        self.assertIsNone(converted_point.value.bucket_options._type)

    def test_create_timeseries_from_distribution(self):
        """Check for explicit 0-bound bucket for SD export."""
        agg = aggregation_module.DistributionAggregation(
            aggregation_type=aggregation_module.Type.DISTRIBUTION)

        view = view_module.View(
            name="example.org/test_view",
            description="example.org/test_view",
            columns=['tag_key'],
            measure=mock.Mock(),
            aggregation=agg,
        )

        v_data = view_data_module.ViewData(
            view=view,
            start_time=TEST_TIME_STR,
            end_time=TEST_TIME_STR,
        )

        # Aggregation over (10 * range(10)) for buckets [2, 4, 6, 8]
        dad = aggregation_data_module.DistributionAggregationData(
            mean_data=4.5,
            count_data=100,
            sum_of_sqd_deviations=825,
            counts_per_bucket=[20, 20, 20, 20, 20],
            bounds=[2, 4, 6, 8],
            exemplars={mock.Mock()
                       for ii in range(5)})
        v_data._tag_value_aggregation_data_map = {('tag_value', ): dad}

        v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)

        exporter = stackdriver.StackdriverStatsExporter()
        time_series_list = exporter.create_time_series_list(v_data)
        self.assertEqual(len(time_series_list), 1)
        [time_series] = time_series_list

        self.check_labels(time_series.metric.labels, {'tag_key': 'tag_value'},
                          include_opencensus=True)
        self.assertEqual(len(time_series.points), 1)
        [point] = time_series.points
        dv = point.value.distribution_value
        self.assertEqual(100, dv.count)
        self.assertEqual(825.0, dv.sum_of_squared_deviation)
        self.assertEqual([0, 20, 20, 20, 20, 20], dv.bucket_counts)
        self.assertEqual([0, 2, 4, 6, 8],
                         dv.bucket_options.explicit_buckets.bounds)
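
# A minimal sketch of the transformation this test checks, assuming (not
# quoting) the exporter's behavior: Stackdriver explicit buckets include an
# underflow bucket, so a 0 bound and a 0 count are prepended to the
# OpenCensus distribution before export.
def add_zero_bucket(bounds, bucket_counts):
    return [0] + list(bounds), [0] + list(bucket_counts)

assert add_zero_bucket([2, 4, 6, 8], [20, 20, 20, 20, 20]) == \
    ([0, 2, 4, 6, 8], [0, 20, 20, 20, 20, 20])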

    def test_create_timeseries_from_distribution(self):
        """Check for explicit 0-bound bucket for SD export."""

        v_data = mock.Mock(spec=view_data_module.ViewData)
        v_data.view.name = "example.org/test_view"
        v_data.view.columns = ['tag_key']
        v_data.view.aggregation.aggregation_type = \
            aggregation_module.Type.DISTRIBUTION
        v_data.start_time = TEST_TIME
        v_data.end_time = TEST_TIME

        # Aggregation over (10 * range(10)) for buckets [2, 4, 6, 8]
        dad = aggregation_data_module.DistributionAggregationData(
            mean_data=4.5,
            count_data=100,
            min_=0,
            max_=9,
            sum_of_sqd_deviations=825,
            counts_per_bucket=[20, 20, 20, 20, 20],
            bounds=[2, 4, 6, 8],
            exemplars={mock.Mock()
                       for ii in range(5)})
        v_data.tag_value_aggregation_data_map = {('tag_value', ): dad}

        exporter = stackdriver.StackdriverStatsExporter(
            options=mock.Mock(),
            client=mock.Mock(),
        )
        time_series_list = exporter.create_time_series_list(v_data, "", "")
        self.assertEqual(len(time_series_list), 1)
        [time_series] = time_series_list

        self.assertCorrectLabels(time_series.metric.labels,
                                 {'tag_key': 'tag_value'},
                                 include_opencensus=True)
        self.assertEqual(len(time_series.points), 1)
        [point] = time_series.points
        dv = point.value.distribution_value
        self.assertEqual(100, dv.count)
        self.assertEqual(4.5, dv.mean)
        self.assertEqual(825.0, dv.sum_of_squared_deviation)
        self.assertEqual([0, 20, 20, 20, 20, 20], dv.bucket_counts)
        self.assertEqual([0, 2, 4, 6, 8],
                         dv.bucket_options.explicit_buckets.bounds)
    def test_constructor_with_exemplar(self):
        timestamp = time.time()
        attachments = {"One": "one", "Two": "two"}
        exemplar_1 = aggregation_data_module.Exemplar(4, timestamp,
                                                      attachments)
        exemplar_2 = aggregation_data_module.Exemplar(5, timestamp,
                                                      attachments)
        mean_data = 1
        count_data = 0
        _min = 0
        _max = 1
        sum_of_sqd_deviations = mock.Mock()
        counts_per_bucket = [1, 1, 1, 1]
        bounds = [0, 1.0 / 2.0, 1]
        exemplars = [exemplar_1, exemplar_2]

        dist_agg_data = aggregation_data_module.DistributionAggregationData(
            mean_data=mean_data,
            count_data=count_data,
            min_=_min,
            max_=_max,
            sum_of_sqd_deviations=sum_of_sqd_deviations,
            exemplars=exemplars,
            counts_per_bucket=counts_per_bucket,
            bounds=bounds)

        self.assertEqual(1, dist_agg_data.mean_data)
        self.assertEqual(0, dist_agg_data.count_data)
        self.assertEqual(0, dist_agg_data.min)
        self.assertEqual(1, dist_agg_data.max)
        self.assertEqual(sum_of_sqd_deviations,
                         dist_agg_data.sum_of_sqd_deviations)
        self.assertEqual([1, 1, 1, 1], dist_agg_data.counts_per_bucket)
        self.assertEqual([exemplar_1, exemplar_2], dist_agg_data.exemplars[3])
        self.assertEqual([0, 1.0 / 2.0, 1], dist_agg_data.bounds)

        self.assertIsNotNone(dist_agg_data.sum)
        self.assertEqual(0, dist_agg_data.variance)

    def test_create_timeseries_from_distribution(self):
        """Check for explicit 0-bound bucket for SD export."""

        v_data = mock.Mock(spec=view_data_module.ViewData)
        v_data.view.name = "example.org/test_view"
        v_data.start_time = '2018-11-21T00:12:34.56Z'
        v_data.end_time = '2018-11-21T00:23:45.67Z'

        # Aggregation over (10 * range(10)) for buckets [2, 4, 6, 8]
        dad = aggregation_data_module.DistributionAggregationData(
            mean_data=4.5,
            count_data=100,
            min_=0,
            max_=9,
            sum_of_sqd_deviations=825,
            counts_per_bucket=[20, 20, 20, 20, 20],
            bounds=[2, 4, 6, 8],
            exemplars={mock.Mock()
                       for ii in range(5)})
        v_data.tag_value_aggregation_data_map = {'tag_key': dad}

        exporter = stackdriver.StackdriverStatsExporter(
            options=mock.Mock(),
            client=mock.Mock(),
        )
        time_series = exporter.create_time_series_list(v_data, "", "")

        self.assertEqual(len(time_series.points), 1)
        [point] = time_series.points
        dv = point.value.distribution_value
        self.assertEqual(100, dv.count)
        self.assertEqual(4.5, dv.mean)
        self.assertEqual(825.0, dv.sum_of_squared_deviation)
        self.assertEqual([0, 20, 20, 20, 20, 20], dv.bucket_counts)
        self.assertEqual([0, 2, 4, 6, 8],
                         dv.bucket_options.explicit_buckets.bounds)
    def new_aggregation_data(self, measure=None):
        """Get a new AggregationData for this aggregation."""
        return aggregation_data.DistributionAggregationData(
            0, 0, 0, None, self._boundaries)