def test_build_filtered_aggregator(self):
        filter_ = filters.Filter(dimension="dim", value="val")
        agg_input = {
            "agg1": aggregators.filtered(filter_, aggregators.count("metric1")),
            "agg2": aggregators.filtered(filter_, aggregators.longsum("metric2")),
            "agg3": aggregators.filtered(filter_, aggregators.doublesum("metric3")),
            "agg4": aggregators.filtered(filter_, aggregators.min("metric4")),
            "agg5": aggregators.filtered(filter_, aggregators.max("metric5")),
            "agg6": aggregators.filtered(filter_, aggregators.hyperunique("metric6")),
            "agg7": aggregators.filtered(filter_, aggregators.cardinality("dim1")),
            "agg8": aggregators.filtered(filter_, aggregators.cardinality(["dim1", "dim2"], by_row=True)),
        }
        base = {"type": "filtered", "filter": {"type": "selector", "dimension": "dim", "value": "val"}}

        aggs = [
            {"name": "agg1", "type": "count", "fieldName": "metric1"},
            {"name": "agg2", "type": "longSum", "fieldName": "metric2"},
            {"name": "agg3", "type": "doubleSum", "fieldName": "metric3"},
            {"name": "agg4", "type": "min", "fieldName": "metric4"},
            {"name": "agg5", "type": "max", "fieldName": "metric5"},
            {"name": "agg6", "type": "hyperUnique", "fieldName": "metric6"},
            {"name": "agg7", "type": "cardinality", "fieldNames": ["dim1"], "byRow": False},
            {"name": "agg8", "type": "cardinality", "fieldNames": ["dim1", "dim2"], "byRow": True},
        ]
        expected = []
        for agg in aggs:
            exp = deepcopy(base)
            exp.update({"aggregator": agg})
            expected.append(exp)

        built_agg = aggregators.build_aggregators(agg_input)
        # Sort both lists by the inner aggregator's name before comparing.
        actual = sorted(built_agg, key=lambda k: itemgetter("name")(itemgetter("aggregator")(k)))
        expected = sorted(expected, key=lambda k: itemgetter("name")(itemgetter("aggregator")(k)))
        assert actual == expected
 def test_filtered_aggregator(self):
     filter_ = filters.Filter(dimension='dim', value='val')
     aggs = [aggregators.count('metric1'),
             aggregators.longsum('metric2'),
             aggregators.doublesum('metric3'),
             aggregators.doublemin('metric4'),
             aggregators.doublemax('metric5'),
             aggregators.hyperunique('metric6'),
             aggregators.cardinality('dim1'),
             aggregators.cardinality(['dim1', 'dim2'], by_row=True),
             aggregators.thetasketch('dim1'),
             aggregators.thetasketch('metric7'),
             aggregators.thetasketch('metric8', isinputthetasketch=True, size=8192)
            ]
     for agg in aggs:
         expected = {
             'type': 'filtered',
             'filter': {
                 'type': 'selector',
                 'dimension': 'dim',
                 'value': 'val'
             },
             'aggregator': agg
         }
         actual = aggregators.filtered(filter_, agg)
         assert actual == expected
Example #3
    def __init__(self, name, count_filter=None):
        super(_HelperCalculation, self).__init__()
        self.outer_aggregations = {}
        # If the unique aggregation should count *all* of the unique values,
        # we can just use a simple "count" on the outer groupby
        if not count_filter or isinstance(count_filter, EmptyFilter):
            self.outer_aggregations[name] = count('count')
        else:
            # If the unique aggregation should only count unique values when
            # they meet a specific criteria, then we need to do more work.
            # Conceptually, to include a row if it meets a specific filter, we
            # would store a 1 for that row and sum the new column in the outer
            # groupby. Unfortunately, druid does not provide an aggregator that
            # returns a constant, so we must use a post aggregator on the inner
            # groupby to convert the value into a constant 1.

            # Choose an aggregation that is guaranteed to not be 0
            inner_agg = filtered_aggregator(filter=count_filter,
                                            agg=count('count'))
            inner_agg_key = '%s%s_agg' % (name, self.SUFFIX)
            self.add_aggregation(inner_agg_key, inner_agg)

            # Divide the value by itself during post aggregation so that the
            # inner groupby returns a 1 or 0 for this row
            const_formula = '%s / %s' % (inner_agg_key, inner_agg_key)
            post_agg_key = '%s%s_post_agg' % (name, self.SUFFIX)
            self.add_post_aggregation_from_formula(post_agg_key, const_formula)

            # Sum the constant column in the outer groupby to get the exact
            # unique count for a filtered set
            self.outer_aggregations[name] = longsum(post_agg_key)
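To make the constant-1 trick above concrete, here is a minimal sketch (not from the original source) of the three pieces it wires together, written with plain pydruid helpers. The calculation name 'users', the suffix '_unique', and the is_active filter are hypothetical, and filtered_aggregator is assumed to be pydruid's aggregators.filtered.

from pydruid.utils.aggregators import count, filtered as filtered_aggregator, longsum
from pydruid.utils.filters import Dimension

# Hypothetical name, suffix, and count filter for illustration only.
name, SUFFIX = 'users', '_unique'
count_filter = Dimension('is_active') == 'true'

# Inner groupby: a filtered count that is non-zero exactly when the row matches.
inner_agg_key = '%s%s_agg' % (name, SUFFIX)
inner_agg = filtered_aggregator(filter=count_filter, agg=count('count'))

# Inner post-aggregation: dividing the value by itself turns any non-zero count into 1.
post_agg_key = '%s%s_post_agg' % (name, SUFFIX)
const_formula = '%s / %s' % (inner_agg_key, inner_agg_key)

# Outer groupby: summing the constant column yields the exact filtered unique count.
outer_aggregations = {name: longsum(post_agg_key)}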
Example #4
    def _parse_metric(self):
        if self._metric == 'uv':
            return {
                "aggregations": {
                    "result": cardinality(self._field)
                },
                "metric": "result"
            }

        elif self._metric == 'exact_uv':
            return {
                "aggregations": {
                    "result": thetasketch(self._field)
                },
                "metric": "result"
            }

        elif self._metric == 'pv':
            return {
                "aggregations": {
                    "result": count(self._field)
                },
                "metric": "result"
            }

        elif self._metric == 'longsum':
            return {
                "aggregations": {
                    "result": longsum(self._field)
                },
                "metric": "result"
            }

        else:
            raise ParseArgException("Parse metric failed")
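The 'aggregations'/'metric' pair returned above lines up with the keyword arguments of a pydruid topN query, so a caller could plausibly splat it straight into the client. A minimal sketch under that assumption; the broker URL, datasource, dimension, and the parser instance are illustrative, not from the original.

from pydruid.client import PyDruid

client = PyDruid('http://druid-broker:8082', 'druid/v2')  # hypothetical broker URL

query_args = parser._parse_metric()  # e.g. {"aggregations": {"result": ...}, "metric": "result"}
top = client.topn(
    datasource='events',                 # hypothetical datasource
    granularity='all',
    intervals='2023-01-01/2023-02-01',
    dimension='country',                 # hypothetical dimension to rank
    threshold=10,
    **query_args,
)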
 def test_filtered_aggregator(self):
     filter_ = filters.Filter(dimension="dim", value="val")
     aggs = [
         aggregators.count("metric1"),
         aggregators.longsum("metric2"),
         aggregators.doublesum("metric3"),
         aggregators.doublemin("metric4"),
         aggregators.doublemax("metric5"),
         aggregators.hyperunique("metric6"),
         aggregators.cardinality("dim1"),
         aggregators.cardinality(["dim1", "dim2"], by_row=True),
         aggregators.thetasketch("dim1"),
         aggregators.thetasketch("metric7"),
         aggregators.thetasketch("metric8",
                                 isinputthetasketch=True,
                                 size=8192),
     ]
     for agg in aggs:
         expected = {
             "type": "filtered",
             "filter": {
                 "type": "selector",
                 "dimension": "dim",
                 "value": "val"
             },
             "aggregator": agg,
         }
         actual = aggregators.filtered(filter_, agg)
         assert actual == expected
    def test_build_aggregators(self):
        agg_input = {
            'agg1': aggregators.count('metric1'),
            'agg2': aggregators.longsum('metric2'),
            'agg3': aggregators.doublesum('metric3'),
            'agg4': aggregators.doublemin('metric4'),
            'agg5': aggregators.doublemax('metric5'),
            'agg6': aggregators.hyperunique('metric6'),
            'agg7': aggregators.cardinality('dim1'),
            'agg8': aggregators.cardinality(['dim1', 'dim2'], by_row=True),
            'agg9': aggregators.thetasketch('dim1'),
            'agg10': aggregators.thetasketch('metric7'),
            'agg11': aggregators.thetasketch('metric8', isinputthetasketch=True, size=8192)
        }
        built_agg = aggregators.build_aggregators(agg_input)
        expected = [
            {'name': 'agg1', 'type': 'count', 'fieldName': 'metric1'},
            {'name': 'agg2', 'type': 'longSum', 'fieldName': 'metric2'},
            {'name': 'agg3', 'type': 'doubleSum', 'fieldName': 'metric3'},
            {'name': 'agg4', 'type': 'doubleMin', 'fieldName': 'metric4'},
            {'name': 'agg5', 'type': 'doubleMax', 'fieldName': 'metric5'},
            {'name': 'agg6', 'type': 'hyperUnique', 'fieldName': 'metric6'},
            {'name': 'agg7', 'type': 'cardinality', 'fieldNames': ['dim1'], 'byRow': False},
            {'name': 'agg8', 'type': 'cardinality', 'fieldNames': ['dim1', 'dim2'], 'byRow': True},
            {'name': 'agg9', 'type': 'thetaSketch', 'fieldName': 'dim1', 'isInputThetaSketch': False, 'size': 16384},
            {'name': 'agg10', 'type': 'thetaSketch', 'fieldName': 'metric7', 'isInputThetaSketch': False, 'size': 16384},
            {'name': 'agg11', 'type': 'thetaSketch', 'fieldName': 'metric8', 'isInputThetaSketch': True, 'size': 8192}

        ]
        assert (sorted(built_agg, key=itemgetter('name')) ==
                sorted(expected, key=itemgetter('name')))
Example #7
 def druid_timeseries_query_args( self ):
     return {
         'datasource': 'banner_activity_minutely',
         'granularity': self._granularity,
         'intervals': self._interval,
         'aggregations':  { 'impressions': longsum( 'normalized_request_count' ) },
         'filter': self.druid_filter()
     }
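Judging by the method name, these arguments are meant to be splatted into a pydruid timeseries query. A rough usage sketch; the broker URL and the report instance are assumptions, not part of the original.

from pydruid.client import PyDruid

client = PyDruid('http://druid-broker:8082', 'druid/v2')  # hypothetical broker URL

# report stands in for an instance of the class defining druid_timeseries_query_args().
result = client.timeseries(**report.druid_timeseries_query_args())
df = result.export_pandas()  # one row per granularity bucket, with an 'impressions' column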
Example #8
 def test_build_aggregators(self):
     agg_input = {
         'agg1': aggregators.count('metric1'),
         'agg2': aggregators.longsum('metric2'),
         'agg3': aggregators.doublesum('metric3'),
         'agg4': aggregators.min('metric4'),
         'agg5': aggregators.max('metric5'),
         'agg6': aggregators.hyperunique('metric6'),
         'agg7': aggregators.cardinality('dim1'),
         'agg8': aggregators.cardinality(['dim1', 'dim2'], by_row=True)
     }
     built_agg = aggregators.build_aggregators(agg_input)
     expected = [
         {
             'name': 'agg1',
             'type': 'count',
             'fieldName': 'metric1'
         },
         {
             'name': 'agg2',
             'type': 'longSum',
             'fieldName': 'metric2'
         },
         {
             'name': 'agg3',
             'type': 'doubleSum',
             'fieldName': 'metric3'
         },
         {
             'name': 'agg4',
             'type': 'min',
             'fieldName': 'metric4'
         },
         {
             'name': 'agg5',
             'type': 'max',
             'fieldName': 'metric5'
         },
         {
             'name': 'agg6',
             'type': 'hyperUnique',
             'fieldName': 'metric6'
         },
         {
             'name': 'agg7',
             'type': 'cardinality',
             'fieldNames': ['dim1'],
             'byRow': False
         },
         {
             'name': 'agg8',
             'type': 'cardinality',
             'fieldNames': ['dim1', 'dim2'],
             'byRow': True
         },
     ]
     assert (sorted(built_agg, key=itemgetter('name')) == sorted(
         expected, key=itemgetter('name')))
Example #9
    def __init__(self, dimension, field):
        super(AverageCalculation, self).__init__(dimension, field, self.SUFFIX)
        # Calculate the count for this field.
        count_key = '%s_event_count%s' % (field, self.SUFFIX)
        count_agg = filtered_aggregator(filter=self.dimension_filter,
                                        agg=longsum('count'))
        self.add_aggregation(count_key, count_agg)

        avg_formula = '%s / %s' % (self.sum_key, count_key)
        self.add_post_aggregation_from_formula(field, avg_formula)
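In plain pydruid terms, the post-aggregation above is simply the sum field divided by the filtered event count. A rough equivalent built from pydruid's arithmetic post-aggregators, assuming hypothetical key names and that the installed pydruid supports '/' on Field the same way '+' is used in Example #19 further down this page.

from pydruid.utils.postaggregator import Field

# Hypothetical keys standing in for sum_key and count_key from the class above.
sum_key = 'revenue_sum__avg'
count_key = 'revenue_event_count__avg'

# Field arithmetic emits Druid's "arithmetic" post-aggregation: sum / count.
average = Field(sum_key) / Field(count_key)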
Example #10
    def execute(self, context):
        client = DruidBrokerHook(
            druid_broker_conn_id=self.conn_id).get_client()

        self.log.info("Getting raw data from Druid")

        stats = client.groupby(
            datasource='Bids',
            dimensions=['BackendName'],
            granularity='hour',
            intervals=self.intervals,
            aggregations={
                'bids': longsum('Bids'),
                'wins': longsum('Wins'),
            }).export_pandas().rename(columns={
                'timestamp': 'hour',
                'BackendName': 'backend_name',
            })

        # Keep only the hour ('HH') portion of the ISO timestamp string.
        stats['hour'] = stats.hour.apply(lambda x: x[11:13])
        self.log.info("Storing raw data from Druid")
        stats.to_csv(self.stats_file, index=False)
Example #11
    def test_build_filtered_aggregator(self):
        filter_ = filters.Filter(dimension='dim', value='val')
        agg_input = {
            'agg1': aggregators.filtered(filter_,
                                         aggregators.count('metric1')),
            'agg2': aggregators.filtered(filter_,
                                         aggregators.longsum('metric2')),
            'agg3': aggregators.filtered(filter_,
                                         aggregators.doublesum('metric3')),
            'agg4': aggregators.filtered(filter_,
                                         aggregators.min('metric4')),
            'agg5': aggregators.filtered(filter_,
                                         aggregators.max('metric5')),
            'agg6': aggregators.filtered(filter_,
                                         aggregators.hyperunique('metric6')),
            'agg7': aggregators.filtered(filter_,
                                         aggregators.cardinality('dim1')),
            'agg8': aggregators.filtered(filter_,
                                         aggregators.cardinality(['dim1', 'dim2'], by_row=True)),
        }
        base = {
            'type': 'filtered',
            'filter': {
                'type': 'selector',
                'dimension': 'dim',
                'value': 'val'
            }
        }

        aggs = [
            {'name': 'agg1', 'type': 'count', 'fieldName': 'metric1'},
            {'name': 'agg2', 'type': 'longSum', 'fieldName': 'metric2'},
            {'name': 'agg3', 'type': 'doubleSum', 'fieldName': 'metric3'},
            {'name': 'agg4', 'type': 'min', 'fieldName': 'metric4'},
            {'name': 'agg5', 'type': 'max', 'fieldName': 'metric5'},
            {'name': 'agg6', 'type': 'hyperUnique', 'fieldName': 'metric6'},
            {'name': 'agg7', 'type': 'cardinality', 'fieldNames': ['dim1'], 'byRow': False},
            {'name': 'agg8', 'type': 'cardinality', 'fieldNames': ['dim1', 'dim2'], 'byRow': True},
        ]
        expected = []
        for agg in aggs:
            exp = deepcopy(base)
            exp.update({'aggregator': agg})
            expected.append(exp)

        built_agg = aggregators.build_aggregators(agg_input)
        # Sort both lists by the inner aggregator's name before comparing.
        actual = sorted(built_agg, key=lambda k: itemgetter('name')(
            itemgetter('aggregator')(k)))
        expected = sorted(expected, key=lambda k: itemgetter('name')(
            itemgetter('aggregator')(k)))
        assert actual == expected
Example #12
    def _parse_metric(self):
        if self._metric == 'uv':
            return {"aggregations": {"result": cardinality(self._field)}}

        elif self._metric == 'pv':
            return {"aggregations": {"result": count(self._field)}}

        elif self._metric == 'longsum':
            return {"aggregations": {"result": longsum(self._field)}}

        elif self._metric == 'doublesum':
            return {"aggregations": {"result": doublesum(self._field)}}

        else:
            raise ParseArgException("Parse metric failed")
Example #13
 def test_build_aggregators(self):
     agg_input = {
         'agg1': aggregators.count('metric1'),
         'agg2': aggregators.longsum('metric2'),
         'agg3': aggregators.doublesum('metric3'),
         'agg4': aggregators.min('metric4'),
         'agg5': aggregators.max('metric5'),
         'agg6': aggregators.hyperunique('metric6')
     }
     built_agg = aggregators.build_aggregators(agg_input)
     expected = [
         {'name': 'agg1', 'type': 'count', 'fieldName': 'metric1'},
         {'name': 'agg2', 'type': 'longSum', 'fieldName': 'metric2'},
         {'name': 'agg3', 'type': 'doubleSum', 'fieldName': 'metric3'},
         {'name': 'agg4', 'type': 'min', 'fieldName': 'metric4'},
         {'name': 'agg5', 'type': 'max', 'fieldName': 'metric5'},
         {'name': 'agg6', 'type': 'hyperUnique', 'fieldName': 'metric6'},
     ]
     assert (sorted(built_agg, key=itemgetter('name')) ==
             sorted(expected, key=itemgetter('name')))
Example #14
 def test_filtered_aggregator(self):
     filter_ = filters.Filter(dimension='dim', value='val')
     aggs = [aggregators.count('metric1'),
             aggregators.longsum('metric2'),
             aggregators.doublesum('metric3'),
             aggregators.min('metric4'),
             aggregators.max('metric5'),
             aggregators.hyperunique('metric6')]
     for agg in aggs:
         expected = {
             'type': 'filtered',
             'filter': {
                 'type': 'selector',
                 'dimension': 'dim',
                 'value': 'val'
             },
             'aggregator': agg
         }
         actual = aggregators.filtered(filter_, agg)
         assert actual == expected
 def test_filtered_aggregator(self):
     filter_ = filters.Filter(dimension="dim", value="val")
     aggs = [
         aggregators.count("metric1"),
         aggregators.longsum("metric2"),
         aggregators.doublesum("metric3"),
         aggregators.min("metric4"),
         aggregators.max("metric5"),
         aggregators.hyperunique("metric6"),
         aggregators.cardinality("dim1"),
         aggregators.cardinality(["dim1", "dim2"], by_row=True),
     ]
     for agg in aggs:
         expected = {
             "type": "filtered",
             "filter": {"type": "selector", "dimension": "dim", "value": "val"},
             "aggregator": agg,
         }
         actual = aggregators.filtered(filter_, agg)
         assert actual == expected
 def test_build_aggregators(self):
     agg_input = {
         "agg1": aggregators.count("metric1"),
         "agg2": aggregators.longsum("metric2"),
         "agg3": aggregators.doublesum("metric3"),
         "agg4": aggregators.min("metric4"),
         "agg5": aggregators.max("metric5"),
         "agg6": aggregators.hyperunique("metric6"),
         "agg7": aggregators.cardinality("dim1"),
         "agg8": aggregators.cardinality(["dim1", "dim2"], by_row=True),
     }
     built_agg = aggregators.build_aggregators(agg_input)
     expected = [
         {"name": "agg1", "type": "count", "fieldName": "metric1"},
         {"name": "agg2", "type": "longSum", "fieldName": "metric2"},
         {"name": "agg3", "type": "doubleSum", "fieldName": "metric3"},
         {"name": "agg4", "type": "min", "fieldName": "metric4"},
         {"name": "agg5", "type": "max", "fieldName": "metric5"},
         {"name": "agg6", "type": "hyperUnique", "fieldName": "metric6"},
         {"name": "agg7", "type": "cardinality", "fieldNames": ["dim1"], "byRow": False},
         {"name": "agg8", "type": "cardinality", "fieldNames": ["dim1", "dim2"], "byRow": True},
     ]
     assert sorted(built_agg, key=itemgetter("name")) == sorted(expected, key=itemgetter("name"))
Example #17
    def add_count_for_field(self, field):
        assert field in self.aggregations or field in self.post_aggregations, (
            'Cannot add count for field that does not exist: %s' % field)

        agg_filter = None
        if field in self.aggregations:
            agg_filter = build_filter_from_aggregation(
                self.aggregations[field])
        else:
            # Collect the aggregations that produce the post-aggregations value.
            aggregations = extract_aggregations_for_post_aggregation(
                field, self.aggregations, self.post_aggregations)
            agg_filter = build_query_filter_from_aggregations(aggregations)

        # Count the number of rows that stream through the aggregations computed
        # for this field.
        count_agg = longsum('count')

        # If an aggregation filter exists, use it to limit the count.
        if agg_filter is not None:
            count_agg = filtered_aggregator(filter=agg_filter, agg=count_agg)

        key = self.count_field_name(field)
        self.add_aggregation(key, count_agg)
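For reference, when an aggregation filter is present, the count_agg built above serializes to the same filtered-aggregator shape exercised by the tests earlier on this page. A small sketch with a hypothetical selector filter, assuming filtered_aggregator is pydruid's aggregators.filtered and longsum is pydruid's aggregators.longsum.

from pydruid.utils.aggregators import filtered as filtered_aggregator, longsum
from pydruid.utils.filters import Filter

agg_filter = Filter(dimension='campaign', value='summer')  # hypothetical filter
count_agg = filtered_aggregator(filter=agg_filter, agg=longsum('count'))
# count_agg == {
#     'type': 'filtered',
#     'filter': {'type': 'selector', 'dimension': 'campaign', 'value': 'summer'},
#     'aggregator': {'type': 'longSum', 'fieldName': 'count'},
# }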
Example #18
 def test_build_aggregators(self):
     agg_input = {
         'agg1': aggregators.count('metric1'),
         'agg2': aggregators.longsum('metric2'),
         'agg3': aggregators.doublesum('metric3'),
         'agg4': aggregators.min('metric4'),
         'agg5': aggregators.max('metric5'),
         'agg6': aggregators.hyperunique('metric6'),
         'agg7': aggregators.cardinality('dim1'),
         'agg8': aggregators.cardinality(['dim1', 'dim2'], by_row=True)
     }
     built_agg = aggregators.build_aggregators(agg_input)
     expected = [
         {'name': 'agg1', 'type': 'count', 'fieldName': 'metric1'},
         {'name': 'agg2', 'type': 'longSum', 'fieldName': 'metric2'},
         {'name': 'agg3', 'type': 'doubleSum', 'fieldName': 'metric3'},
         {'name': 'agg4', 'type': 'min', 'fieldName': 'metric4'},
         {'name': 'agg5', 'type': 'max', 'fieldName': 'metric5'},
         {'name': 'agg6', 'type': 'hyperUnique', 'fieldName': 'metric6'},
         {'name': 'agg7', 'type': 'cardinality', 'fieldNames': ['dim1'], 'byRow': False},
         {'name': 'agg8', 'type': 'cardinality', 'fieldNames': ['dim1', 'dim2'], 'byRow': True},
     ]
     assert (sorted(built_agg, key=itemgetter('name')) ==
             sorted(expected, key=itemgetter('name')))
Example #19
def get_druid_data(dimensions=None, filter_list=[], filter_type="and",
                   order_by=["target_area_name"],
                   datasource=settings.DRUID_SPRAYDAY_DATASOURCE):
    """
    Runs a query against Druid, returns data with metrics
    Inputs:
        dimensions => list of dimensions to group by
        filter_list => list of list of things to filter with e.g.
                        filter_list=[['target_area_id', operator.ne, 1],
                                     ['sprayable', operator.eq, "true"],
                                     ['dimension', operator, "value"]])
        filter_type => type of Druid filter to perform,
        order_by => field(s) to order the data by
    """
    query = PyDruid(get_druid_broker_url(), 'druid/v2')
    params = dict(
        datasource=datasource,
        granularity='all',
        intervals=settings.DRUID_INTERVAL,
        aggregations={
            'num_not_sprayable': aggregators.filtered(
                filters.Filter(
                    type='and',
                    fields=[filters.Dimension('sprayable') == 'false']
                ),
                aggregators.longsum('count')
            ),
            'num_not_sprayed': aggregators.filtered(
                filters.Filter(
                    type='and',
                    fields=[filters.Dimension('sprayable') == 'true',
                            filters.Dimension('sprayed') ==
                            settings.MSPRAY_WAS_NOT_SPRAYED_VALUE]
                ),
                aggregators.longsum('count')
            ),
            'num_sprayed': aggregators.filtered(
                filters.Dimension('sprayed') ==
                settings.MSPRAY_WAS_SPRAYED_VALUE,
                aggregators.longsum('count')
            ),
            'num_new': aggregators.filtered(
                filters.Dimension('is_new') == 'true',
                aggregators.longsum('count')
            ),
            'num_new_no_duplicates': aggregators.filtered(
                filters.Filter(
                    type='and',
                    fields=[filters.Dimension('is_duplicate') == 'false',
                            filters.Dimension('is_new') == 'true']
                ),
                aggregators.longsum('count')
            ),
            'num_duplicate': aggregators.filtered(
                filters.Dimension('is_duplicate') == 'true',
                aggregators.longsum('count')
            ),
            'num_sprayed_no_duplicates': aggregators.filtered(
                filters.Filter(
                    type='and',
                    fields=[filters.Dimension('is_duplicate') == 'false',
                            filters.Dimension('sprayed') ==
                            settings.MSPRAY_WAS_SPRAYED_VALUE]
                ),
                aggregators.longsum('count')
            ),
            'num_not_sprayed_no_duplicates': aggregators.filtered(
                filters.Filter(
                    type='and',
                    fields=[filters.Dimension('is_duplicate') == 'false',
                            filters.Dimension('sprayable') == 'true',
                            filters.Dimension('sprayed') ==
                            settings.MSPRAY_WAS_NOT_SPRAYED_VALUE]
                ),
                aggregators.longsum('count')
            ),
            'num_sprayed_duplicates': aggregators.filtered(
                filters.Filter(
                    type='and',
                    fields=[filters.Dimension('is_duplicate') == 'true',
                            filters.Dimension('sprayable') == 'true',
                            filters.Dimension('sprayed') ==
                            settings.MSPRAY_WAS_SPRAYED_VALUE]
                ),
                aggregators.longsum('count')
            ),
            'num_not_sprayable_no_duplicates': aggregators.filtered(
                filters.Filter(
                    type='and',
                    fields=[filters.Dimension('is_duplicate') == 'false',
                            filters.Dimension('sprayable') == 'false']
                ),
                aggregators.longsum('count')
            ),
            'num_refused': aggregators.filtered(
                filters.Filter(
                    type='and',
                    fields=[filters.Dimension('is_duplicate') == 'false',
                            filters.Dimension('is_refused') == 'true',
                            filters.Dimension('sprayed') ==
                            settings.MSPRAY_WAS_NOT_SPRAYED_VALUE]
                ),
                aggregators.longsum('count')
            ),
        },
        post_aggregations={
            'num_found': Field('num_sprayed_no_duplicates') +
            Field('num_sprayed_duplicates') +
            Field('num_not_sprayed_no_duplicates')
        },
        limit_spec={
            "type": "default",
            "limit": 50000,
            "columns": order_by
        }
    )
    if filter_list:
        fields = []
        for this_filter in filter_list:
            compare_dim = filters.Dimension(this_filter[0])
            comparison_operator = this_filter[1]  # e.g. operator.eq
            compare_dim_value = this_filter[2]
            fields.append(comparison_operator(compare_dim, compare_dim_value))
        params['filter'] = filters.Filter(
            type=filter_type,
            fields=fields
        )

    if dimensions is None:
        params['dimensions'] = ['target_area_id', 'target_area_name',
                                'target_area_structures']
    else:
        params['dimensions'] = dimensions

    try:
        request = query.groupby(**params)
    except OSError:
        return []
    else:
        return request.result
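A hypothetical call matching the docstring above, restricting the query to sprayable, non-duplicate submissions (the dimension names come from the aggregations above; the values are illustrative).

import operator

rows = get_druid_data(
    dimensions=['target_area_id', 'target_area_name'],
    filter_list=[['sprayable', operator.eq, 'true'],
                 ['is_duplicate', operator.eq, 'false']],
    filter_type='and',
    order_by=['target_area_name'],
)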
Example #20
 def test_build_aggregators(self):
     agg_input = {
         "agg1":
         aggregators.count("metric1"),
         "agg2":
         aggregators.longsum("metric2"),
         "agg3":
         aggregators.doublesum("metric3"),
         "agg4":
         aggregators.doublemin("metric4"),
         "agg5":
         aggregators.doublemax("metric5"),
         "agg6":
         aggregators.hyperunique("metric6"),
         "agg7":
         aggregators.cardinality("dim1"),
         "agg8":
         aggregators.cardinality(["dim1", "dim2"], by_row=True),
         "agg9":
         aggregators.thetasketch("dim1"),
         "agg10":
         aggregators.thetasketch("metric7"),
         "agg11":
         aggregators.thetasketch("metric8",
                                 isinputthetasketch=True,
                                 size=8192),
     }
     built_agg = aggregators.build_aggregators(agg_input)
     expected = [
         {
             "name": "agg1",
             "type": "count",
             "fieldName": "metric1"
         },
         {
             "name": "agg2",
             "type": "longSum",
             "fieldName": "metric2"
         },
         {
             "name": "agg3",
             "type": "doubleSum",
             "fieldName": "metric3"
         },
         {
             "name": "agg4",
             "type": "doubleMin",
             "fieldName": "metric4"
         },
         {
             "name": "agg5",
             "type": "doubleMax",
             "fieldName": "metric5"
         },
         {
             "name": "agg6",
             "type": "hyperUnique",
             "fieldName": "metric6"
         },
         {
             "name": "agg7",
             "type": "cardinality",
             "fieldNames": ["dim1"],
             "byRow": False,
         },
         {
             "name": "agg8",
             "type": "cardinality",
             "fieldNames": ["dim1", "dim2"],
             "byRow": True,
         },
         {
             "name": "agg9",
             "type": "thetaSketch",
             "fieldName": "dim1",
             "isInputThetaSketch": False,
             "size": 16384,
         },
         {
             "name": "agg10",
             "type": "thetaSketch",
             "fieldName": "metric7",
             "isInputThetaSketch": False,
             "size": 16384,
         },
         {
             "name": "agg11",
             "type": "thetaSketch",
             "fieldName": "metric8",
             "isInputThetaSketch": True,
             "size": 8192,
         },
     ]
     assert sorted(built_agg,
                   key=itemgetter("name")) == sorted(expected,
                                                     key=itemgetter("name"))
Example #21
    def test_build_filtered_aggregator(self):
        filter_ = filters.Filter(dimension="dim", value="val")
        agg_input = {
            "agg1":
            aggregators.filtered(filter_, aggregators.count("metric1")),
            "agg2":
            aggregators.filtered(filter_, aggregators.longsum("metric2")),
            "agg3":
            aggregators.filtered(filter_, aggregators.doublesum("metric3")),
            "agg4":
            aggregators.filtered(filter_, aggregators.doublemin("metric4")),
            "agg5":
            aggregators.filtered(filter_, aggregators.doublemax("metric5")),
            "agg6":
            aggregators.filtered(filter_, aggregators.hyperunique("metric6")),
            "agg7":
            aggregators.filtered(filter_, aggregators.cardinality("dim1")),
            "agg8":
            aggregators.filtered(
                filter_, aggregators.cardinality(["dim1", "dim2"],
                                                 by_row=True)),
            "agg9":
            aggregators.filtered(filter_, aggregators.thetasketch("dim1")),
            "agg10":
            aggregators.filtered(filter_, aggregators.thetasketch("metric7")),
            "agg11":
            aggregators.filtered(
                filter_,
                aggregators.thetasketch("metric8",
                                        isinputthetasketch=True,
                                        size=8192),
            ),
        }
        base = {
            "type": "filtered",
            "filter": {
                "type": "selector",
                "dimension": "dim",
                "value": "val"
            },
        }

        aggs = [
            {
                "name": "agg1",
                "type": "count",
                "fieldName": "metric1"
            },
            {
                "name": "agg2",
                "type": "longSum",
                "fieldName": "metric2"
            },
            {
                "name": "agg3",
                "type": "doubleSum",
                "fieldName": "metric3"
            },
            {
                "name": "agg4",
                "type": "doubleMin",
                "fieldName": "metric4"
            },
            {
                "name": "agg5",
                "type": "doubleMax",
                "fieldName": "metric5"
            },
            {
                "name": "agg6",
                "type": "hyperUnique",
                "fieldName": "metric6"
            },
            {
                "name": "agg7",
                "type": "cardinality",
                "fieldNames": ["dim1"],
                "byRow": False,
            },
            {
                "name": "agg8",
                "type": "cardinality",
                "fieldNames": ["dim1", "dim2"],
                "byRow": True,
            },
            {
                "name": "agg9",
                "type": "thetaSketch",
                "fieldName": "dim1",
                "isInputThetaSketch": False,
                "size": 16384,
            },
            {
                "name": "agg10",
                "type": "thetaSketch",
                "fieldName": "metric7",
                "isInputThetaSketch": False,
                "size": 16384,
            },
            {
                "name": "agg11",
                "type": "thetaSketch",
                "fieldName": "metric8",
                "isInputThetaSketch": True,
                "size": 8192,
            },
        ]
        expected = []
        for agg in aggs:
            exp = deepcopy(base)
            exp.update({"aggregator": agg})
            expected.append(exp)

        built_agg = aggregators.build_aggregators(agg_input)
        # Sort both lists by the inner aggregator's name before comparing.
        actual = sorted(built_agg,
                        key=lambda k: itemgetter("name")
                        (itemgetter("aggregator")(k)))
        expected = sorted(expected,
                          key=lambda k: itemgetter("name")
                          (itemgetter("aggregator")(k)))
        assert actual == expected
    def test_build_filtered_aggregator(self):
        filter_ = filters.Filter(dimension='dim', value='val')
        agg_input = {
            'agg1': aggregators.filtered(filter_,
                                         aggregators.count('metric1')),
            'agg2': aggregators.filtered(filter_,
                                         aggregators.longsum('metric2')),
            'agg3': aggregators.filtered(filter_,
                                         aggregators.doublesum('metric3')),
            'agg4': aggregators.filtered(filter_,
                                         aggregators.doublemin('metric4')),
            'agg5': aggregators.filtered(filter_,
                                         aggregators.doublemax('metric5')),
            'agg6': aggregators.filtered(filter_,
                                         aggregators.hyperunique('metric6')),
            'agg7': aggregators.filtered(filter_,
                                         aggregators.cardinality('dim1')),
            'agg8': aggregators.filtered(filter_,
                                         aggregators.cardinality(['dim1', 'dim2'], by_row=True)),
            'agg9': aggregators.filtered(filter_,
                                         aggregators.thetasketch('dim1')),
            'agg10': aggregators.filtered(filter_,
                                         aggregators.thetasketch('metric7')),
            'agg11': aggregators.filtered(filter_,
                                         aggregators.thetasketch('metric8', isinputthetasketch=True, size=8192)),
        }
        base = {
            'type': 'filtered',
            'filter': {
                'type': 'selector',
                'dimension': 'dim',
                'value': 'val'
            }
        }

        aggs = [
            {'name': 'agg1', 'type': 'count', 'fieldName': 'metric1'},
            {'name': 'agg2', 'type': 'longSum', 'fieldName': 'metric2'},
            {'name': 'agg3', 'type': 'doubleSum', 'fieldName': 'metric3'},
            {'name': 'agg4', 'type': 'doubleMin', 'fieldName': 'metric4'},
            {'name': 'agg5', 'type': 'doubleMax', 'fieldName': 'metric5'},
            {'name': 'agg6', 'type': 'hyperUnique', 'fieldName': 'metric6'},
            {'name': 'agg7', 'type': 'cardinality', 'fieldNames': ['dim1'], 'byRow': False},
            {'name': 'agg8', 'type': 'cardinality', 'fieldNames': ['dim1', 'dim2'], 'byRow': True},
            {'name': 'agg9', 'type': 'thetaSketch', 'fieldName': 'dim1', 'isInputThetaSketch': False, 'size': 16384},
            {'name': 'agg10', 'type': 'thetaSketch', 'fieldName': 'metric7', 'isInputThetaSketch': False, 'size': 16384},
            {'name': 'agg11', 'type': 'thetaSketch', 'fieldName': 'metric8', 'isInputThetaSketch': True, 'size': 8192}

        ]
        expected = []
        for agg in aggs:
            exp = deepcopy(base)
            exp.update({'aggregator': agg})
            expected.append(exp)

        built_agg = aggregators.build_aggregators(agg_input)
        # Sort both lists by the inner aggregator's name before comparing.
        actual = sorted(built_agg, key=lambda k: itemgetter('name')(
            itemgetter('aggregator')(k)))
        expected = sorted(expected, key=lambda k: itemgetter('name')(
            itemgetter('aggregator')(k)))
        assert actual == expected