Example #1
 def test_iterpoints(self):
     start = datetime.datetime(2019, 3, 4, 1, tzinfo=tz.UTC)
     end = datetime.datetime(2019, 3, 4, 2, tzinfo=tz.UTC)
     df = dataframe.DataFrame(start=start, end=end)
     points = [
         dataframe.DataPoint(**TestDataPoint.default_params)
         for _ in range(4)
     ]
     df.add_points(points, 'metric_x')
     expected = [('metric_x',
                  dataframe.DataPoint(**TestDataPoint.default_params))
                 for _ in range(4)]
     self.assertEqual(list(df.iterpoints()), expected)
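
Judging from the assertion above, DataFrame.iterpoints() yields (metric_name, DataPoint) pairs for every point that was added. A minimal consumption sketch with hypothetical sample values, using the positional DataPoint signature seen in the other examples:

start = datetime.datetime(2019, 3, 4, 1, tzinfo=tz.UTC)
end = datetime.datetime(2019, 3, 4, 2, tzinfo=tz.UTC)
frame = dataframe.DataFrame(start=start, end=end)
# DataPoint(unit, qty, price, groupby, metadata); values are hypothetical
frame.add_points(
    [dataframe.DataPoint('GiB', 1, 0, {}, {}) for _ in range(2)],
    'metric_x')
for metric_name, point in frame.iterpoints():
    print(metric_name, point.unit, point.qty)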
Example #2
def generate_v2_storage_data(min_length=10,
                             nb_projects=2,
                             project_ids=None,
                             start=None,
                             end=None):

    if not project_ids:
        project_ids = [uuidutils.generate_uuid() for i in range(nb_projects)]
    elif not isinstance(project_ids, list):
        project_ids = [project_ids]

    df = dataframe.DataFrame(start=start, end=end)
    for metric_name, sample in samples.V2_STORAGE_SAMPLE.items():
        datapoints = []
        for project_id in project_ids:
            data = [copy.deepcopy(sample)
                    for i in range(min_length + random.randint(1, 10))]
            for elem in data:
                elem['groupby']['id'] = uuidutils.generate_uuid()
                elem['groupby']['project_id'] = project_id
            datapoints += [dataframe.DataPoint(
                elem['vol']['unit'],
                elem['vol']['qty'],
                elem['rating']['price'],
                elem['groupby'],
                elem['metadata'],
            ) for elem in data]
        df.add_points(datapoints, metric_name)

    return df
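
A hedged usage sketch of the helper above; the project id and the time bounds are illustrative only, and the actual point contents depend on samples.V2_STORAGE_SAMPLE:

project_id = '8f1e8645-0000-0000-0000-000000000000'  # hypothetical id
df = generate_v2_storage_data(
    min_length=5,
    project_ids=project_id,  # a single id gets wrapped into a list
    start=datetime.datetime(2019, 3, 4, 1, tzinfo=tz.UTC),
    end=datetime.datetime(2019, 3, 4, 2, tzinfo=tz.UTC),
)
for metric_name, point in df.iterpoints():
    # every generated point is tagged with the requested project id
    assert point.groupby['project_id'] == project_id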
Example #3
    def fetch_all(self, metric_name, start, end,
                  project_id=None, q_filter=None):
        met = self.conf[metric_name]

        data = self._fetch_measures(
            metric_name,
            start,
            end,
            project_id=project_id,
            q_filter=q_filter,
        )

        resources_info = None
        if met['metadata']:
            resources_info = self._fetch_metrics(
                metric_name,
                start,
                end,
                project_id=project_id,
                q_filter=q_filter,
            )

        formated_resources = list()
        for d in data:
            if len(d['statistics']):
                metadata, groupby, qty = self._format_data(
                    met, d, resources_info)
                formated_resources.append(dataframe.DataPoint(
                    met['unit'],
                    qty,
                    0,
                    groupby,
                    metadata,
                ))
        return formated_resources
Example #4
    def test_add_point_no_autocommit(self):
        point = dataframe.DataPoint('unit', '0.42', '0.1337', {}, {})
        start = datetime.datetime(2019, 1, 1)
        end = datetime.datetime(2019, 1, 1, 1)
        with mock.patch.object(self.client, 'commit') as func_mock:
            with mock.patch.object(self.client, '_autocommit', new=False):
                with mock.patch.object(self.client, '_chunk_size', new=3):
                    self.client._docs = []
                    for _ in range(5):
                        self.client.add_point(point, 'awesome_type', start,
                                              end)

                    func_mock.assert_not_called()
                    self.assertEqual(self.client._docs,
                                     [{
                                         'start': start,
                                         'end': end,
                                         'type': 'awesome_type',
                                         'unit': point.unit,
                                         'qty': point.qty,
                                         'price': point.price,
                                         'groupby': point.groupby,
                                         'metadata': point.metadata,
                                     } for _ in range(5)])

        self.client._docs = []
Example #5
    def test_add_point_with_autocommit(self):
        point = dataframe.DataPoint('unit', '0.42', '0.1337', {}, {})
        start = datetime.datetime(2019, 1, 1)
        end = datetime.datetime(2019, 1, 1, 1)

        commit_calls = {'count': 0}

        def commit():
            # We can't re-assign nonlocal variables in Python 2
            commit_calls['count'] += 1
            self.client._docs = []

        with mock.patch.object(self.client, 'commit', new=commit):
            with mock.patch.object(self.client, '_autocommit', new=True):
                with mock.patch.object(self.client, '_chunk_size', new=3):
                    self.client._docs = []
                    for i in range(5):
                        self.client.add_point(point, 'awesome_type', start,
                                              end)

                    self.assertEqual(commit_calls['count'], 1)
                    self.assertEqual(self.client._docs,
                                     [{
                                         'start': start,
                                         'end': end,
                                         'type': 'awesome_type',
                                         'unit': point.unit,
                                         'qty': point.qty,
                                         'price': point.price,
                                         'groupby': point.groupby,
                                         'metadata': point.metadata,
                                     } for _ in range(2)])

        # cleanup
        self.client._docs = []
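
Taken together, the two tests above pin down the buffering contract of add_point: documents are appended to _docs, and with autocommit enabled, reaching _chunk_size triggers a commit that flushes the buffer (five adds with a chunk size of three therefore leave two buffered docs and one commit call). A rough sketch of that inferred behaviour; this is an assumption drawn from the tests, not the client's actual implementation:

# Assumed buffering logic, inferred from the assertions above; the
# parameter name type_ is hypothetical.
def add_point(self, point, type_, start, end):
    self._docs.append({
        'start': start, 'end': end, 'type': type_,
        'unit': point.unit, 'qty': point.qty, 'price': point.price,
        'groupby': point.groupby, 'metadata': point.metadata,
    })
    if self._autocommit and len(self._docs) >= self._chunk_size:
        self.commit()  # commit() is expected to clear self._docs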
Example #6
 def test_from_dict_valid_dict(self):
     self.assertEqual(
         dataframe.DataPoint(
             unit="amazing_unit",
             qty=3,
             price=0,
             groupby={
                 "g_one": "one",
                 "g_two": "two"
             },
             metadata={
                 "m_one": "one",
                 "m_two": "two"
             },
         ).as_dict(),
         dataframe.DataPoint.from_dict({
             "vol": {
                 "unit": "amazing_unit",
                 "qty": 3
             },
             "groupby": {
                 "g_one": "one",
                 "g_two": "two"
             },
             "metadata": {
                 "m_one": "one",
                 "m_two": "two"
             },
         }).as_dict(),
     )
Example #7
 def test_properties(self):
     params = copy.deepcopy(self.default_params)
     groupby = {"group_one": "one", "group_two": "two"}
     metadata = {"meta_one": "one", "meta_two": "two"}
     params.update({'groupby': groupby, 'metadata': metadata})
     point = dataframe.DataPoint(**params)
     self.assertEqual(point.groupby, groupby)
     self.assertEqual(point.metadata, metadata)
Example #8
 def _doc_to_datapoint(doc):
     return dataframe.DataPoint(
         doc['unit'],
         doc['qty'],
         doc['price'],
         doc['groupby'],
         doc['metadata'],
     )
Example #9
 def _point_to_dataframe_entry(point):
     groupby = filter(bool, (point.pop('groupby', None) or '').split('|'))
     metadata = filter(bool, (point.pop('metadata', None) or '').split('|'))
     return dataframe.DataPoint(
         point['unit'],
         point['qty'],
         point['price'],
         {key: point.get(key, '') for key in groupby},
         {key: point.get(key, '') for key in metadata},
     )
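
The flattening convention assumed here (and exercised by the Influx tests below) is that 'groupby' and 'metadata' carry '|'-separated key names while the corresponding values are stored as flat fields on the point. A sketch with a hypothetical flat point:

point = {
    'unit': 'banana', 'qty': 42, 'price': 1,
    'groupby': 'one|two', 'metadata': '1|2',
    'one': '1', 'two': '2', '1': 'one', '2': 'two',
}
entry = _point_to_dataframe_entry(point)
# entry.groupby  == {'one': '1', 'two': '2'}
# entry.metadata == {'1': 'one', '2': 'two'}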
Example #10
    def fetch_all(self,
                  metric_name,
                  start,
                  end,
                  project_id=None,
                  q_filter=None):

        met = self.conf[metric_name]

        data = self._fetch_metric(
            metric_name,
            start,
            end,
            project_id=project_id,
            q_filter=q_filter,
        )

        data = GnocchiCollector.filter_unecessary_measurements(
            data, met, metric_name)

        resources_info = None
        if met['metadata']:
            resources_info = self._fetch_resources(metric_name,
                                                   start,
                                                   end,
                                                   project_id=project_id,
                                                   q_filter=q_filter)

        formated_resources = list()
        for d in data:
            # Only if aggregates have been found
            LOG.debug(
                "Processing entry [%s] for [%s] in timestamp ["
                "start=%s, end=%s] and project id [%s]", d, metric_name, start,
                end, project_id)
            if d['measures']['measures']['aggregated']:
                try:
                    metadata, groupby, qty = self._format_data(
                        met, d, resources_info)
                except AssociatedResourceNotFound as e:
                    LOG.warning(
                        '[{}] An error occurred during data collection '
                        'between {} and {}: {}'.format(project_id, start, end,
                                                       e), )
                    continue
                formated_resources.append(
                    dataframe.DataPoint(
                        met['unit'],
                        qty,
                        0,
                        groupby,
                        metadata,
                    ))
        return formated_resources
Example #11
 def test_point_to_dataframe_entry_valid_point(self):
     self.assertEqual(
         influx.InfluxStorage._point_to_dataframe_entry(self.point),
         dataframe.DataPoint(
             'banana',
             42,
             1,
             {'one': '1', 'two': '2'},
             {'1': 'one', '2': 'two'},
         ),
     )
Example #12
 def test_repr(self):
     start = datetime.datetime(2019, 3, 4, 1, tzinfo=tz.UTC)
     end = datetime.datetime(2019, 3, 4, 2, tzinfo=tz.UTC)
     df = dataframe.DataFrame(start=start, end=end)
     points = [
         dataframe.DataPoint(**TestDataPoint.default_params)
         for _ in range(4)
     ]
     df.add_points(points, 'metric_x')
     self.assertEqual(str(df), "DataFrame(metrics=[metric_x])")
     df.add_points(points, 'metric_y')
     self.assertEqual(str(df), "DataFrame(metrics=[metric_x,metric_y])")
Example #13
 def test_desc(self):
     params = copy.deepcopy(self.default_params)
     params['groupby'] = {'group_one': 'one', 'group_two': 'two'}
     params['metadata'] = {'meta_one': 'one', 'meta_two': 'two'}
     point = dataframe.DataPoint(**params)
     self.assertEqual(
         point.desc, {
             'group_one': 'one',
             'group_two': 'two',
             'meta_one': 'one',
             'meta_two': 'two',
         })
Example #14
    def fetch_all(self, metric_name, start, end, scope_id, q_filter=None):
        """Returns metrics to be valorized."""
        scope_key = CONF.collect.scope_key
        method = self.conf[metric_name]['extra_args']['aggregation_method']
        groupby = self.conf[metric_name].get('groupby', [])
        metadata = self.conf[metric_name].get('metadata', [])
        period = tzutils.diff_seconds(end, start)
        time = end

        query = '{0}({0}_over_time({1}{{{2}="{3}"}}[{4}s])) by ({5})'.format(
            method,
            metric_name,
            scope_key,
            scope_id,
            period,
            ', '.join(groupby + metadata),
        )

        try:
            res = self._conn.get_instant(
                query,
                time.isoformat(),
            )
        except PrometheusResponseError as e:
            raise CollectError(*e.args)

        # If the query returns an empty dataset,
        # return an empty list
        if not res['data']['result']:
            return []

        formatted_resources = []

        for item in res['data']['result']:
            metadata, groupby, qty = self._format_data(
                metric_name,
                scope_key,
                scope_id,
                start,
                end,
                item,
            )

            formatted_resources.append(
                dataframe.DataPoint(
                    self.conf[metric_name]['unit'],
                    qty,
                    0,
                    groupby,
                    metadata,
                ))

        return formatted_resources
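
For illustration, the format string above expands into a PromQL instant query of the following shape (all configuration values are hypothetical):

query = '{0}({0}_over_time({1}{{{2}="{3}"}}[{4}s])) by ({5})'.format(
    'max', 'http_requests_total', 'project_id', 'abc123', 3600,
    ', '.join(['code'] + ['instance']))
# -> 'max(max_over_time(http_requests_total'
#    '{project_id="abc123"}[3600s])) by (code, instance)'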
Example #15
 def test_as_dict_mutable_standard(self):
     self.assertEqual(
         dataframe.DataPoint(**self.default_params).as_dict(mutable=True), {
             "vol": {
                 "unit": "undefined",
                 "qty": decimal.Decimal(0)
             },
             "rating": {
                 "price": decimal.Decimal(0)
             },
             "groupby": {},
             "metadata": {},
         })
Example #16
 def test_json_standard(self):
     self.assertEqual(
         json.loads(dataframe.DataPoint(**self.default_params).json()), {
             "vol": {
                 "unit": "undefined",
                 "qty": decimal.Decimal(0)
             },
             "rating": {
                 "price": decimal.Decimal(0)
             },
             "groupby": {},
             "metadata": {},
         })
Example #17
    def test_dataframe_add_points(self):
        start = datetime.datetime(2019, 3, 4, 1, tzinfo=tz.UTC)
        end = datetime.datetime(2019, 3, 4, 2, tzinfo=tz.UTC)
        df = dataframe.DataFrame(start=start, end=end)
        a_points = [
            dataframe.DataPoint(**TestDataPoint.default_params)
            for _ in range(2)
        ]
        b_points = [
            dataframe.DataPoint(**TestDataPoint.default_params)
            for _ in range(4)
        ]

        df.add_point(a_points[0], 'service_a')
        df.add_points(a_points[1:], 'service_a')
        df.add_points(b_points[:2], 'service_b')
        df.add_points(b_points[2:3], 'service_b')
        df.add_point(b_points[3], 'service_b')

        self.assertEqual(
            dict(df.as_dict()), {
                'period': {
                    'begin': start,
                    'end': end
                },
                'usage': {
                    'service_a': [
                        dataframe.DataPoint(
                            **TestDataPoint.default_params).as_dict()
                        for _ in range(2)
                    ],
                    'service_b': [
                        dataframe.DataPoint(
                            **TestDataPoint.default_params).as_dict()
                        for _ in range(4)
                    ],
                }
            })
Example #18
 def test_as_dict_mutable_legacy(self):
     self.assertEqual(
         dataframe.DataPoint(**self.default_params).as_dict(legacy=True,
                                                            mutable=True),
         {
             "vol": {
                 "unit": "undefined",
                 "qty": decimal.Decimal(0)
             },
             "rating": {
                 "price": decimal.Decimal(0)
             },
             "desc": {},
         })
Example #19
 def test_json_legacy(self):
     self.assertEqual(
         json.loads(
             dataframe.DataPoint(**self.default_params).json(legacy=True)),
         {
             "vol": {
                 "unit": "undefined",
                 "qty": decimal.Decimal(0)
             },
             "rating": {
                 "price": decimal.Decimal(0)
             },
             "desc": {},
         })
Example #20
 def test_point_to_dataframe_entry_invalid_groupby_metadata(self):
     point = copy.deepcopy(self.point)
     point['groupby'] = 'a'
     point['metadata'] = None
     self.assertEqual(
         influx.InfluxStorage._point_to_dataframe_entry(point),
         dataframe.DataPoint(
             'banana',
             42,
             1,
             {'a': ''},
             {},
         ),
     )
Example #21
    def test_json(self):
        start = datetime.datetime(2019, 3, 4, 1, tzinfo=tz.UTC)
        end = datetime.datetime(2019, 3, 4, 2, tzinfo=tz.UTC)
        df = dataframe.DataFrame(start=start, end=end)
        a_points = [
            dataframe.DataPoint(**TestDataPoint.default_params)
            for _ in range(2)
        ]
        b_points = [
            dataframe.DataPoint(**TestDataPoint.default_params)
            for _ in range(4)
        ]
        df.add_points(a_points, 'service_a')
        df.add_points(b_points, 'service_b')

        self.maxDiff = None
        self.assertEqual(
            json.loads(df.json()),
            json.loads(
                json.dumps({
                    'period': {
                        'begin': start.isoformat(),
                        'end': end.isoformat()
                    },
                    'usage': {
                        'service_a': [
                            dataframe.DataPoint(
                                **TestDataPoint.default_params).as_dict()
                            for _ in range(2)
                        ],
                        'service_b': [
                            dataframe.DataPoint(
                                **TestDataPoint.default_params).as_dict()
                            for _ in range(4)
                        ],
                    }
                })))
Example #22
    def test_format_retrieve(self):
        expected_name = 'http_requests_total'
        expected_data = [
            dataframe.DataPoint('instance', '7', '0', {
                'bar': '',
                'foo': '',
                'project_id': ''
            }, {
                'code': '200',
                'instance': 'localhost:9090'
            }),
            dataframe.DataPoint('instance', '42', '0', {
                'bar': '',
                'foo': '',
                'project_id': ''
            }, {
                'code': '200',
                'instance': 'localhost:9090'
            }),
        ]

        no_response = mock.patch(
            'cloudkitty.common.prometheus_client.PrometheusClient.get_instant',
            return_value=samples.PROMETHEUS_RESP_INSTANT_QUERY,
        )

        with no_response:
            actual_name, actual_data = self.collector.retrieve(
                metric_name='http_requests_total',
                start=samples.FIRST_PERIOD_BEGIN,
                end=samples.FIRST_PERIOD_END,
                project_id=samples.TENANT,
                q_filter=None,
            )

        self.assertEqual(expected_name, actual_name)
        self.assertEqual(expected_data, actual_data)
Example #23
    def create_fake_data(self, begin, end, project_id):

        cpu_point = dataframe.DataPoint(
            unit="nothing",
            qty=1,
            groupby={
                "fake_meta": 1.0,
                "project_id": project_id
            },
            metadata={"dummy": True},
            price=decimal.Decimal('1.337'),
        )
        image_point = dataframe.DataPoint(
            unit="nothing",
            qty=1,
            groupby={
                "fake_meta": 1.0,
                "project_id": project_id
            },
            metadata={"dummy": True},
            price=decimal.Decimal('0.121'),
        )
        data = [
            dataframe.DataFrame(
                start=begin,
                end=end,
                usage=collections.OrderedDict({"cpu": [cpu_point, cpu_point]}),
            ),
            dataframe.DataFrame(
                start=begin,
                end=end,
                usage=collections.OrderedDict(
                    {"image.size": [image_point, image_point]}),
            ),
        ]
        return data
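
The frames produced above behave like any other DataFrame; a minimal sketch of consuming them inside the same test class (begin, end and project_id are placeholders):

frames = self.create_fake_data(begin, end, project_id)
for frame in frames:
    for metric_name, point in frame.iterpoints():
        # 'cpu' points carry price 1.337, 'image.size' points 0.121
        print(metric_name, point.qty, point.price)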
Example #24
 def test_as_dict_immutable(self):
     point_dict = dataframe.DataPoint(**self.default_params).as_dict()
     self.assertIsInstance(point_dict, datastructures.ImmutableDict)
     self.assertEqual(
         dict(point_dict), {
             "vol": {
                 "unit": "undefined",
                 "qty": decimal.Decimal(0)
             },
             "rating": {
                 "price": decimal.Decimal(0)
             },
             "groupby": {},
             "metadata": {},
         })
Example #25
 def test_from_dict_valid_dict_date_as_str(self):
     start = datetime.datetime(2019, 1, 2, 12, tzinfo=tz.UTC)
     end = datetime.datetime(2019, 1, 2, 13, tzinfo=tz.UTC)
     point = dataframe.DataPoint('unit', 0, 0, {'g_one': 'one'},
                                 {'m_two': 'two'})
     usage = {'metric_x': [point]}
     dict_usage = {'metric_x': [point.as_dict(mutable=True)]}
     self.assertEqual(
         dataframe.DataFrame(start, end, usage).as_dict(),
         dataframe.DataFrame.from_dict({
             'period': {
                 'begin': start.isoformat(),
                 'end': end.isoformat()
             },
             'usage': dict_usage,
         }).as_dict(),
     )
Example #26
    def fetch_all(self, metric_name, start, end,
                  project_id=None, q_filter=None):

        met = self.conf[metric_name]

        data = self._fetch_metric(
            metric_name,
            start,
            end,
            project_id=project_id,
            q_filter=q_filter,
        )

        resources_info = None
        if met['metadata']:
            resources_info = self._fetch_resources(
                metric_name,
                start,
                end,
                project_id=project_id,
                q_filter=q_filter
            )
        formated_resources = list()
        for d in data:
            # Only if aggregates have been found
            if d['measures']['measures']['aggregated']:
                try:
                    metadata, groupby, qty = self._format_data(
                        met, d, resources_info)
                except AssociatedResourceNotFound as e:
                    LOG.warning(
                        '[{}] An error occurred during data collection '
                        'between {} and {}: {}'.format(
                            project_id, start, end, e),
                    )
                    continue
                formated_resources.append(dataframe.DataPoint(
                    met['unit'],
                    qty,
                    0,
                    groupby,
                    metadata,
                ))
        return formated_resources
Example #27
    def quote(self, ctxt, res_data):
        LOG.debug('Received quote request [%s] from RPC.', res_data)
        worker = APIWorker()

        start = tzutils.localized_now()
        end = tzutils.add_delta(start, timedelta(seconds=CONF.collect.period))

        # Need to prepare data to support the V2 processing format
        usage = {}
        for k in res_data['usage']:
            all_data_points_for_metric = []
            all_quote_data_entries = res_data['usage'][k]
            for p in all_quote_data_entries:
                vol = p['vol']
                desc = p.get('desc', {})

                data_point = dataframe.DataPoint(
                    vol['unit'],
                    vol['qty'],
                    0,
                    desc.get('groupby', []),
                    desc.get('metadata', []),
                )
                all_data_points_for_metric.append(data_point)
            usage[k] = all_data_points_for_metric

        frame = dataframe.DataFrame(
            start=start,
            end=end,
            usage=usage,
        )

        quote_result = worker.quote(frame)
        LOG.debug("Quote result [%s] for input data [%s].",
                  quote_result, res_data)
        return str(quote_result)
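
Judging from how the handler above unpacks res_data, the incoming quote payload is expected to look roughly like this (metric name and values are hypothetical):

res_data = {
    'usage': {
        'cpu': [{
            'vol': {'unit': 'instance', 'qty': 1},
            'desc': {
                'groupby': {'project_id': 'abc123'},   # hypothetical
                'metadata': {'flavor': 'm1.tiny'},     # hypothetical
            },
        }],
    },
}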
Example #28
 def test_create_empty_datapoint(self):
     point = dataframe.DataPoint(**self.default_params)
     self.assertEqual(point.qty, decimal.Decimal(0))
     self.assertEqual(point.price, decimal.Decimal(0))
     self.assertEqual(point.unit, "undefined")
     self.assertEqual(point.groupby, {})
Example #29
 def test_set_price(self):
     point = dataframe.DataPoint(**self.default_params)
     self.assertEqual(point.price, decimal.Decimal(0))
     self.assertEqual(point.set_price(42).price, decimal.Decimal(42))
     self.assertEqual(point.set_price(1337).price, decimal.Decimal(1337))
Example #30
 def test_readonly_attrs(self):
     point = dataframe.DataPoint(**self.default_params)
     for attr in ("qty", "price", "unit"):
         self.assertRaises(AttributeError, setattr, point, attr, 'x')
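
Taken together, the examples above consistently rely on the same constructor shape and read-only accessors; a closing sketch of the observed API (sample values are hypothetical):

point = dataframe.DataPoint(
    'GiB',                      # unit
    '1.5',                      # qty (strings, ints and Decimals all appear above)
    '0.25',                     # price
    {'project_id': 'abc123'},   # groupby (hypothetical keys)
    {'flavor': 'm1.tiny'},      # metadata (hypothetical keys)
)
# qty, price and unit are read-only; set_price() hands back a point
# carrying the new price, as the tests above show.
point = point.set_price(decimal.Decimal('0.5'))
assert point.price == decimal.Decimal('0.5')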