def __init__(self, series, raise_errors=True):
        """Initialize the ResultSet."""
        self._raw = series
        self._error = self._raw.get('error', None)

        if self.error is not None and raise_errors is True:
            raise InfluxDBClientError(self.error)
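A minimal usage sketch, assuming this __init__ belongs to influxdb-python's ResultSet (which exposes self._error through an `error` property):

from influxdb.resultset import ResultSet
from influxdb.exceptions import InfluxDBClientError

try:
    # a raw response body carrying an 'error' key triggers the raise above
    ResultSet({'error': 'database not found: db'})
except InfluxDBClientError as exc:
    print(exc)  # database not found: db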
Example #2
 def _open(self):
     conn = InfluxDBClient(host=self.url,
                           port=self.port,
                           username=self.user,
                           password=self._pw,
                           database=self.db)
     if not conn:
         raise InfluxDBClientError(
             "Connection to metrics database was not set.")
     return conn
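Note that InfluxDBClient's constructor is lazy and always returns a truthy object, so the `if not conn` guard above can never fire. A hedged variant that actually probes the server, using influxdb-python's ping():

from influxdb import InfluxDBClient
from influxdb.exceptions import InfluxDBClientError

def open_verified(host, port, user, pw, db):
    conn = InfluxDBClient(host=host, port=port, username=user,
                          password=pw, database=db)
    try:
        conn.ping()  # round-trips to the server, unlike the constructor
    except Exception as exc:
        raise InfluxDBClientError(
            "Connection to metrics database was not set.") from exc
    return conn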
Example #3
    def historian_setup(self):
        _log.debug("HISTORIAN SETUP")
        if not self._client:
            raise InfluxDBClientError("Cannot connect to InfluxDB client")

        # Get meta_dicts for all topics if they are already stored
        self._topic_id_map, self._meta_dicts = influxdbutils.get_all_topic_id_and_meta(
            self._client)
        _log.info("_meta_dicts is {}".format(self._meta_dicts))
        _log.info("_topic_id_map is {}".format(self._topic_id_map))
Example #4
def test_influx_client_error(mocker, caplog):
    query = InfluxQuery()
    mock_client = mocker.MagicMock()
    mock_client.query.side_effect = InfluxDBClientError('client error')
    mocker.patch('hpca.utils.influx_query.InfluxDBClient', return_value=mock_client)
    with pytest.raises(InfluxDBClientError) as cm:
        query.submit(query='smthg', data_type='json')
    assert cm.value.args == ('client error',)
    assert caplog.record_tuples == [
        ('hpca.utils.influx_query', logging.ERROR, 'InfluxDB client error:\nclient error'),
    ]
Example #5
def test_field_invalid_timestamp(client, mock_data_store):
    from influxdb.exceptions import InfluxDBClientError
    import falcon

    invalid_timestamp = '2018-'
    mock_data_store.get_reading.side_effect = InfluxDBClientError("Invalid Timestamp: {}".format(invalid_timestamp))

    params = {"sensor_id": 1, "field": "field1", "timestamp": invalid_timestamp}
    result = client.simulate_post('/data', json=params)

    assert result.status == falcon.HTTP_422
    assert result.json['error message'] == 'Invalid Timestamp: {}'.format(invalid_timestamp)
Example #6
def write_data_to_db(client, points_information):
    """
    Writes measurement points to the specified Database.

    :param client: an active InfluxDBClient instance
    :param points_information: list of measurement points to write
    """
    print(points_information)
    try:
        client.write_points(points_information)
    except InfluxDBClientError as exc:
        raise InfluxDBClientError(
            f"error for data entry :: {points_information}") from exc
Example #7
    async def request(self,
                      url,
                      method='GET',
                      params=None,
                      data=None,
                      expected_response_code=200,
                      headers=None):
        url = f'{self._baseurl}/{url}'

        if headers is None:
            headers = self._headers

        if params is None:
            params = {}

        if isinstance(data, (dict, list)):
            data = json.dumps(data)

        kwargs = dict(
            method=method,
            url=url,
            params=params,
            data=data,
            headers=headers,
            verify_ssl=self._verify_ssl,
            timeout=self._timeout,
        )
        if self._username is not None:
            kwargs.update(
                auth=aiohttp.BasicAuth(self._username, self._password or ''))

        if self.proxy:
            connector = ProxyConnector.from_url(self.proxy)
        else:
            connector = None

        async with aiohttp.ClientSession(
                connector=connector, headers=self._session.headers) as session:
            async with session.request(**kwargs) as response:
                data = await response.json()
        # 5xx responses signal a server-side failure; anything else is
        # judged against the expected response code below.
        if 500 <= response.status < 600:
            raise InfluxDBServerError(data)
        if response.status == expected_response_code:
            return data
        raise InfluxDBClientError(data, response.status)
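A hedged usage sketch, assuming the surrounding class wires up _baseurl, _headers and the other attributes the way influxdb-python's client does:

async def show_databases(client):
    # `client` is an instance of the class defining request() above
    return await client.request('query',
                                params={'q': 'SHOW DATABASES', 'db': 'mydb'})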
Example #8
@pytest.mark.parametrize(
    "mock_client, config_ext, queries, set_query_mock, query_exception",
    [
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            BASE_V1_QUERY,
            _set_query_mock_v1,
            OSError("fail"),
        ),
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            BASE_V1_QUERY,
            _set_query_mock_v1,
            InfluxDBClientError("fail"),
        ),
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            BASE_V1_QUERY,
            _set_query_mock_v1,
            InfluxDBClientError("fail", code=400),
        ),
        (
            API_VERSION_2,
            BASE_V2_CONFIG,
            BASE_V2_QUERY,
            _set_query_mock_v2,
            OSError("fail"),
        ),
Example #9
def get_topic_values(client, topic_id, start, end,
                     agg_type, agg_period, skip, count, order,
                     use_calendar_time_periods):
    """
    This is a helper for function query_historian in InfluxdbHistorian class.
    Execute query to return a list of values of specific topic(s)
    'topic_id' will be split into 3 tags 'campus', 'building', 'device' and measurement name.


    See Schema description for InfluxDB Historian in README


    Please see
    :py:meth:`volttron.platform.agent.base_historian.BaseQueryHistorianAgent.query_historian`
    for input parameters
    """

    # Make sure topic_id doesn't contain any special character
    if not re.search(TOPIC_REGEX, topic_id):
        raise ValueError("Topic id contains special character(s) that not allowed")

    tags_values = topic_id.rsplit('/', 3)
    measurement = tags_values.pop()
    tags_title = ["device", "building", "campus"]
    tags_conditions = ''

    # Construct the tag condition, which is part of the WHERE clause.
    # E.g. for topic a/b/c/d, measurement=d and the tag condition is:
    #      "device='c' and building='b' and campus='a'"
    for i, tag in enumerate(tags_values[::-1]):
        tags_conditions += '{}=\'{}\''.format(tags_title[i], tag)
        if i != len(tags_values) - 1:
            tags_conditions += " and "

    if agg_type:
        agg_type = agg_type.upper()
        if agg_type not in SUPPORTED_AGG_TYPE:
            raise ValueError("Aggregation function {} is not supported".format(agg_type))

        query = 'SELECT {}(value) as value FROM {}'.format(agg_type, measurement)
        if tags_conditions:
            query += ' WHERE {}'.format(tags_conditions)
        if not start and not end:
            raise ValueError("Either start time or end time must be provided when executing "
                             "aggregation queries")
    else:
        query = 'SELECT value FROM {}'.format(measurement)
        if tags_conditions:
            query += ' WHERE {}'.format(tags_conditions)

    if start:
        start_time = format_timestamp(start)
        query += ' AND time >= \'%s\'' % start_time
    if end:
        end_time = format_timestamp(end)
        query += ' AND time <= \'%s\'' % end_time

    if agg_period:
        if not re.search(AGG_PERIOD_REGEX, agg_period):
            raise ValueError("Aggregation period {} is in wrong format".format(agg_period))
        elif agg_period[-1] == 'M':  # InfluxDB only supports m, h, d and w, not M (month)
            raise InfluxDBClientError("InfluxDB does not support GROUP BY month yet")

        if use_calendar_time_periods:
            query += ' GROUP BY time(%s)' % agg_period
        else:
            # @TODO: the now() offset was removed in newer InfluxDB versions.
            # Use InfluxDB version <1.2.4 to make this work.
            query += ' GROUP BY time(%s, now())' % agg_period
    if order == "LAST_TO_FIRST":
        query += ' ORDER BY time DESC'

    query += ' LIMIT %d' % count
    if skip:
        query += ' OFFSET %d' % skip

    try:
        rs = client.query(query)
        rs = list(rs.get_points())
    except InfluxDBClientError:
        _log.error("Query: {}".format(query))
        raise

    values = []
    for point in rs:
        ts = parser.parse(point['time'])
        ts = format_timestamp(ts)
        values.append((ts, point['value']))

    return values
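A hypothetical call illustrating the query built for topic_id 'a/b/c/d' (measurement 'd'; the tag order follows the reversed split above), assuming `client` is an open InfluxDBClient:

values = get_topic_values(
    client, 'a/b/c/d', start=None, end=None,
    agg_type=None, agg_period=None, skip=0, count=100,
    order='FIRST_TO_LAST', use_calendar_time_periods=False)
# -> SELECT value FROM d WHERE device='c' and building='b' and campus='a' LIMIT 100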
Example #10
class TestDatabaseClient(TestMonitoringMixin, TestCase):
    def test_forbidden_queries(self):
        queries = [
            'DROP DATABASE openwisp2',
            'DROP MEASUREMENT test_metric',
            'CREATE DATABASE test',
            'DELETE MEASUREMENT test_metric',
            'ALTER RETENTION POLICY policy',
            'SELECT * INTO metric2 FROM test_metric',
        ]
        for q in queries:
            try:
                timeseries_db.validate_query(q)
            except ValidationError as e:
                self.assertIn('configuration', e.message_dict)
            else:
                self.fail('ValidationError not raised')

    def test_get_custom_query(self):
        c = self._create_chart(test_data=None)
        custom_q = c._default_query.replace('{field_name}', '{fields}')
        q = c.get_query(query=custom_q, fields=['SUM(*)'])
        self.assertIn('SELECT SUM(*) FROM', q)

    def test_is_aggregate_bug(self):
        m = self._create_object_metric(name='summary_avg')
        c = Chart(metric=m, configuration='dummy')
        self.assertFalse(timeseries_db._is_aggregate(c.query))

    def test_is_aggregate_fields_function(self):
        m = self._create_object_metric(name='is_aggregate_func')
        c = Chart(metric=m, configuration='uptime')
        self.assertTrue(timeseries_db._is_aggregate(c.query))

    def test_get_query_fields_function(self):
        c = self._create_chart(test_data=None, configuration='histogram')
        q = c.get_query(fields=['ssh', 'http2', 'apple-music'])
        expected = ('SELECT SUM("ssh") / 1 AS ssh, '
                    'SUM("http2") / 1 AS http2, '
                    'SUM("apple-music") / 1 AS apple_music FROM')
        self.assertIn(expected, q)

    def test_default_query(self):
        c = self._create_chart(test_data=False)
        q = ("SELECT {field_name} FROM {key} WHERE time >= '{time}' AND "
             "content_type = '{content_type}' AND object_id = '{object_id}'")
        self.assertEqual(c.query, q)

    def test_write(self):
        timeseries_db.write('test_write', dict(value=2), database=self.TEST_DB)
        measurement = list(
            timeseries_db.query('select * from test_write',
                                database=self.TEST_DB).get_points())[0]
        self.assertEqual(measurement['value'], 2)

    def test_general_write(self):
        m = self._create_general_metric(name='Sync test')
        m.write(1)
        measurement = list(
            timeseries_db.query('select * from sync_test').get_points())[0]
        self.assertEqual(measurement['value'], 1)

    def test_object_write(self):
        om = self._create_object_metric()
        om.write(3)
        content_type = '.'.join(om.content_type.natural_key())
        q = (f"select * from test_metric WHERE object_id = '{om.object_id}'"
             f" AND content_type = '{content_type}'")
        measurement = timeseries_db.get_list_query(q)[0]
        self.assertEqual(measurement['value'], 3)

    def test_general_same_key_different_fields(self):
        down = self._create_general_metric(name='traffic (download)',
                                           key='traffic',
                                           field_name='download')
        down.write(200)
        up = self._create_general_metric(name='traffic (upload)',
                                         key='traffic',
                                         field_name='upload')
        up.write(100)
        measurement = list(
            timeseries_db.query(
                'select download from traffic').get_points())[0]
        self.assertEqual(measurement['download'], 200)
        measurement = list(
            timeseries_db.query('select upload from traffic').get_points())[0]
        self.assertEqual(measurement['upload'], 100)

    def test_object_same_key_different_fields(self):
        user = self._create_user()
        user_down = self._create_object_metric(
            name='traffic (download)',
            key='traffic',
            field_name='download',
            content_object=user,
        )
        user_down.write(200)
        user_up = self._create_object_metric(
            name='traffic (upload)',
            key='traffic',
            field_name='upload',
            content_object=user,
        )
        user_up.write(100)
        content_type = '.'.join(user_down.content_type.natural_key())
        q = (
            f"select download from traffic WHERE object_id = '{user_down.object_id}'"
            f" AND content_type = '{content_type}'")
        measurement = timeseries_db.get_list_query(q)[0]
        self.assertEqual(measurement['download'], 200)
        q = (
            f"select upload from traffic WHERE object_id = '{user_up.object_id}'"
            f" AND content_type = '{content_type}'")
        measurement = timeseries_db.get_list_query(q)[0]
        self.assertEqual(measurement['upload'], 100)

    def test_delete_metric_data(self):
        m = self._create_general_metric(name='test_metric')
        m.write(100)
        self.assertEqual(m.read()[0]['value'], 100)
        timeseries_db.delete_metric_data(key=m.key)
        self.assertEqual(m.read(), [])
        om = self._create_object_metric(name='dummy')
        om.write(50)
        m.write(100)
        self.assertEqual(m.read()[0]['value'], 100)
        self.assertEqual(om.read()[0]['value'], 50)
        timeseries_db.delete_metric_data()
        self.assertEqual(m.read(), [])
        self.assertEqual(om.read(), [])

    def test_get_query_1d(self):
        c = self._create_chart(test_data=None, configuration='uptime')
        q = c.get_query(time='1d')
        last24 = now() - timedelta(days=1)
        self.assertIn(str(last24)[0:14], q)
        self.assertIn('group by time(10m)', q.lower())

    def test_get_query_30d(self):
        c = self._create_chart(test_data=None, configuration='uptime')
        q = c.get_query(time='30d')
        last30d = now() - timedelta(days=30)
        self.assertIn(str(last30d)[0:10], q)
        self.assertIn('group by time(24h)', q.lower())

    def test_retention_policy(self):
        manage_short_retention_policy()
        rp = timeseries_db.get_list_retention_policies()
        self.assertEqual(len(rp), 2)
        self.assertEqual(rp[1]['name'], SHORT_RP)
        self.assertEqual(rp[1]['default'], False)
        duration = SHORT_RETENTION_POLICY
        self.assertEqual(rp[1]['duration'], duration)

    def test_query_set(self):
        c = self._create_chart(configuration='histogram')
        expected = ("SELECT {fields|SUM|/ 1} FROM {key} "
                    "WHERE time >= '{time}' AND content_type = "
                    "'{content_type}' AND object_id = '{object_id}'")
        self.assertEqual(c.query, expected)
        self.assertEqual(
            ''.join(timeseries_db.queries.default_chart_query[0:2]),
            c._default_query)
        c.metric.object_id = None
        self.assertEqual(timeseries_db.queries.default_chart_query[0],
                         c._default_query)

    def test_read_order(self):
        m = self._create_general_metric(name='dummy')
        m.write(30)
        m.write(40, time=now() - timedelta(days=2))
        with self.subTest('Test ascending read order'):
            metric_data = m.read(limit=2, order='time')
            self.assertEqual(metric_data[0]['value'], 40)
            self.assertEqual(metric_data[1]['value'], 30)
        with self.subTest('Test descending read order'):
            metric_data = m.read(limit=2, order='-time')
            self.assertEqual(metric_data[0]['value'], 30)
            self.assertEqual(metric_data[1]['value'], 40)
        with self.subTest('Test invalid read order'):
            with self.assertRaises(timeseries_db.client_error) as e:
                m.read(limit=2, order='invalid')
            self.assertIn('Invalid order "invalid" passed.', str(e.exception))

    def test_read_with_rp(self):
        self._create_admin()
        manage_short_retention_policy()
        with self.subTest(
                'Test metric write on short retention_policy immediate alert'):
            m = self._create_general_metric(name='dummy')
            self._create_alert_settings(metric=m,
                                        custom_operator='<',
                                        custom_threshold=1,
                                        custom_tolerance=0)
            m.write(0, retention_policy=SHORT_RP)
            self.assertEqual(
                m.read(retention_policy=SHORT_RP)[0][m.field_name], 0)
            m.refresh_from_db()
            self.assertEqual(m.is_healthy, False)
            self.assertEqual(m.is_healthy_tolerant, False)
            self.assertEqual(Notification.objects.count(), 1)
        with self.subTest(
                'Test metric write on short retention_policy with deferred alert'
        ):
            m2 = self._create_general_metric(name='dummy2')
            self._create_alert_settings(metric=m2,
                                        custom_operator='<',
                                        custom_threshold=1,
                                        custom_tolerance=1)
            m.write(0,
                    retention_policy=SHORT_RP,
                    time=now() - timedelta(minutes=2))
            self.assertEqual(
                m.read(retention_policy=SHORT_RP)[0][m.field_name], 0)
            m.refresh_from_db()
            self.assertEqual(m.is_healthy, False)
            self.assertEqual(m.is_healthy_tolerant, False)
            self.assertEqual(Notification.objects.count(), 1)

    def test_metric_write_microseconds_precision(self):
        m = self._create_object_metric(name='wlan0',
                                       key='wlan0',
                                       configuration='clients')
        m.write('00:14:5c:00:00:00',
                time=datetime(2020, 7, 31, 22, 5, 47, 235142))
        m.write('00:23:4a:00:00:00',
                time=datetime(2020, 7, 31, 22, 5, 47, 235152))
        self.assertEqual(len(m.read()), 2)

    @patch.object(InfluxDBClient,
                  'write',
                  side_effect=InfluxDBServerError('Server error'))
    @capture_stderr()
    def test_write_retry(self, mock_write):
        with self.assertRaises(TimeseriesWriteException):
            timeseries_db.write('test_write', {'value': 1})
        m = self._create_general_metric(name='Test metric')
        with self.assertRaises(Retry):
            m.write(1)

    @patch.object(
        InfluxDBClient,
        'write',
        side_effect=InfluxDBClientError(
            content='{"error":"partial write: points beyond retention policy dropped=1"}',
            code=400,
        ),
    )
    @capture_stderr()
    def test_write_skip_retry_for_retention_policy(self, mock_write):
        try:
            timeseries_db.write('test_write', {'value': 1})
        except TimeseriesWriteException:
            self.fail(
                'TimeseriesWriteException should not be raised when data '
                'points cross the retention policy')
        m = self._create_general_metric(name='Test metric')
        try:
            m.write(1)
        except Retry:
            self.fail('Writing metric should not be retried when data '
                      'points cross the retention policy')

    @patch.object(InfluxDBClient,
                  'write',
                  side_effect=InfluxDBServerError('Server error'))
    @capture_stderr()
    def test_timeseries_write_params(self, mock_write):
        with freeze_time('Jan 14th, 2020') as frozen_datetime:
            m = self._create_general_metric(name='Test metric')
            with self.assertRaises(Retry) as e:
                m.write(1)
            frozen_datetime.tick(delta=timedelta(minutes=10))
            self.assertEqual(
                now(),
                datetime(2020, 1, 14, tzinfo=tz('UTC')) +
                timedelta(minutes=10))
            task_signature = e.exception.sig
            with patch.object(timeseries_db, 'write') as mock_write:
                self._retry_task(task_signature)
            mock_write.assert_called_with(
                'test_metric',
                {'value': 1},
                database=None,
                retention_policy=None,
                tags={},
                # this should be the original time at the moment of first failure
                timestamp='2020-01-14T00:00:00Z',
                current=False,
            )

    def _retry_task(self, task_signature):
        task_kwargs = task_signature.kwargs
        task_signature.type.run(**task_kwargs)

    @patch.object(InfluxDBClient,
                  'query',
                  side_effect=InfluxDBServerError('Server error'))
    def test_retry_mechanism(self, mock_query):
        max_retries = MONITORING_TIMESERIES_RETRY_OPTIONS.get('max_retries')
        with patch('logging.Logger.info') as mocked_logger:
            try:
                self.test_get_query_fields_function()
            except Exception:
                pass
            self.assertEqual(mocked_logger.call_count, max_retries)
            mocked_logger.assert_called_with(
                'Error while executing method "query":\nServer error\nAttempt '
                f'{max_retries} out of {max_retries}.\n')
Example #11
    def custom_request(self,
                       url,
                       method='GET',
                       params=None,
                       data=None,
                       expected_response_code=200,
                       headers=None):
        """Make a HTTP request to the InfluxDB API.

        :param url: the path of the HTTP request, e.g. write, query, etc.
        :type url: str
        :param method: the HTTP method for the request, defaults to GET
        :type method: str
        :param params: additional parameters for the request, defaults to None
        :type params: dict
        :param data: the data of the request, defaults to None
        :type data: str
        :param expected_response_code: the expected response code of
            the request, defaults to 200
        :type expected_response_code: int
        :param headers: headers to add to the request
        :type headers: dict
        :returns: the response from the request
        :rtype: :class:`requests.Response`
        :raises InfluxDBServerError: if the response code is any server error
            code (5xx)
        :raises InfluxDBClientError: if the response code is not the
            same as `expected_response_code` and is not a server error code
        """
        url = "{0}/{1}".format(self._baseurl, url)

        if headers is None:
            headers = self._headers

        if params is None:
            params = {}

        if isinstance(data, (dict, list)):
            data = json.dumps(data)

        # Try to send the request more than once by default (see #103)
        retry = True
        _try = 0
        while retry:
            try:
                response = self._session.request(method=method,
                                                 url=url,
                                                 auth=(self._username,
                                                       self._password),
                                                 params=params,
                                                 data=data,
                                                 headers=headers,
                                                 proxies=self._proxies,
                                                 verify=self._verify_ssl,
                                                 timeout=self._timeout)
                break
            except requests.exceptions.ConnectionError:
                _try += 1
                if self._retries != 0:
                    retry = _try < self._retries
            except requests.exceptions.ChunkedEncodingError:
                logging.warning(
                    "Broken HTTP session, retrying with a new session")
                self._session = requests.Session()
                _try += 1
                if self._retries != 0:
                    retry = _try < self._retries
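        # The while's else-clause below runs only when the retry budget is
        # exhausted without a successful break out of the loop.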
        else:
            raise requests.exceptions.ConnectionError

        if 500 <= response.status_code < 600:
            raise InfluxDBServerError(response.content)
        elif response.status_code == expected_response_code:
            return response
        else:
            raise InfluxDBClientError(response.content, response.status_code)
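A hedged usage sketch, assuming the surrounding class mirrors influxdb-python's InfluxDBClient internals (_baseurl, _session, _retries, etc.):

response = client.custom_request('query',
                                 params={'q': 'SHOW DATABASES', 'db': 'mydb'})
print(response.json())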
Example #12
    def __init__(self, series, raise_errors=True):
        self._raw = series
        self._error = self._raw.get('error', None)

        if self.error is not None and raise_errors is True:
            raise InfluxDBClientError(self.error)
Example #13

from unittest import mock

import influxdb
from influxdb.exceptions import InfluxDBClientError
from oslotest import base
from oslo_config import cfg
import six

from monasca_persister.repositories.influxdb.metrics_repository import MetricInfluxdbRepository

db_not_found = InfluxDBClientError(
    content='{"error": "database not found: db"}', code=404)


class TestMetricInfluxdbRepository(base.BaseTestCase):
    def setUp(self):
        super(TestMetricInfluxdbRepository, self).setUp()

    def tearDown(self):
        super(TestMetricInfluxdbRepository, self).tearDown()

    def _test_process_message(self, metrics_repo, data_points, metric, tenant):
        _dp, _tenant = metrics_repo.process_message(metric)
        self.assertIsInstance(_dp, six.string_types)
        self.assertEqual(_tenant, tenant)
        data_points.append(_tenant, _dp)