def setUp(self):
    """Build a three-series query response and wrap it in a ResultSet."""
    self.query_response = {
        "results": [{
            "series": [{
                "name": "cpu_load_short",
                "tags": {"host": "server01", "region": "us-west"},
                "columns": ["time", "value"],
                "values": [["2015-01-29T21:51:28.968422294Z", 0.64]]
            }, {
                "name": "cpu_load_short",
                "tags": {"host": "server02", "region": "us-west"},
                "columns": ["time", "value"],
                "values": [["2015-01-29T21:51:28.968422294Z", 0.65]]
            }, {
                "name": "other_serie",
                "tags": {"host": "server01", "region": "us-west"},
                "columns": ["time", "value"],
                "values": [["2015-01-29T21:51:28.968422294Z", 0.66]]
            }]
        }]
    }
    # NOTE(review): the whole response is passed here rather than
    # query_response['results'][0] as sibling fixtures do — confirm this
    # matches the ResultSet constructor of the influxdb version in use.
    self.rs = ResultSet(self.query_response)
def setUp(self):
    """Set up an instance of TestResultSet."""
    def _series(measurement, host, reading):
        # One tagged series with a single timestamped reading.
        return {
            "measurement": measurement,
            "tags": {"host": host, "region": "us-west"},
            "columns": ["time", "value"],
            "values": [["2015-01-29T21:51:28.968422294Z", reading]],
        }

    self.query_response = {
        "results": [{
            "series": [
                _series("cpu_load_short", "server01", 0.64),
                _series("cpu_load_short", "server02", 0.65),
                _series("other_serie", "server01", 0.66),
            ]
        }]
    }
    self.rs = ResultSet(self.query_response['results'][0])
def setUp(self):
    """Set up an instance of TestResultSet."""
    stamp = "2015-01-29T21:51:28.968422294Z"
    self.query_response = {
        "results": [{
            "series": [
                {
                    "name": "cpu_load_short",
                    "columns": ["time", "value", "host", "region"],
                    "values": [
                        [stamp, 0.64, "server01", "us-west"],
                        [stamp, 0.65, "server02", "us-west"],
                    ],
                },
                {
                    "name": "other_series",
                    "columns": ["time", "value", "host", "region"],
                    "values": [
                        [stamp, 0.66, "server01", "us-west"],
                    ],
                },
            ]
        }]
    }
    self.rs = ResultSet(self.query_response['results'][0])
def test_system_query(self):
    """System queries produce an unnamed 'results' series of points."""
    raw = {
        'results': [{
            'series': [{
                'values': [
                    ['another', '48h0m0s', 3, False],
                    ['default', '0', 1, False],
                    ['somename', '24h0m0s', 4, True],
                ],
                'columns': ['name', 'duration', 'replicaN', 'default'],
            }]
        }]
    }
    rs = ResultSet(raw)
    self.assertEqual(rs.keys(), [('results', None)])
    expected_points = [
        {'name': 'another', 'duration': '48h0m0s',
         'replicaN': 3, 'default': False},
        {'name': 'default', 'duration': '0',
         'replicaN': 1, 'default': False},
        {'name': 'somename', 'duration': '24h0m0s',
         'replicaN': 4, 'default': True},
    ]
    self.assertEqual(list(rs['results']), expected_points)
def test_system_query(self):
    """A nameless series is exposed under the 'results' key."""
    retention_rows = [
        ['another', '48h0m0s', 3, False],
        ['default', '0', 1, False],
        ['somename', '24h0m0s', 4, True],
    ]
    rs = ResultSet({
        'series': [{
            'columns': ['name', 'duration', 'replicaN', 'default'],
            'values': retention_rows,
        }]
    })
    self.assertEqual(rs.keys(), [('results', None)])
    self.assertEqual(list(rs['results']), [
        {'name': 'another', 'duration': '48h0m0s',
         'replicaN': 3, 'default': False},
        {'name': 'default', 'duration': '0',
         'replicaN': 1, 'default': False},
        {'name': 'somename', 'duration': '24h0m0s',
         'replicaN': 4, 'default': True},
    ])
def query(self,
          query,
          params=None,
          epoch=None,
          expected_response_code=200,
          database=None,
          raise_errors=True):
    """Send a query to InfluxDB.

    :param query: the actual query string
    :type query: str
    :param params: additional parameters for the request, defaults to {}
    :type params: dict
    :param epoch: response timestamps to be in epoch format either 'h',
        'm', 's', 'ms', 'u', or 'ns'; defaults to `None`, which is
        RFC3339 UTC format with nanosecond precision
    :type epoch: str
    :param expected_response_code: the expected status code of response,
        defaults to 200
    :type expected_response_code: int
    :param database: database to query, defaults to None
    :type database: str
    :param raise_errors: Whether or not to raise exceptions when InfluxDB
        returns errors, defaults to True
    :type raise_errors: bool
    :returns: the queried data
    :rtype: :class:`~.ResultSet`
    """
    if params is None:
        params = {}

    params['q'] = query
    params['db'] = database or self._database

    if epoch is not None:
        params['epoch'] = epoch

    # Read-only statements (SELECT/SHOW) go over GET; all others POST.
    method = 'POST'
    if query.split() and query.split()[0].upper() in ('SELECT', 'SHOW'):
        method = 'GET'

    response = self.request(url="query",
                            method=method,
                            params=params,
                            data=None,
                            expected_response_code=expected_response_code)

    data = response.json()

    results = [
        ResultSet(result, raise_errors=raise_errors)
        for result in data.get('results', [])
    ]

    # TODO(aviau): Always return a list. (This would be a breaking change)
    if len(results) == 1:
        return results[0]
    else:
        return results
def query_result(*args, **kwargs):
    """Return a three-host ``node_hardware`` ResultSet fixture."""
    from influxdb.resultset import ResultSet

    def _series(host, status):
        # One series per host carrying a single status reading.
        return {
            "name": "node_hardware",
            "tags": {"host": host},
            "columns": ["time", "val", "host"],
            "values": [["2018-01-22T15:51:28.968422294Z", status, host]],
        }

    query_response = {
        "results": [{
            "series": [
                _series("server01", "ok"),
                _series("server02", "critical"),
                _series("server05", "critical"),
            ]
        }]
    }
    return ResultSet(query_response["results"][0])
def setup(self):
    """Prepare a four-measurement ResultSet and three dummy points."""
    measurements = ['cpu', 'iops', 'load', 'memory']
    self.test_result_set = ResultSet({
        'series': [
            {
                'values': [['value', 'integer']],
                'name': measurement,
                'columns': ['fieldKey', 'fieldType'],
            }
            for measurement in measurements
        ]
    })
    template = {
        "cpu_load": 0.50,
        "time": "2009-11-10T23:00:00.123456Z",
    }
    # Three independent copies, so mutating one cannot affect the others.
    self.dummy_points = [dict(template) for _ in range(3)]
def query_result(*args, **kwargs):
    """Return a two-host ``node_active`` ResultSet fixture."""
    from influxdb.resultset import ResultSet
    stamp = "2018-01-22T15:51:28.968422294Z"
    query_response = {
        "results": [{
            "series": [
                {
                    "name": "node_active",
                    "tags": {"host": "head"},
                    "columns": ["time", "value", "host"],
                    "values": [[stamp, "on", "head"]],
                },
                {
                    "name": "node_active",
                    "tags": {"host": "compute"},
                    "columns": ["time", "value", "host"],
                    # Tag says "compute" while the row says "compute1";
                    # reproduced as-is from the original fixture.
                    "values": [[stamp, "off", "compute1"]],
                },
            ]
        }]
    }
    return ResultSet(query_response["results"][0])
def test_point_from_cols_vals(self):
    """Test points from columns in TestResultSet object."""
    columns = ['col1', 'col2']
    row = [1, '2']
    built_point = ResultSet.point_from_cols_vals(columns, row)
    self.assertDictEqual(built_point, {'col1': 1, 'col2': '2'})
def setUp(self):
    """Set up an instance of TestResultSet."""
    when = "2015-01-29T21:51:28.968422294Z"
    cpu_series = {
        "name": "cpu_load_short",
        "columns": ["time", "value", "host", "region"],
        "values": [
            [when, 0.64, "server01", "us-west"],
            [when, 0.65, "server02", "us-west"],
        ],
    }
    other_series = {
        "name": "other_series",
        "columns": ["time", "value", "host", "region"],
        "values": [
            [when, 0.66, "server01", "us-west"],
        ],
    }
    self.query_response = {
        "results": [{"series": [cpu_series, other_series]}]
    }
    self.rs = ResultSet(self.query_response['results'][0])
def setUp(self):
    """Build a three-series tagged response and wrap its first result."""
    specs = [
        ("cpu_load_short", "server01", 0.64),
        ("cpu_load_short", "server02", 0.65),
        ("other_serie", "server01", 0.66),
    ]
    self.query_response = {
        "results": [{
            "series": [
                {
                    "measurement": name,
                    "tags": {"host": host, "region": "us-west"},
                    "columns": ["time", "value"],
                    "values": [["2015-01-29T21:51:28.968422294Z",
                                reading]],
                }
                for name, host, reading in specs
            ]
        }]
    }
    self.rs = ResultSet(self.query_response['results'][0])
def query_result(*args, **kwargs):
    """Return a ``node_gpu_temp`` ResultSet fixture with three readings."""
    from influxdb.resultset import ResultSet
    stamp = "2018-01-22T15:51:28.968422294Z"
    gpu_series = {
        "name": "node_gpu_temp",
        "columns": ["time", "val", "host", "index"],
        "values": [
            [stamp, 96, "server01", 0],
            [stamp, 100, "server01", 1],
            [stamp, 10, "server05", 1],
        ],
    }
    query_response = {"results": [{"series": [gpu_series]}]}
    return ResultSet(query_response["results"][0])
def send_selection_query(
        self, query: SelectionQuery) -> ResultSet:  # type: ignore
    """Sends a single `SELECT` or `DELETE` query to influx server.

    Flushes the insert buffer first if any queried table has pending
    inserts, then records per-table row counts and elapsed time into
    the metrics buffer.

    Arguments:
        query {Selection_Query} -- Query which should be executed

    Raises:
        ValueError: no SelectionQuery is given.

    Returns:
        ResultSet -- Result of the Query, Empty if `DELETE`
    """
    if (not query or not isinstance(query, SelectionQuery)):
        raise ValueError("a selection query must be given")

    # check if any buffered table is selected, flushes buffer
    for table in query.tables:
        if (table in self.__insert_buffer):
            self.flush_insert_buffer()
            break

    # Convert querys to strings
    query_str = query.to_query()

    start_time = time.perf_counter()
    # Send querys
    try:
        result = self.__client.query(  # type: ignore
            query=query_str, epoch='s',
            database=self.database.name)

    except (InfluxDBServerError,
            InfluxDBClientError) as err:  # type: ignore
        ExceptionUtils.exception_info(
            error=err,
            extra_message="error when sending select statement"
        )  # type: ignore
        # result to maintain structure
        # raise errors = false since we did catch a error
        result: ResultSet = ResultSet({}, raise_errors=False)  # type: ignore

    end_time = time.perf_counter()

    # if nothing is returned add count = 0 and table
    # also possible by `list(result.get_points())`,
    # but that is lot of compute action
    if (result):
        # NOTE(review): only the first series is counted — confirm this
        # is correct when a query returns multiple series.
        length = len(result.raw['series'][0]['values'])  # type: ignore
    else:
        length = 0

    tables_count: Dict[Table, int] = {}
    for table in query.tables:
        # Rows are attributed evenly across the queried tables.
        tables_count[table] = int(length / len(query.tables))

    self.__insert_metrics_to_buffer(
        query.keyword, tables_count, end_time - start_time)

    return result  # type: ignore
def test_summary(mocker):
    """Run cluster/group/rack summary tasks against a mocked cache."""
    from influxdb.resultset import ResultSet
    query_response = {
        "results": [{
            "series": [{
                "name": "node_active",
                "tags": {"host": "head"},
                "columns": ["time", "value", "host"],
                "values": [["2018-01-22T15:51:28.968422294Z", "123",
                            "head"]]
            }, {
                "name": "node_active",
                "tags": {"host": "compute"},
                "columns": ["time", "value", "host"],
                "values": [["2018-01-22T15:51:28.968422294Z", "456",
                            "compute1"]]
            }]
        }]
    }
    mock = mocker.patch('antilles.cluster.tasks.summary.cache')
    # cluster/group summaries are fed a *list* of ResultSets here.
    mock.get.return_value = [ResultSet(query_response["results"][0])]

    summary.cluster_summary('cluster', ['head'])
    mock.get.assert_called()
    mock.set.assert_called_once()

    mock.reset_mock()
    summary.group_summary('cluster', ['head'])
    mock.get.assert_called()
    mock.set.assert_called_once()

    mock.reset_mock()
    # rack summary is fed a bare ResultSet instead of a list.
    mock.get.return_value = ResultSet(query_response["results"][0])
    summary.rack_summary('cluster', ['head'])
    mock.get.assert_called_once()
    mock.set.assert_called_once()
def test_rack_detail_view(client, mocker):
    """The rack detail endpoint returns name, energy and node data."""
    mocker.patch(
        'antilles.cluster.datasource.DataSource.get_metric_data',
        return_value='0')
    mocker.patch("django.core.cache.cache.get",
                 return_value=ResultSet({}))

    response = client.get('/racks/1/')

    assert response.status_code == HTTP_200_OK
    rack = response.data['rack']
    assert rack['name'] == 'rack1'
    assert rack['energy'] == '0'
    assert rack['nodes'][0]['machinetype'] == 'ibm'
def test_point_from_cols_vals(self):
    """point_from_cols_vals zips column names with row values."""
    names = ['col1', 'col2']
    row_values = [1, '2']
    result = ResultSet.point_from_cols_vals(names, row_values)
    self.assertDictEqual(result, {'col1': 1, 'col2': '2'})
def test_point_from_cols_vals(self):
    """Test points from columns in TestResultSet object."""
    header = ['col1', 'col2']
    row = [1, '2']
    self.assertDictEqual(
        ResultSet.point_from_cols_vals(header, row),
        {'col1': 1, 'col2': '2'},
    )
def float_influxdb_data():
    """Return a ResultSet holding one float ``cluster_mem`` reading."""
    from influxdb.resultset import ResultSet
    mem_series = {
        'name': 'cluster_mem',
        'columns': ['time', 'last'],
        'values': [['2018-02-02T06:23:22.422247936Z', 107.14]],
    }
    return ResultSet({'series': [mem_series]})
def string_influxdb_data():
    """Return a ResultSet holding one string ``node_active`` reading."""
    from influxdb.resultset import ResultSet
    active_series = {
        'name': 'node_active',
        'columns': ['time', 'last'],
        'values': [['2018-02-02T06:23:22.422247936Z', "on"]],
    }
    return ResultSet({'series': [active_series]})
def _read_chunked_response(response, raise_errors=True):
    """Merge every chunk of a chunked query response into one ResultSet."""
    merged = {}
    for raw_line in response.iter_lines():
        if isinstance(raw_line, bytes):
            raw_line = raw_line.decode('utf-8')
        chunk = json.loads(raw_line)
        for result in chunk.get('results', []):
            # Concatenate list-valued fields (e.g. 'series') across chunks.
            for key, value in result.items():
                if isinstance(value, list):
                    merged.setdefault(key, []).extend(value)
    return ResultSet(merged, raise_errors=raise_errors)
def influx_get_current(measurement, meter_id):
    """Fetch the latest reading for a meter from the influx database."""
    client = connect_influx()
    influx_query = GET_CURRENT_QUERY.format(measurement, meter_id)
    raw = client.query(influx_query, epoch="m",
                       database=settings.DATABASES['influx']['NAME']).raw

    # An empty series means the measurement/meter combination is unknown.
    if not raw['series']:
        raise UnknownParameter('Measurement \'{0}\' or energy meter with id \'{1}\' not in database.'.format(measurement, meter_id))

    first_series = raw['series'][0]
    return {
        'measurement': first_series['name'],
        'data': ResultSet.point_from_cols_vals(
            cols=first_series['columns'],
            vals=first_series['values'][0]),
    }
def _result_set_to_df(result_set: ResultSet):
    """Convert a ResultSet into one concatenated DataFrame, or None."""
    frames = []
    for (measurement, tags), rows in result_set.items():
        frame = pd.DataFrame(list(rows))
        # Re-attach each tag as a constant column.
        for tag_name, tag_value in tags.items():
            frame[tag_name] = tag_value
        table = _check_table(measurement.split('_')[0])
        frames.append(table.__transform__(frame))
    if not frames:
        return None
    return pd.concat(frames, ignore_index=True)
def query(self, query, params=None, database=None, raise_errors=True):
    """Run *query* against the server and return the parsed results.

    :param query: the query string to execute
    :param params: extra request query-string parameters, defaults to {}
    :param database: database to run the query against, if any
    :param raise_errors: whether each ResultSet raises on server errors
    :returns: (via ``gen.Return``) a list of :class:`ResultSet`,
        one per result in the response
    """
    params = params or {}
    params['q'] = query
    if database:
        params['db'] = database
    response = yield self.request('/query', qs=params)
    result_set = [
        ResultSet(result, raise_errors=raise_errors)
        for result in response.get('results', [])
    ]
    # FIX: removed leftover debug `print(list(result_set[0]['columns']))`;
    # it polluted stdout and raised IndexError on empty result lists.
    raise gen.Return(result_set)
async def query(self, query, params=None, bind_params=None, epoch=None,
                expected_response_code=200, database=None,
                raise_errors=True, chunked=False, chunk_size=0,
                method='GET'):
    """Send a query to InfluxDB and return the parsed result(s).

    :param query: the actual query string
    :param params: additional parameters for the request, defaults to {}
    :param bind_params: bind parameters merged into ``params['params']``,
        taking precedence over existing entries
    :param epoch: response timestamp precision ('h', 'm', 's', 'ms',
        'u' or 'ns'); `None` keeps RFC3339 UTC nanosecond format
    :param expected_response_code: expected HTTP status, defaults to 200
    :param database: database to query; falls back to the client default
    :param raise_errors: whether ResultSet raises when InfluxDB reports
        an error
    :param chunked: ask InfluxDB for a chunked response
    :param chunk_size: chunk size to request (0 means server default)
    :param method: HTTP method, defaults to GET; forced to POST for
        ``SELECT ... INTO`` queries
    :returns: a single :class:`~.ResultSet`, or a list when the response
        carries several results
    """
    if params is None:
        params = {}

    if bind_params is not None:
        params_dict = json.loads(params.get('params', '{}'))
        params_dict.update(bind_params)
        params['params'] = json.dumps(params_dict)

    params['q'] = query
    params['db'] = database or self._database

    if epoch is not None:
        params['epoch'] = epoch

    if chunked:
        params['chunked'] = 'true'
        if chunk_size > 0:
            params['chunk_size'] = chunk_size

    # SELECT ... INTO writes data, so it must not be sent as a GET.
    if query.lower().startswith('select ') and ' into ' in query.lower():
        method = 'POST'

    # noinspection PyTypeChecker
    data = await self.request(
        url='query',
        method=method,
        params=params,
        data=None,
        expected_response_code=expected_response_code)

    results = [
        ResultSet(result, raise_errors=raise_errors)
        for result in data.get('results', [])
    ]

    # Single-result responses are unwrapped for caller convenience.
    if len(results) == 1:
        return results[0]

    return results
def test__query_measurement(self):
    """_query_measurement returns None when the result has no keys."""
    helper = vcmts_vnf.InfluxDBHelper("localhost", 8086)
    helper._read_client = mock.MagicMock()

    # Empty key list -> no data -> None.
    empty_result = mock.MagicMock()
    empty_result.keys.return_value = []
    helper._read_client.query.return_value = empty_result
    query_result = helper._query_measurement('cpu_value')
    self.assertIsNone(query_result)

    # Populated key list -> points are returned.
    populated_result = mock.MagicMock()
    populated_result.keys.return_value = ["", ""]
    populated_result.get_points.return_value = ResultSet({"": ""})
    helper._read_client.query.return_value = populated_result
    query_result = helper._query_measurement('cpu_value')
    self.assertIsNotNone(query_result)
def query_result():
    """Return a single-host ``node_cpu`` ResultSet fixture."""
    from influxdb.resultset import ResultSet
    cpu_series = {
        "name": "node_cpu",
        "columns": ["time", "val", "host"],
        "values": [
            ["2018-01-22T15:51:28.968422294Z", 90, "server01"],
        ],
    }
    return ResultSet({"series": [cpu_series]})
def test_get_latest_timestamp(self):
    """Checks latest timestamp returned."""
    measurement_name = 'test'
    ifclient = InfluxDBClient()
    consumption_series = {
        "name": measurement_name,
        "columns": ["time", "Consumption"],
        "values": [["2020-04-14T11:15:00Z", 137.7605]],
    }
    ifclient.query = MagicMock(
        return_value=ResultSet({"series": [consumption_series]}))

    got = ifclient.get_latest_timestamp(measurement_name)

    # The RFC3339 time above as a UTC-aware datetime.
    expected = pytz.utc.localize(datetime.datetime(2020, 4, 14, 11, 15))
    self.assertEqual(got, expected)
def test_adds_all_values(self):
    """read_query should surface every value of the stubbed ResultSet."""
    test_result_set = ResultSet({
        "time": 1,
        "message": "success!"
    })
    # mockito stubs: get_points and the downstream DataFrame construction
    # are matched by exact arguments, so the dicts below must stay in sync.
    when2(test_result_set.get_points).thenReturn({
        "time": 1,
        "message": "success!"
    })
    test_dataframe = pandas.DataFrame({'time': [1],
                                       'message': ['success!']})
    when2(InfluxDBClient.query,
          "test_measurement").thenReturn(test_result_set)
    when2(pandas.DataFrame,
          {"time": 1, "message": "success!"}).thenReturn(test_dataframe)

    output = rm.read_query("test", "test_measurement")

    self.assertEqual(len(output["message"]), 1)
    # Epoch 1ns is rendered as this timestamp index by the reader.
    self.assertTrue(
        "success!" in output["message"]["1970-01-01 00:00:00.000000001"])
def test_get_current_time(self, mocked_client):
    """get_current_time should parse the diagnostics 'currentTime' field."""
    # Mock response from InfluxDB
    diagnostics = ResultSet({'series': [
        {'name': 'build',
         'columns': ['Branch', 'Build Time', 'Commit', 'Version'],
         'values': [['0.12', '',
                     'e094138084855d444195b252314dfee9eae34cab',
                     '0.12.1']]},
        {'name': 'network',
         'columns': ['hostname'],
         'values': [['raspberrypi']]},
        {'name': 'runtime',
         'columns': ['GOARCH', 'GOMAXPROCS', 'GOOS', 'version'],
         'values': [['arm', 4, 'linux', 'go1.4.3']]},
        {'name': 'system',
         'columns': ['PID', 'currentTime', 'started', 'uptime'],
         'values': [[561, '2016-04-15T21:29:31.886241629Z',
                     '2016-04-15T21:21:10.677939741Z',
                     '8m21.2083047s']]}]})
    InfluxDB.query = mock.MagicMock(return_value=diagnostics)

    # Code to test
    influx = InfluxDB('host', '9999', 'user', 'password', 'mockdb')
    current_time = influx.get_current_time()

    # Nanoseconds are truncated to microseconds by the parser.
    self.assertEqual(current_time,
                     datetime(2016, 4, 15, 21, 29, 31, 886241,
                              tzinfo=tzutc()))
def test_chunked_response(self):
    """Test chunked reponse for TestInfluxDBClient object."""
    # Four newline-delimited JSON chunks; the first three are 'partial'.
    example_response = \
        u'{"results":[{"statement_id":0,"series":' \
        '[{"name":"cpu","columns":["fieldKey","fieldType"],"values":' \
        '[["value","integer"]]}],"partial":true}]}\n{"results":' \
        '[{"statement_id":0,"series":[{"name":"iops","columns":' \
        '["fieldKey","fieldType"],"values":[["value","integer"]]}],' \
        '"partial":true}]}\n{"results":[{"statement_id":0,"series":' \
        '[{"name":"load","columns":["fieldKey","fieldType"],"values":' \
        '[["value","integer"]]}],"partial":true}]}\n{"results":' \
        '[{"statement_id":0,"series":[{"name":"memory","columns":' \
        '["fieldKey","fieldType"],"values":[["value","integer"]]}]}]}\n'

    with requests_mock.Mocker() as m:
        m.register_uri(requests_mock.GET,
                       "http://localhost:8086/query",
                       text=example_response)
        response = self.cli.query('show series limit 4 offset 0',
                                  chunked=True, chunk_size=4)
        # All four chunks must be merged into a single ResultSet.
        self.assertTrue(len(response) == 4)
        self.assertEqual(
            response.__repr__(),
            ResultSet({
                'series': [{
                    'values': [['value', 'integer']],
                    'name': 'cpu',
                    'columns': ['fieldKey', 'fieldType']
                }, {
                    'values': [['value', 'integer']],
                    'name': 'iops',
                    'columns': ['fieldKey', 'fieldType']
                }, {
                    'values': [['value', 'integer']],
                    'name': 'load',
                    'columns': ['fieldKey', 'fieldType']
                }, {
                    'values': [['value', 'integer']],
                    'name': 'memory',
                    'columns': ['fieldKey', 'fieldType']
                }]
            }).__repr__())
def query_result(*args, **kwargs):
    """Return a three-host ``node_disk`` ResultSet fixture."""
    from influxdb.resultset import ResultSet
    stamp = "2018-01-22T15:51:28.968422294Z"
    disk_series = {
        "name": "node_disk",
        "columns": ["time", "val", "host"],
        "values": [
            [stamp, 20, "server01"],
            [stamp, 70, "server02"],
            [stamp, 30, "server03"],
        ],
    }
    return ResultSet({"series": [disk_series]})
def query(self,
          query,
          params=None,
          epoch=None,
          expected_response_code=200,
          database=None,
          raise_errors=True,
          chunked=False,
          chunk_size=0,
          stream=False):
    """Send a query to InfluxDB.

    :param query: the actual query string
    :type query: str
    :param params: additional parameters for the request, defaults to {}
    :type params: dict
    :param epoch: response timestamps to be in epoch format either 'h',
        'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is
        RFC3339 UTC format with nanosecond precision
    :type epoch: str
    :param expected_response_code: the expected status code of response,
        defaults to 200
    :type expected_response_code: int
    :param database: database to query, defaults to None
    :type database: str
    :param raise_errors: Whether or not to raise exceptions when InfluxDB
        returns errors, defaults to True
    :type raise_errors: bool
    :param chunked: Enable to use chunked responses from InfluxDB.
        Normally all chunks are automaticly combined into one huge
        ResultSet, unless you use ``stream``.
    :type chunked: bool
    :param chunk_size: Size of each chunk to tell InfluxDB to use.
    :type chunk_size: int
    :param stream: Will stream the data and return a generator that
        generates one ResultSet per chunk. This allows for huge datasets
        with virtually no limit.
    :type stream: bool
    :returns: the queried data
    :rtype: :class:`~.ResultSet`
    """
    if params is None:
        params = {}

    params['q'] = query
    params['db'] = database or self._database

    if epoch is not None:
        params['epoch'] = epoch

    if chunked:
        params['chunked'] = 'true'
        if chunk_size > 0:
            params['chunk_size'] = chunk_size

    response = self.request(url="query",
                            method='GET',
                            params=params,
                            data=None,
                            expected_response_code=expected_response_code,
                            stream=stream)

    if chunked:
        # Streamed: yield one ResultSet per chunk.
        # Non-streamed: merge all chunks into a single ResultSet.
        if stream:
            return self._read_chunked_response_generator(
                response, raise_errors)
        else:
            return self._read_chunked_response(response, raise_errors)

    data = response.json()

    results = [
        ResultSet(result, raise_errors=raise_errors)
        for result in data.get('results', [])
    ]

    # TODO(aviau): Always return a list. (This would be a breaking change)
    if len(results) == 1:
        return results[0]

    return results
def query(self,
          query,
          params=None,
          bind_params=None,
          epoch=None,
          expected_response_code=200,
          database=None,
          raise_errors=True,
          chunked=False,
          chunk_size=0,
          method="GET"):
    """Send a query to InfluxDB.

    .. danger::
        In order to avoid injection vulnerabilities (similar to `SQL
        injection <https://www.owasp.org/index.php/SQL_Injection>`_
        vulnerabilities), do not directly include untrusted data into the
        ``query`` parameter, use ``bind_params`` instead.

    :param query: the actual query string
    :type query: str
    :param params: additional parameters for the request, defaults to {}
    :type params: dict
    :param bind_params: bind parameters for the query: any variable in
        the query written as ``'$var_name'`` will be replaced with
        ``bind_params['var_name']``. Only works in the ``WHERE`` clause
        and takes precedence over ``params['params']``
    :type bind_params: dict
    :param epoch: response timestamps to be in epoch format either 'h',
        'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is
        RFC3339 UTC format with nanosecond precision
    :type epoch: str
    :param expected_response_code: the expected status code of response,
        defaults to 200
    :type expected_response_code: int
    :param database: database to query, defaults to None
    :type database: str
    :param raise_errors: Whether or not to raise exceptions when InfluxDB
        returns errors, defaults to True
    :type raise_errors: bool
    :param chunked: Enable to use chunked responses from InfluxDB.
        With ``chunked`` enabled, one ResultSet is returned per chunk
        containing all results within that chunk
    :type chunked: bool
    :param chunk_size: Size of each chunk to tell InfluxDB to use.
    :type chunk_size: int
    :param method: the HTTP method for the request, defaults to GET
    :type method: str
    :returns: the queried data
    :rtype: :class:`~.ResultSet`
    """
    if params is None:
        params = {}

    if bind_params is not None:
        params_dict = json.loads(params.get('params', '{}'))
        params_dict.update(bind_params)
        params['params'] = json.dumps(params_dict)

    params['q'] = query
    params['db'] = database or self._database

    if epoch is not None:
        params['epoch'] = epoch

    if chunked:
        params['chunked'] = 'true'
        if chunk_size > 0:
            params['chunk_size'] = chunk_size

    # SELECT ... INTO writes data, so it must not be sent as a GET.
    if query.lower().startswith("select ") and " into " in query.lower():
        method = "POST"

    response = self.request(url="query",
                            method=method,
                            params=params,
                            data=None,
                            expected_response_code=expected_response_code)

    if chunked:
        # FIX: forward raise_errors so chunked responses honour it too;
        # previously it was silently dropped on this path.
        return self._read_chunked_response(response, raise_errors)

    data = response.json()

    results = [
        ResultSet(result, raise_errors=raise_errors)
        for result in data.get('results', [])
    ]

    # TODO(aviau): Always return a list. (This would be a breaking change)
    if len(results) == 1:
        return results[0]

    return results
def query(self,
          query,
          params=None,
          epoch=None,
          expected_response_code=200,
          database=None,
          raise_errors=True,
          chunked=False,
          chunk_size=0):
    """Send a query to InfluxDB.

    :param query: the actual query string
    :type query: str
    :param params: additional parameters for the request, defaults to {}
    :type params: dict
    :param epoch: response timestamps to be in epoch format either 'h',
        'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is
        RFC3339 UTC format with nanosecond precision
    :type epoch: str
    :param expected_response_code: the expected status code of response,
        defaults to 200
    :type expected_response_code: int
    :param database: database to query, defaults to None
    :type database: str
    :param raise_errors: Whether or not to raise exceptions when InfluxDB
        returns errors, defaults to True
    :type raise_errors: bool
    :param chunked: Enable to use chunked responses from InfluxDB.
        With ``chunked`` enabled, one ResultSet is returned per chunk
        containing all results within that chunk
    :type chunked: bool
    :param chunk_size: Size of each chunk to tell InfluxDB to use.
    :type chunk_size: int
    :returns: the queried data
    :rtype: :class:`~.ResultSet`
    """
    if params is None:
        params = {}

    params['q'] = query
    params['db'] = database or self._database

    if epoch is not None:
        params['epoch'] = epoch

    if chunked:
        params['chunked'] = 'true'
        if chunk_size > 0:
            params['chunk_size'] = chunk_size

    response = self.request(
        url="query",
        method='GET',
        params=params,
        data=None,
        expected_response_code=expected_response_code
    )

    if chunked:
        # FIX: forward raise_errors so chunked responses honour it too;
        # previously it was silently dropped on this path.
        return self._read_chunked_response(response, raise_errors)

    data = response.json()

    results = [
        ResultSet(result, raise_errors=raise_errors)
        for result in data.get('results', [])
    ]

    # TODO(aviau): Always return a list. (This would be a breaking change)
    if len(results) == 1:
        return results[0]

    return results
class TestResultSet(unittest.TestCase):
    """Define the ResultSet test object."""

    def setUp(self):
        """Set up an instance of TestResultSet."""
        self.query_response = {
            "results": [
                {"series": [{"name": "cpu_load_short",
                             "columns": ["time", "value", "host",
                                         "region"],
                             "values": [
                                 ["2015-01-29T21:51:28.968422294Z",
                                  0.64, "server01", "us-west"],
                                 ["2015-01-29T21:51:28.968422294Z",
                                  0.65, "server02", "us-west"],
                             ]},
                            {"name": "other_series",
                             "columns": ["time", "value", "host",
                                         "region"],
                             "values": [
                                 ["2015-01-29T21:51:28.968422294Z",
                                  0.66, "server01", "us-west"],
                             ]}]}
            ]
        }

        self.rs = ResultSet(self.query_response['results'][0])

    def test_filter_by_name(self):
        """Test filtering by name in TestResultSet object."""
        expected = [
            {'value': 0.64,
             'time': '2015-01-29T21:51:28.968422294Z',
             'host': 'server01',
             'region': 'us-west'},
            {'value': 0.65,
             'time': '2015-01-29T21:51:28.968422294Z',
             'host': 'server02',
             'region': 'us-west'},
        ]
        # The mapping interface and get_points must agree.
        self.assertEqual(expected, list(self.rs['cpu_load_short']))
        self.assertEqual(expected,
                         list(self.rs.get_points(
                             measurement='cpu_load_short')))

    def test_filter_by_tags(self):
        """Test filter by tags in TestResultSet object."""
        expected = [
            {'value': 0.64,
             'time': '2015-01-29T21:51:28.968422294Z',
             'host': 'server01',
             'region': 'us-west'},
            {'value': 0.66,
             'time': '2015-01-29T21:51:28.968422294Z',
             'host': 'server01',
             'region': 'us-west'},
        ]
        self.assertEqual(
            expected,
            list(self.rs[{"host": "server01"}])
        )
        self.assertEqual(
            expected,
            list(self.rs.get_points(tags={'host': 'server01'}))
        )

    def test_filter_by_name_and_tags(self):
        """Test filter by name and tags in TestResultSet object."""
        self.assertEqual(
            list(self.rs[('cpu_load_short', {"host": "server01"})]),
            [{'value': 0.64,
              'time': '2015-01-29T21:51:28.968422294Z',
              'host': 'server01',
              'region': 'us-west'}]
        )

        self.assertEqual(
            list(self.rs[('cpu_load_short', {"region": "us-west"})]),
            [
                {'value': 0.64,
                 'time': '2015-01-29T21:51:28.968422294Z',
                 'host': 'server01',
                 'region': 'us-west'},
                {'value': 0.65,
                 'time': '2015-01-29T21:51:28.968422294Z',
                 'host': 'server02',
                 'region': 'us-west'},
            ]
        )

    def test_keys(self):
        """Test keys in TestResultSet object."""
        # Untagged series are keyed by (name, None).
        self.assertEqual(
            self.rs.keys(),
            [
                ('cpu_load_short', None),
                ('other_series', None),
            ]
        )

    def test_len(self):
        """Test length in TestResultSet object."""
        self.assertEqual(
            len(self.rs),
            2
        )

    def test_items(self):
        """Test items in TestResultSet object."""
        items = list(self.rs.items())
        items_lists = [(item[0], list(item[1])) for item in items]

        self.assertEqual(
            items_lists,
            [
                (
                    ('cpu_load_short', None),
                    [
                        {'time': '2015-01-29T21:51:28.968422294Z',
                         'value': 0.64,
                         'host': 'server01',
                         'region': 'us-west'},
                        {'time': '2015-01-29T21:51:28.968422294Z',
                         'value': 0.65,
                         'host': 'server02',
                         'region': 'us-west'}]),
                (
                    ('other_series', None),
                    [
                        {'time': '2015-01-29T21:51:28.968422294Z',
                         'value': 0.66,
                         'host': 'server01',
                         'region': 'us-west'}])]
        )

    def test_point_from_cols_vals(self):
        """Test points from columns in TestResultSet object."""
        cols = ['col1', 'col2']
        vals = [1, '2']

        point = ResultSet.point_from_cols_vals(cols, vals)
        self.assertDictEqual(
            point,
            {'col1': 1, 'col2': '2'}
        )

    def test_system_query(self):
        """Test system query capabilities in TestResultSet object."""
        rs = ResultSet(
            {'series': [
                {'values': [['another', '48h0m0s', 3, False],
                            ['default', '0', 1, False],
                            ['somename', '24h0m0s', 4, True]],
                 'columns': ['name', 'duration',
                             'replicaN', 'default']}]}
        )

        self.assertEqual(
            rs.keys(),
            [('results', None)]
        )

        self.assertEqual(
            list(rs['results']),
            [
                {'duration': '48h0m0s', 'default': False, 'replicaN': 3,
                 'name': 'another'},
                {'duration': '0', 'default': False, 'replicaN': 1,
                 'name': 'default'},
                {'duration': '24h0m0s', 'default': True, 'replicaN': 4,
                 'name': 'somename'}
            ]
        )

    def test_resultset_error(self):
        """Test returning error in TestResultSet object."""
        with self.assertRaises(InfluxDBClientError):
            ResultSet({
                "series": [],
                "error": "Big error, many problems."
            })
class TestResultSet(unittest.TestCase):
    """Exercise ResultSet filtering, keys, items and error behaviour."""

    def setUp(self):
        """Build a three-series tagged response and wrap its first result."""
        self.query_response = {
            "results": [
                {"series": [{"measurement": "cpu_load_short",
                             "tags": {"host": "server01",
                                      "region": "us-west"},
                             "columns": ["time", "value"],
                             "values": [
                                 ["2015-01-29T21:51:28.968422294Z", 0.64]
                             ]},
                            {"measurement": "cpu_load_short",
                             "tags": {"host": "server02",
                                      "region": "us-west"},
                             "columns": ["time", "value"],
                             "values": [
                                 ["2015-01-29T21:51:28.968422294Z", 0.65]
                             ]},
                            {"measurement": "other_serie",
                             "tags": {"host": "server01",
                                      "region": "us-west"},
                             "columns": ["time", "value"],
                             "values": [
                                 ["2015-01-29T21:51:28.968422294Z", 0.66]
                             ]}]}
            ]
        }

        self.rs = ResultSet(self.query_response['results'][0])

    def test_filter_by_name(self):
        """Filtering by measurement name yields both cpu series' points."""
        expected = [
            {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'},
            {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'}
        ]

        self.assertEqual(expected, list(self.rs['cpu_load_short']))
        self.assertEqual(expected,
                         list(self.rs.get_points(
                             measurement='cpu_load_short')))

    def test_filter_by_tags(self):
        """Filtering by a tag yields points from every matching series."""
        expected = [
            {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64},
            {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.66}
        ]

        self.assertEqual(
            expected,
            list(self.rs[{"host": "server01"}])
        )
        self.assertEqual(
            expected,
            list(self.rs.get_points(tags={'host': 'server01'}))
        )

    def test_filter_by_name_and_tags(self):
        """Filtering by (name, tags) intersects both criteria."""
        self.assertEqual(
            list(self.rs[('cpu_load_short', {"host": "server01"})]),
            [{'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}]
        )

        self.assertEqual(
            list(self.rs[('cpu_load_short', {"region": "us-west"})]),
            [
                {'value': 0.64,
                 'time': '2015-01-29T21:51:28.968422294Z'},
                {'value': 0.65,
                 'time': '2015-01-29T21:51:28.968422294Z'}
            ]
        )

    def test_keys(self):
        """Each tagged series gets its own (measurement, tags) key."""
        self.assertEqual(
            self.rs.keys(),
            [
                ('cpu_load_short', {'host': 'server01',
                                    'region': 'us-west'}),
                ('cpu_load_short', {'host': 'server02',
                                    'region': 'us-west'}),
                ('other_serie', {'host': 'server01',
                                 'region': 'us-west'})
            ]
        )

    def test_len(self):
        """The ResultSet length equals the number of series."""
        self.assertEqual(
            len(self.rs),
            3
        )

    def test_items(self):
        """items() pairs each series key with its point iterator."""
        items = list(self.rs.items())
        items_lists = [(item[0], list(item[1])) for item in items]

        self.assertEqual(
            items_lists,
            [
                (
                    ('cpu_load_short',
                     {'host': 'server01', 'region': 'us-west'}),
                    [{'value': 0.64,
                      'time': '2015-01-29T21:51:28.968422294Z'}]
                ),
                (
                    ('cpu_load_short',
                     {'host': 'server02', 'region': 'us-west'}),
                    [{'value': 0.65,
                      'time': '2015-01-29T21:51:28.968422294Z'}]
                ),
                (
                    ('other_serie',
                     {'host': 'server01', 'region': 'us-west'}),
                    [{'value': 0.66,
                      'time': '2015-01-29T21:51:28.968422294Z'}]
                )
            ]
        )

    def test_point_from_cols_vals(self):
        """point_from_cols_vals zips column names with row values."""
        cols = ['col1', 'col2']
        vals = [1, '2']

        point = ResultSet.point_from_cols_vals(cols, vals)
        self.assertDictEqual(
            point,
            {'col1': 1, 'col2': '2'}
        )

    def test_system_query(self):
        """Nameless system-query series are exposed under 'results'."""
        rs = ResultSet(
            {'series': [
                {'values': [['another', '48h0m0s', 3, False],
                            ['default', '0', 1, False],
                            ['somename', '24h0m0s', 4, True]],
                 'columns': ['name', 'duration',
                             'replicaN', 'default']}]}
        )

        self.assertEqual(
            rs.keys(),
            [('results', None)]
        )

        self.assertEqual(
            list(rs['results']),
            [
                {'duration': '48h0m0s', 'default': False, 'replicaN': 3,
                 'name': 'another'},
                {'duration': '0', 'default': False, 'replicaN': 1,
                 'name': 'default'},
                {'duration': '24h0m0s', 'default': True, 'replicaN': 4,
                 'name': 'somename'}
            ]
        )

    def test_resultset_error(self):
        """A response carrying an 'error' key raises on construction."""
        with self.assertRaises(InfluxDBClientError):
            ResultSet({
                "series": [],
                "error": "Big error, many problems."
            })