コード例 #1
0
    def setup(self):
        """Create fixtures: a four-series ResultSet and three identical points."""
        # One schema-description series per measurement name.
        series = [
            {
                'values': [['value', 'integer']],
                'name': measurement,
                'columns': ['fieldKey', 'fieldType']
            }
            for measurement in ('cpu', 'iops', 'load', 'memory')
        ]
        self.test_result_set = ResultSet({'series': series})

        # Three independent copies of the same sample point.
        template = {
            "cpu_load": 0.50,
            "time": "2009-11-10T23:00:00.123456Z",
        }
        self.dummy_points = [dict(template) for _ in range(3)]
コード例 #2
0
    def setUp(self):
        """Set up an instance of TestResultSet."""
        def make_series(measurement, host, value):
            # All series share the region, columns and timestamp.
            return {
                "measurement": measurement,
                "tags": {
                    "host": host,
                    "region": "us-west"
                },
                "columns": ["time", "value"],
                "values": [["2015-01-29T21:51:28.968422294Z", value]]
            }

        self.query_response = {
            "results": [{
                "series": [
                    make_series("cpu_load_short", "server01", 0.64),
                    make_series("cpu_load_short", "server02", 0.65),
                    make_series("other_serie", "server01", 0.66),
                ]
            }]
        }

        self.rs = ResultSet(self.query_response['results'][0])
コード例 #3
0
    def query(self,
              query,
              params=None,
              epoch=None,
              expected_response_code=200,
              database=None,
              raise_errors=True):
        """Send a query to InfluxDB.

        :param query: the actual query string
        :type query: str

        :param params: additional parameters for the request, defaults to {}
        :type params: dict

        :param epoch: response timestamps to be in epoch format either 'h',
            'm', 's', 'ms', 'u', or 'ns'; defaults to `None` which is
            RFC3339 UTC format with nanosecond precision
        :type epoch: str

        :param expected_response_code: the expected status code of response,
            defaults to 200
        :type expected_response_code: int

        :param database: database to query, defaults to None
        :type database: str

        :param raise_errors: Whether or not to raise exceptions when InfluxDB
            returns errors, defaults to True
        :type raise_errors: bool

        :returns: the queried data
        :rtype: :class:`~.ResultSet`
        """
        if params is None:
            params = {}

        params['q'] = query
        params['db'] = database or self._database

        if epoch is not None:
            params['epoch'] = epoch

        # Read-only statements (SELECT/SHOW) go over GET; everything else
        # may modify data and is sent as POST.
        method = 'POST'
        if query.split() and query.split()[0].upper() in ('SELECT', 'SHOW'):
            method = 'GET'

        response = self.request(url="query",
                                method=method,
                                params=params,
                                data=None,
                                expected_response_code=expected_response_code)

        data = response.json()

        results = [
            ResultSet(result, raise_errors=raise_errors)
            for result in data.get('results', [])
        ]

        # TODO(aviau): Always return a list. (This would be a breaking change)
        if len(results) == 1:
            return results[0]
        else:
            return results
コード例 #4
0
    def send_selection_query(
            self, query: SelectionQuery) -> ResultSet:  # type: ignore
        """Sends a single `SELECT` or `DELETE` query to influx server.

        Arguments:
            query {Selection_Query} -- Query which should be executed

        Raises:
            ValueError: no SelectionQuery is given.

        Returns:
            ResultSet -- Result of the Query, Empty if `DELETE`
        """
        if (not query or not isinstance(query, SelectionQuery)):
            raise ValueError("a selection query must be given")

        # If any queried table still has buffered inserts, flush the buffer
        # first so the query sees current data; one flush covers all tables.
        for table in query.tables:
            if (table in self.__insert_buffer):
                self.flush_insert_buffer()
                break

        # Convert the query object into its query-language string form.
        query_str = query.to_query()

        start_time = time.perf_counter()
        # Send the query; timestamps are requested in epoch seconds.
        try:
            result = self.__client.query(  # type: ignore
                query=query_str,
                epoch='s',
                database=self.database.name)

        except (InfluxDBServerError,
                InfluxDBClientError) as err:  # type: ignore
            ExceptionUtils.exception_info(
                error=err, extra_message="error when sending select statement"
            )  # type: ignore
            # Substitute an empty result to maintain the return structure.
            # raise_errors=False since we already caught and logged the error.
            result: ResultSet = ResultSet({},
                                          raise_errors=False)  # type: ignore

        end_time = time.perf_counter()

        # Row count for the metrics buffer; cheaper than materializing
        # `list(result.get_points())`.
        # NOTE(review): only the first series is counted — confirm a single
        # series is expected here, otherwise this undercounts.
        if (result):
            length = len(result.raw['series'][0]['values'])  # type: ignore
        else:
            length = 0

        # Attribute the row count evenly across the queried tables.
        tables_count: Dict[Table, int] = {}
        for table in query.tables:
            tables_count[table] = int(length / len(query.tables))

        self.__insert_metrics_to_buffer(query.keyword, tables_count,
                                        end_time - start_time)

        return result  # type: ignore
コード例 #5
0
def query_result(*args, **kwargs):
    """Return a canned ResultSet with two ``node_active`` series."""
    from influxdb.resultset import ResultSet

    def make_series(tag_host, state, value_host):
        return {
            "name": "node_active",
            "tags": {
                "host": tag_host
            },
            "columns": ["time", "value", "host"],
            "values": [["2018-01-22T15:51:28.968422294Z", state, value_host]]
        }

    query_response = {
        "results": [{
            "series": [
                make_series("head", "on", "head"),
                make_series("compute", "off", "compute1"),
            ]
        }]
    }
    return ResultSet(query_response["results"][0])
コード例 #6
0
def query_result(*args, **kwargs):
    """Return a canned ResultSet describing hardware state on three hosts."""
    from influxdb.resultset import ResultSet
    timestamp = "2018-01-22T15:51:28.968422294Z"
    states = [
        ("server01", "ok"),
        ("server02", "critical"),
        ("server05", "critical"),
    ]
    series = [
        {
            "name": "node_hardware",
            "tags": {"host": host},
            "columns": ["time", "val", "host"],
            "values": [[timestamp, state, host]],
        }
        for host, state in states
    ]
    return ResultSet({"series": series})
コード例 #7
0
ファイル: resultset_test.py プロジェクト: bralicea/Trades
    def setUp(self):
        """Set up an instance of TestResultSet."""
        # All rows share the same timestamp; series here carry no tags, so
        # host/region appear as plain value columns instead.
        timestamp = "2015-01-29T21:51:28.968422294Z"
        self.query_response = {
            "results": [{
                "series": [{
                    "name": "cpu_load_short",
                    "columns": ["time", "value", "host", "region"],
                    "values": [
                        [timestamp, 0.64, "server01", "us-west"],
                        [timestamp, 0.65, "server02", "us-west"],
                    ]
                }, {
                    "name": "other_series",
                    "columns": ["time", "value", "host", "region"],
                    "values": [
                        [timestamp, 0.66, "server01", "us-west"],
                    ]
                }]
            }]
        }

        self.rs = ResultSet(self.query_response['results'][0])
コード例 #8
0
def query_result(*args, **kwargs):
    """Return a canned ResultSet of GPU temperature samples."""
    from influxdb.resultset import ResultSet
    timestamp = "2018-01-22T15:51:28.968422294Z"
    samples = [
        (96, "server01", 0),
        (100, "server01", 1),
        (10, "server05", 1),
    ]
    series = {
        "name": "node_gpu_temp",
        "columns": ["time", "val", "host", "index"],
        "values": [[timestamp, val, host, idx] for val, host, idx in samples],
    }
    return ResultSet({"series": [series]})
コード例 #9
0
 def setUp(self):
     """Build a canned query response and expose it as a ResultSet.

     Fix: ``ResultSet`` wraps a single *result* dict (the mapping that
     carries the ``series`` key), not the whole response envelope, so pass
     ``self.query_response['results'][0]`` — consistent with the other
     fixtures in this file.
     """
     self.query_response = {
         "results": [{
             "series": [{
                 "name": "cpu_load_short",
                 "tags": {
                     "host": "server01",
                     "region": "us-west"
                 },
                 "columns": ["time", "value"],
                 "values": [["2015-01-29T21:51:28.968422294Z", 0.64]]
             }, {
                 "name": "cpu_load_short",
                 "tags": {
                     "host": "server02",
                     "region": "us-west"
                 },
                 "columns": ["time", "value"],
                 "values": [["2015-01-29T21:51:28.968422294Z", 0.65]]
             }, {
                 "name": "other_serie",
                 "tags": {
                     "host": "server01",
                     "region": "us-west"
                 },
                 "columns": ["time", "value"],
                 "values": [["2015-01-29T21:51:28.968422294Z", 0.66]]
             }]
         }]
     }
     # Wrap the first (and only) result, not the whole envelope.
     self.rs = ResultSet(self.query_response['results'][0])
コード例 #10
0
    def test_system_query(self):
        """A retention-policy style response is keyed and iterable as expected."""
        raw = {
            'results': [{
                'series': [{
                    'columns': ['name', 'duration', 'replicaN', 'default'],
                    'values': [['another', '48h0m0s', 3, False],
                               ['default', '0', 1, False],
                               ['somename', '24h0m0s', 4, True]]
                }]
            }]
        }
        rs = ResultSet(raw)

        self.assertEqual(rs.keys(), [('results', None)])

        expected_points = [
            {'name': 'another', 'duration': '48h0m0s',
             'replicaN': 3, 'default': False},
            {'name': 'default', 'duration': '0',
             'replicaN': 1, 'default': False},
            {'name': 'somename', 'duration': '24h0m0s',
             'replicaN': 4, 'default': True},
        ]
        self.assertEqual(list(rs['results']), expected_points)
コード例 #11
0
ファイル: test_summary.py プロジェクト: shanxichen/Antilles
def test_summary(mocker):
    """Cluster/group/rack summaries read from the cache and write one entry back."""
    from influxdb.resultset import ResultSet
    # Canned node_active response: one 'head' node and one 'compute' node.
    query_response = {
        "results": [{
            "series": [{
                "name":
                "node_active",
                "tags": {
                    "host": "head"
                },
                "columns": ["time", "value", "host"],
                "values": [["2018-01-22T15:51:28.968422294Z", "123", "head"]]
            }, {
                "name":
                "node_active",
                "tags": {
                    "host": "compute"
                },
                "columns": ["time", "value", "host"],
                "values":
                [["2018-01-22T15:51:28.968422294Z", "456", "compute1"]]
            }]
        }]
    }
    mock = mocker.patch('antilles.cluster.tasks.summary.cache')
    # cluster/group summaries are fed a *list* of ResultSet objects.
    mock.get.return_value = [ResultSet(query_response["results"][0])]

    summary.cluster_summary('cluster', ['head'])

    mock.get.assert_called()
    mock.set.assert_called_once()

    mock.reset_mock()

    summary.group_summary('cluster', ['head'])

    mock.get.assert_called()
    mock.set.assert_called_once()

    mock.reset_mock()

    # rack summary is fed a bare ResultSet and should hit the cache once.
    mock.get.return_value = ResultSet(query_response["results"][0])
    summary.rack_summary('cluster', ['head'])

    mock.get.assert_called_once()
    mock.set.assert_called_once()
コード例 #12
0
def test_rack_detail_view(client, mocker):
    """Rack detail endpoint renders even when the metrics cache is empty."""
    # Stub out both metric sources: datasource returns '0', cache an empty set.
    mocker.patch('antilles.cluster.datasource.DataSource.get_metric_data',
                 return_value='0')
    mocker.patch("django.core.cache.cache.get", return_value=ResultSet({}))

    response = client.get('/racks/1/')

    assert response.status_code == HTTP_200_OK
    rack = response.data['rack']
    assert rack['name'] == 'rack1'
    assert rack['energy'] == '0'
    assert rack['nodes'][0]['machinetype'] == 'ibm'
コード例 #13
0
def float_influxdb_data():
    """Return a ResultSet holding one float sample of cluster memory."""
    from influxdb.resultset import ResultSet
    series = {
        'name': 'cluster_mem',
        'columns': ['time', 'last'],
        'values': [
            ['2018-02-02T06:23:22.422247936Z', 107.14],
        ],
    }
    return ResultSet({'series': [series]})
コード例 #14
0
def string_influxdb_data():
    """Return a ResultSet holding one string sample of node state."""
    from influxdb.resultset import ResultSet
    series = {
        'name': 'node_active',
        'columns': ['time', 'last'],
        'values': [
            ['2018-02-02T06:23:22.422247936Z', "on"],
        ],
    }
    return ResultSet({'series': [series]})
コード例 #15
0
ファイル: client.py プロジェクト: psy0rz/influxdb-python
 def _read_chunked_response(response, raise_errors=True):
     """Merge all chunks of a chunked HTTP response into one ResultSet.

     Each response line is a standalone JSON document; the list-valued
     entries (e.g. 'series') of every result are concatenated so the
     caller sees a single combined result.

     :param response: streamed HTTP response whose lines are JSON chunks
     :param raise_errors: forwarded to the combined :class:`~.ResultSet`
     :returns: one ResultSet covering all chunks
     """
     result_set = {}
     for line in response.iter_lines():
         # requests may yield bytes; decode before JSON parsing.
         if isinstance(line, bytes):
             line = line.decode('utf-8')
         data = json.loads(line)
         for result in data.get('results', []):
             # Only list-valued entries accumulate across chunks.
             for key, value in result.items():
                 if isinstance(value, list):
                     result_set.setdefault(key, []).extend(value)
     return ResultSet(result_set, raise_errors=raise_errors)
コード例 #16
0
    def query(self, query, params=None, database=None, raise_errors=True):
        """Run *query* against the server and return a list of ResultSet.

        Fix: removed a leftover debug ``print`` that polluted stdout and
        raised IndexError whenever the response contained no results.

        :param query: the query string to execute
        :param params: extra query-string parameters, defaults to {}
        :param database: database to query; omitted from the request if falsy
        :param raise_errors: forwarded to each :class:`~.ResultSet`
        :returns: (via ``gen.Return``) a list of ResultSet objects
        """
        params = params or {}
        params['q'] = query
        if database:
            params['db'] = database

        response = yield self.request('/query', qs=params)
        result_set = [
            ResultSet(result, raise_errors=raise_errors)
            for result in response.get('results', [])
        ]
        raise gen.Return(result_set)
コード例 #17
0
    async def query(self,
                    query,
                    params=None,
                    bind_params=None,
                    epoch=None,
                    expected_response_code=200,
                    database=None,
                    raise_errors=True,
                    chunked=False,
                    chunk_size=0,
                    method='GET'):
        """Send a query to InfluxDB and return the parsed result(s).

        :param query: the actual query string
        :param params: additional request parameters, defaults to {}
        :param bind_params: bind parameters merged into ``params['params']``;
            they take precedence over pre-existing entries there
        :param epoch: epoch precision for response timestamps; `None` keeps
            RFC3339 UTC format
        :param expected_response_code: expected HTTP status, defaults to 200
        :param database: database to query, defaults to None
        :param raise_errors: whether ResultSet raises on InfluxDB errors
        :param chunked: request chunked responses from InfluxDB
        :param chunk_size: chunk size to request (0 means server default)
        :param method: HTTP method for the request, defaults to GET
        :returns: a single :class:`~.ResultSet`, or a list of them when the
            response contains several results
        """
        if params is None:
            params = {}

        # Merge bind parameters into the JSON-encoded 'params' entry.
        if bind_params is not None:
            params_dict = json.loads(params.get('params', '{}'))
            params_dict.update(bind_params)
            params['params'] = json.dumps(params_dict)

        params['q'] = query
        params['db'] = database or self._database

        if epoch is not None:
            params['epoch'] = epoch

        if chunked:
            params['chunked'] = 'true'
            if chunk_size > 0:
                params['chunk_size'] = chunk_size

        # SELECT ... INTO writes data, so force a POST regardless of `method`.
        if query.lower().startswith('select ') and ' into ' in query.lower():
            method = 'POST'

        # noinspection PyTypeChecker
        data = await self.request(
            url='query',
            method=method,
            params=params,
            data=None,
            expected_response_code=expected_response_code)

        results = [
            ResultSet(result, raise_errors=raise_errors)
            for result in data.get('results', [])
        ]

        # Single-result responses are unwrapped for convenience.
        if len(results) == 1:
            return results[0]

        return results
コード例 #18
0
    def test__query_measurement(self):
        """_query_measurement returns None for empty results, data otherwise."""
        influxdb_helper = vcmts_vnf.InfluxDBHelper("localhost", 8086)
        influxdb_helper._read_client = mock.MagicMock()

        # No series keys at all -> the helper should report nothing found.
        resulted_generator = mock.MagicMock()
        resulted_generator.keys.return_value = []
        influxdb_helper._read_client.query.return_value = resulted_generator
        query_result = influxdb_helper._query_measurement('cpu_value')
        self.assertIsNone(query_result)

        # Non-empty keys with points available -> the helper returns data.
        resulted_generator = mock.MagicMock()
        resulted_generator.keys.return_value = ["", ""]
        resulted_generator.get_points.return_value = ResultSet({"": ""})
        influxdb_helper._read_client.query.return_value = resulted_generator
        query_result = influxdb_helper._query_measurement('cpu_value')
        self.assertIsNotNone(query_result)
コード例 #19
0
    def test_adds_all_values(self):
        """read_query converts a query ResultSet into the expected DataFrame."""
        test_result_set = ResultSet({
            "time": 1,
            "message": "success!"
        })
        # Stub get_points so the ResultSet yields one raw record.
        when2(test_result_set.get_points).thenReturn({
            "time": 1,
            "message": "success!"
        })
        test_dataframe = pandas.DataFrame({'time': [1], 'message': ['success!']})
        # Route the query and the DataFrame construction to the canned data.
        when2(InfluxDBClient.query, "test_measurement").thenReturn(test_result_set)
        when2(pandas.DataFrame, {"time": 1, "message": "success!"}).thenReturn(test_dataframe)

        output = rm.read_query("test", "test_measurement")

        # One row, indexed by the epoch-nanosecond timestamp string.
        self.assertEqual(len(output["message"]), 1)
        self.assertTrue("success!" in output["message"]["1970-01-01 00:00:00.000000001"])
コード例 #20
0
def query_result():
    """Return a ResultSet holding a single node_cpu sample."""
    from influxdb.resultset import ResultSet
    series = {
        "name": "node_cpu",
        "columns": ["time", "val", "host"],
        "values": [
            ["2018-01-22T15:51:28.968422294Z", 90, "server01"],
        ],
    }
    return ResultSet({"series": [series]})
コード例 #21
0
 def test_get_latest_timestamp(self):
     """Checks latest timestamp returned."""
     measurement_name = 'test'
     ifclient = InfluxDBClient()
     # Canned response: a single sample at 2020-04-14T11:15:00Z.
     response = ResultSet({
         "series": [{
             "name": measurement_name,
             "columns": ["time", "Consumption"],
             "values": [
                 ["2020-04-14T11:15:00Z", 137.7605],
             ]
         }]
     })
     ifclient.query = MagicMock(return_value=response)
     got = ifclient.get_latest_timestamp(measurement_name)
     # The ISO string should come back as a tz-aware UTC datetime.
     expected = pytz.utc.localize(datetime.datetime(2020, 4, 14, 11, 15))
     self.assertEqual(got, expected)
コード例 #22
0
    def test_get_current_time(self, mocked_client):
        """get_current_time parses 'currentTime' from server diagnostics."""
        # Mock response from InfluxDB (diagnostics-style, multi-series).
        diagnostics = ResultSet({'series': [{'name': 'build',
                                             'columns': ['Branch', 'Build Time', 'Commit', 'Version'],
                                             'values': [
                                                 ['0.12', '', 'e094138084855d444195b252314dfee9eae34cab', '0.12.1']]},
                                            {'name': 'network', 'columns': ['hostname'], 'values': [['raspberrypi']]},
                                            {'name': 'runtime', 'columns': ['GOARCH', 'GOMAXPROCS', 'GOOS', 'version'],
                                             'values': [['arm', 4, 'linux', 'go1.4.3']]},
                                            {'name': 'system', 'columns': ['PID', 'currentTime', 'started', 'uptime'],
                                             'values': [[561, '2016-04-15T21:29:31.886241629Z',
                                                         '2016-04-15T21:21:10.677939741Z', '8m21.2083047s']]}]})
        InfluxDB.query = mock.MagicMock(return_value=diagnostics)

        # Code to test
        influx = InfluxDB('host', '9999', 'user', 'password', 'mockdb')
        current_time = influx.get_current_time()
        # 'currentTime' truncated to microsecond precision, tz-aware UTC.
        self.assertEqual(current_time, datetime(2016, 4, 15, 21, 29, 31, 886241, tzinfo=tzutc()))
コード例 #23
0
def query_result(*args, **kwargs):
    """Return a canned ResultSet of disk usage for three servers."""
    from influxdb.resultset import ResultSet
    timestamp = "2018-01-22T15:51:28.968422294Z"
    usage = [
        ("server01", 20),
        ("server02", 70),
        ("server03", 30),
    ]
    series = {
        "name": "node_disk",
        "columns": ["time", "val", "host"],
        "values": [[timestamp, val, host] for host, val in usage],
    }
    return ResultSet({"series": [series]})
コード例 #24
0
    def test_chunked_response(self):
        """Test chunked response for TestInfluxDBClient object."""
        # Four newline-delimited JSON chunks; the first three carry
        # "partial":true, the last one closes the stream.
        example_response = \
            u'{"results":[{"statement_id":0,"series":' \
            '[{"name":"cpu","columns":["fieldKey","fieldType"],"values":' \
            '[["value","integer"]]}],"partial":true}]}\n{"results":' \
            '[{"statement_id":0,"series":[{"name":"iops","columns":' \
            '["fieldKey","fieldType"],"values":[["value","integer"]]}],' \
            '"partial":true}]}\n{"results":[{"statement_id":0,"series":' \
            '[{"name":"load","columns":["fieldKey","fieldType"],"values":' \
            '[["value","integer"]]}],"partial":true}]}\n{"results":' \
            '[{"statement_id":0,"series":[{"name":"memory","columns":' \
            '["fieldKey","fieldType"],"values":[["value","integer"]]}]}]}\n'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)
            response = self.cli.query('show series limit 4 offset 0',
                                      chunked=True,
                                      chunk_size=4)
            # All four chunks should be merged into one combined ResultSet.
            self.assertTrue(len(response) == 4)
            self.assertEqual(
                response.__repr__(),
                ResultSet({
                    'series': [{
                        'values': [['value', 'integer']],
                        'name': 'cpu',
                        'columns': ['fieldKey', 'fieldType']
                    }, {
                        'values': [['value', 'integer']],
                        'name': 'iops',
                        'columns': ['fieldKey', 'fieldType']
                    }, {
                        'values': [['value', 'integer']],
                        'name': 'load',
                        'columns': ['fieldKey', 'fieldType']
                    }, {
                        'values': [['value', 'integer']],
                        'name': 'memory',
                        'columns': ['fieldKey', 'fieldType']
                    }]
                }).__repr__())
コード例 #25
0
 def test_get_latest_timestamp_missing_measurement(self):
     """An unknown measurement yields no latest timestamp."""
     ifclient = InfluxDBClient()
     empty_response = ResultSet({})
     ifclient.query = MagicMock(return_value=empty_response)
     got = ifclient.get_latest_timestamp('random')
     self.assertIsNone(got)
コード例 #26
0
ファイル: client.py プロジェクト: psy0rz/influxdb-python
    def query(self,
              query,
              params=None,
              epoch=None,
              expected_response_code=200,
              database=None,
              raise_errors=True,
              chunked=False,
              chunk_size=0,
              stream=False):
        """Send a query to InfluxDB.

        :param query: the actual query string
        :type query: str

        :param params: additional parameters for the request,
            defaults to {}
        :type params: dict

        :param epoch: response timestamps to be in epoch format either 'h',
            'm', 's', 'ms', 'u', or 'ns'; defaults to `None` which is
            RFC3339 UTC format with nanosecond precision
        :type epoch: str

        :param expected_response_code: the expected status code of response,
            defaults to 200
        :type expected_response_code: int

        :param database: database to query, defaults to None
        :type database: str

        :param raise_errors: Whether or not to raise exceptions when InfluxDB
            returns errors, defaults to True
        :type raise_errors: bool

        :param chunked: Enable to use chunked responses from InfluxDB.
            Normally all chunks are automatically combined into one huge
            ResultSet, unless you use ``stream``.
        :type chunked: bool

        :param chunk_size: Size of each chunk to tell InfluxDB to use.
        :type chunk_size: int

        :param stream: Will stream the data and return a generator that
            generates one ResultSet per chunk.
            This allows for huge datasets with virtually no limit.

        :type stream: bool

        :returns: the queried data
        :rtype: :class:`~.ResultSet`
        """
        if params is None:
            params = {}

        params['q'] = query
        params['db'] = database or self._database

        if epoch is not None:
            params['epoch'] = epoch

        if chunked:
            params['chunked'] = 'true'
            if chunk_size > 0:
                params['chunk_size'] = chunk_size

        response = self.request(url="query",
                                method='GET',
                                params=params,
                                data=None,
                                expected_response_code=expected_response_code,
                                stream=stream)

        # Chunked mode bypasses the plain-JSON path entirely: either yield
        # one ResultSet per chunk (stream) or merge all chunks into one.
        if chunked:
            if stream:
                return self._read_chunked_response_generator(
                    response, raise_errors)
            else:
                return self._read_chunked_response(response, raise_errors)

        data = response.json()

        results = [
            ResultSet(result, raise_errors=raise_errors)
            for result in data.get('results', [])
        ]

        # TODO(aviau): Always return a list. (This would be a breaking change)
        if len(results) == 1:
            return results[0]

        return results
コード例 #27
0
def node_gpu_data():
    """Return canned per-metric ResultSets for GPU 0 on antilles_head."""
    metrics = [
        ('node_gpu_mem_pct', 12),
        ('node_gpu_process', 1),
        ('node_gpu_temp', 43),
        ('node_gpu_type', 'Tesla P100-PCIE-16GB'),
        ('node_gpu_util', 20),
    ]
    # Every metric shares the same timestamp, tags and column layout.
    return [
        ResultSet({
            'series': [{
                'values': [
                    ['2018-02-02T06:23:22.422247936Z', value],
                ],
                'tags': {
                    'index': '0',
                    'host': 'antilles_head'
                },
                'name': name,
                'columns': ['time', 'value']
            }]
        })
        for name, value in metrics
    ]
コード例 #28
0
ファイル: client.py プロジェクト: JustusAdam/influxdb-python
    def query(self,
              query,
              params=None,
              bind_params=None,
              epoch=None,
              expected_response_code=200,
              database=None,
              raise_errors=True,
              chunked=False,
              chunk_size=0,
              method="GET"):
        """Send a query to InfluxDB.

        .. danger::
            In order to avoid injection vulnerabilities (similar to `SQL
            injection <https://www.owasp.org/index.php/SQL_Injection>`_
            vulnerabilities), do not directly include untrusted data into the
            ``query`` parameter, use ``bind_params`` instead.

        :param query: the actual query string
        :type query: str

        :param params: additional parameters for the request,
            defaults to {}
        :type params: dict

        :param bind_params: bind parameters for the query:
            any variable in the query written as ``'$var_name'`` will be
            replaced with ``bind_params['var_name']``. Only works in the
            ``WHERE`` clause and takes precedence over ``params['params']``
        :type bind_params: dict

        :param epoch: response timestamps to be in epoch format either 'h',
            'm', 's', 'ms', 'u', or 'ns'; defaults to `None` which is
            RFC3339 UTC format with nanosecond precision
        :type epoch: str

        :param expected_response_code: the expected status code of response,
            defaults to 200
        :type expected_response_code: int

        :param database: database to query, defaults to None
        :type database: str

        :param raise_errors: Whether or not to raise exceptions when InfluxDB
            returns errors, defaults to True
        :type raise_errors: bool

        :param chunked: Enable to use chunked responses from InfluxDB.
            With ``chunked`` enabled, one ResultSet is returned per chunk
            containing all results within that chunk
        :type chunked: bool

        :param chunk_size: Size of each chunk to tell InfluxDB to use.
        :type chunk_size: int

        :param method: the HTTP method for the request, defaults to GET
        :type method: str

        :returns: the queried data
        :rtype: :class:`~.ResultSet`
        """
        if params is None:
            params = {}

        # Merge bind parameters into the JSON-encoded 'params' entry;
        # explicit bind_params win over pre-existing keys.
        if bind_params is not None:
            params_dict = json.loads(params.get('params', '{}'))
            params_dict.update(bind_params)
            params['params'] = json.dumps(params_dict)

        params['q'] = query
        params['db'] = database or self._database

        if epoch is not None:
            params['epoch'] = epoch

        if chunked:
            params['chunked'] = 'true'
            if chunk_size > 0:
                params['chunk_size'] = chunk_size

        # SELECT ... INTO writes data, so force a POST regardless of `method`.
        if query.lower().startswith("select ") and " into " in query.lower():
            method = "POST"

        response = self.request(url="query",
                                method=method,
                                params=params,
                                data=None,
                                expected_response_code=expected_response_code)

        # NOTE(review): raise_errors is not forwarded to
        # _read_chunked_response here — confirm that is intentional.
        if chunked:
            return self._read_chunked_response(response)

        data = response.json()

        results = [
            ResultSet(result, raise_errors=raise_errors)
            for result in data.get('results', [])
        ]

        # TODO(aviau): Always return a list. (This would be a breaking change)
        if len(results) == 1:
            return results[0]

        return results
コード例 #29
0
def node_data():
    """Return mock InfluxDB ``ResultSet`` fixtures for two nodes.

    Produces one single-series ResultSet per (host, metric) pair, in the
    same order as the original hand-written fixture list: all nine metrics
    for ``antilles_head`` first, then the same nine for ``antilles_compute``.
    Every series shares one timestamp and the columns ``['time', 'value']``.

    :returns: list of 18 ``ResultSet`` objects
    """
    # Single shared sample timestamp for every series.
    timestamp = '2018-02-02T06:23:22.422247936Z'

    # Metrics common to both hosts, in fixture order; ``node_active`` is
    # appended per host because its value differs ('on' vs 'off').
    base_metrics = [
        ('node_cpu', 36),
        ('node_disk_total', 2048),
        ('node_disk', 1024),
        ('node_mem_total', 4096),
        ('node_mem', 1024),
        ('node_network_in', 2000),
        ('node_network_out', 1500),
        ('node_cpu_num', 32),
    ]

    results = []
    for host, active in (('antilles_head', 'on'),
                         ('antilles_compute', 'off')):
        for name, value in base_metrics + [('node_active', active)]:
            results.append(ResultSet({
                'series': [{
                    'values': [
                        [timestamp, value],
                    ],
                    'tags': {
                        'host': host
                    },
                    'name': name,
                    'columns': ['time', 'value']
                }]
            }))
    return results
コード例 #30
0
ファイル: client.py プロジェクト: maxdolle/influxdb-python
    def query(self,
              query,
              params=None,
              epoch=None,
              expected_response_code=200,
              database=None,
              raise_errors=True,
              chunked=False,
              chunk_size=0):
        """Run a query against InfluxDB and parse the response.

        :param query: the actual query string
        :type query: str

        :param params: additional parameters for the request,
            defaults to {}
        :type params: dict

        :param epoch: response timestamps to be in epoch format either 'h',
            'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is
            RFC3339 UTC format with nanosecond precision
        :type epoch: str

        :param expected_response_code: the expected status code of response,
            defaults to 200
        :type expected_response_code: int

        :param database: database to query, defaults to None
        :type database: str

        :param raise_errors: Whether or not to raise exceptions when InfluxDB
            returns errors, defaults to True
        :type raise_errors: bool

        :param chunked: Enable to use chunked responses from InfluxDB.
            With ``chunked`` enabled, one ResultSet is returned per chunk
            containing all results within that chunk
        :type chunked: bool

        :param chunk_size: Size of each chunk to tell InfluxDB to use.
        :type chunk_size: int

        :returns: the queried data
        :rtype: :class:`~.ResultSet`
        """
        # NOTE: the caller's dict is mutated in place on purpose (matches
        # the historical behavior of this client).
        if params is None:
            params = {}
        params.update(q=query, db=database or self._database)

        if epoch is not None:
            params['epoch'] = epoch

        if chunked:
            params['chunked'] = 'true'
            if chunk_size > 0:
                params['chunk_size'] = chunk_size

        response = self.request(url="query",
                                method='GET',
                                params=params,
                                data=None,
                                expected_response_code=expected_response_code)

        # Chunked responses are parsed incrementally, one ResultSet per chunk.
        if chunked:
            return self._read_chunked_response(response)

        result_sets = [ResultSet(raw, raise_errors=raise_errors)
                       for raw in response.json().get('results', [])]

        # TODO(aviau): Always return a list. (This would be a breaking change)
        return result_sets[0] if len(result_sets) == 1 else result_sets