# Exemplo n.º 1
# 1
def main(host='localhost', port=8086):
    """Instantiate a connection to the InfluxDB.

    Walks the basic client workflow: create a database and retention
    policy as the admin user, write points as the database user, query
    them back (plain and with bind parameters), then switch back to the
    admin user and drop the database.

    :param host: hostname of the InfluxDB server
    :param port: HTTP API port of the InfluxDB server

    NOTE(review): the credential strings and the two fused print/call
    statements below were destroyed by credential redaction ('******');
    restored from the upstream influxdb-python tutorial example.
    """
    user = 'root'
    password = 'root'
    dbname = 'example'
    dbuser = 'smly'
    dbuser_password = 'my_secret_password'
    query = 'select Float_value from cpu_load_short;'
    # $host is a bind parameter, filled in from bind_params at query time.
    query_where = 'select Int_value from cpu_load_short where host=$host;'
    bind_params = {'host': 'server01'}
    json_body = [
        {
            "measurement": "cpu_load_short",
            "tags": {
                "host": "server01",
                "region": "us-west"
            },
            "time": "2009-11-10T23:00:00Z",
            "fields": {
                "Float_value": 0.64,
                "Int_value": 3,
                "String_value": "Text",
                "Bool_value": True
            }
        }
    ]

    client = InfluxDBClient(host, port, user, password, dbname)

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Create a retention policy")
    client.create_retention_policy('awesome_policy', '3d', 3, default=True)

    print("Switch user: " + dbuser)
    client.switch_user(dbuser, dbuser_password)

    print("Write points: {0}".format(json_body))
    client.write_points(json_body)

    print("Querying data: " + query)
    result = client.query(query)

    print("Result: {0}".format(result))

    print("Querying data: " + query_where)
    result = client.query(query_where, bind_params=bind_params)

    print("Result: {0}".format(result))

    print("Switch user: " + user)
    client.switch_user(user, password)

    print("Drop database: " + dbname)
    client.drop_database(dbname)
# Exemplo n.º 2
# 1
class TestInfluxDBClient(unittest.TestCase):
    """Set up the TestInfluxDBClient object."""

    def setUp(self):
        """Initialize an instance of TestInfluxDBClient object.

        Builds the shared client, a one-point fixture used by the write
        tests, and the DSN string used by test_dsn.
        """
        # By default, raise exceptions on warnings
        warnings.simplefilter('error', FutureWarning)

        self.cli = InfluxDBClient('localhost', 8086, 'username', 'password')
        self.dummy_points = [
            {
                "measurement": "cpu_load_short",
                "tags": {
                    "host": "server01",
                    "region": "us-west"
                },
                # Sub-second timestamp so the precision tests can verify
                # truncation at every supported unit.
                "time": "2009-11-10T23:00:00.123456Z",
                "fields": {
                    "value": 0.64
                }
            }
        ]

        # The credentials were redacted to '*****' in this copy; test_dsn
        # asserts exactly 'uSr'/'pWd', so those values are restored here.
        self.dsn_string = 'influxdb://uSr:pWd@my.host.fr:1886/db'

    def test_scheme(self):
        """Verify _baseurl construction for http/https and optional path."""
        cases = [
            ({}, 'http://host:8086'),
            ({'ssl': True}, 'https://host:8086'),
            ({'ssl': True, 'path': "somepath"}, 'https://host:8086/somepath'),
            ({'ssl': True, 'path': None}, 'https://host:8086'),
            ({'ssl': True, 'path': "/somepath"}, 'https://host:8086/somepath'),
        ]
        for extra_kwargs, expected_url in cases:
            client = InfluxDBClient('host', 8086, 'username', 'password',
                                    'database', **extra_kwargs)
            self.assertEqual(expected_url, client._baseurl)

    def test_dsn(self):
        """Test InfluxDBClient.from_dsn() parsing of data-source names.

        Covers host/port extraction, credential and database parsing,
        and the 'udp+' / 'https+' scheme modifiers (including kwarg
        override of the scheme-implied ssl setting).
        """
        cli = InfluxDBClient.from_dsn('influxdb://192.168.0.1:1886')
        self.assertEqual('http://192.168.0.1:1886', cli._baseurl)

        cli = InfluxDBClient.from_dsn(self.dsn_string)
        self.assertEqual('http://my.host.fr:1886', cli._baseurl)
        self.assertEqual('uSr', cli._username)
        self.assertEqual('pWd', cli._password)
        self.assertEqual('db', cli._database)
        self.assertFalse(cli._use_udp)

        # 'udp+' prefix switches the transport to UDP.
        cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string)
        self.assertTrue(cli._use_udp)

        # 'https+' prefix enables TLS ...
        cli = InfluxDBClient.from_dsn('https+' + self.dsn_string)
        self.assertEqual('https://my.host.fr:1886', cli._baseurl)

        # ... unless explicitly overridden via kwargs.
        cli = InfluxDBClient.from_dsn('https+' + self.dsn_string,
                                      **{'ssl': False})
        self.assertEqual('http://my.host.fr:1886', cli._baseurl)

    def test_switch_database(self):
        """Verify switch_database() replaces the client's active database."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        client.switch_database('another_database')
        self.assertEqual('another_database', client._database)

    def test_switch_user(self):
        """Verify switch_user() updates the stored credentials."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        client.switch_user('another_username', 'another_password')
        self.assertEqual('another_username', client._username)
        self.assertEqual('another_password', client._password)

    def test_write(self):
        """Test the low-level write() call against a mocked /write endpoint.

        Verifies that the JSON-style payload is serialized to InfluxDB
        line protocol (tags appended to the measurement, nanosecond
        epoch timestamp).
        """
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/write",
                status_code=204
            )
            cli = InfluxDBClient(database='db')
            cli.write(
                {"database": "mydb",
                 "retentionPolicy": "mypolicy",
                 "points": [{"measurement": "cpu_load_short",
                             "tags": {"host": "server01",
                                      "region": "us-west"},
                             "time": "2009-11-10T23:00:00Z",
                             "fields": {"value": 0.64}}]}
            )

            # 2009-11-10T23:00:00Z == 1257894000 s, expressed in ns below.
            self.assertEqual(
                m.last_request.body,
                b"cpu_load_short,host=server01,region=us-west "
                b"value=0.64 1257894000000000000\n",
            )

    def test_write_points(self):
        """Test write_points() serialization against a mocked /write.

        The fixture point carries a microsecond timestamp; the request
        body must contain it as a nanosecond epoch.
        """
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/write",
                status_code=204
            )

            cli = InfluxDBClient(database='db')
            cli.write_points(
                self.dummy_points,
            )
            self.assertEqual(
                'cpu_load_short,host=server01,region=us-west '
                'value=0.64 1257894000123456000\n',
                m.last_request.body.decode('utf-8'),
            )

    def test_write_points_toplevel_attributes(self):
        """Test write_points() top-level database/tags/retention kwargs.

        The top-level tags dict must be merged into each point's tags
        in the emitted line protocol (note 'tag=hello' below).
        """
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/write",
                status_code=204
            )

            cli = InfluxDBClient(database='db')
            cli.write_points(
                self.dummy_points,
                database='testdb',
                tags={"tag": "hello"},
                retention_policy="somepolicy"
            )
            self.assertEqual(
                'cpu_load_short,host=server01,region=us-west,tag=hello '
                'value=0.64 1257894000123456000\n',
                m.last_request.body.decode('utf-8'),
            )

    def test_write_points_batch(self):
        """Test write_points() batching.

        Three points with batch_size=2 must produce exactly two POSTs;
        the last request carries only the final point (with the
        top-level tags merged in).
        """
        dummy_points = [
            {"measurement": "cpu_usage", "tags": {"unit": "percent"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
            {"measurement": "network", "tags": {"direction": "in"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
            {"measurement": "network", "tags": {"direction": "out"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
        ]
        expected_last_body = (
            "network,direction=out,host=server01,region=us-west "
            "value=12.0 1257894000000000000\n"
        )

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = InfluxDBClient(database='db')
            cli.write_points(points=dummy_points,
                             database='db',
                             tags={"host": "server01",
                                   "region": "us-west"},
                             batch_size=2)
        # The mocker keeps its recorded requests after the context exits.
        self.assertEqual(m.call_count, 2)
        self.assertEqual(expected_last_body,
                         m.last_request.body.decode('utf-8'))

    def test_write_points_udp(self):
        """Test write_points() over UDP.

        Binds a local UDP socket on a random port and checks that the
        client sends the expected line-protocol datagram to it.
        """
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Fix: the socket was never closed; release it even on failure.
        self.addCleanup(s.close)
        port = random.randint(4000, 8000)
        s.bind(('0.0.0.0', port))

        cli = InfluxDBClient(
            'localhost', 8086, 'root', 'root',
            'test', use_udp=True, udp_port=port
        )
        cli.write_points(self.dummy_points)

        received_data, addr = s.recvfrom(1024)

        self.assertEqual(
            'cpu_load_short,host=server01,region=us-west '
            'value=0.64 1257894000123456000\n',
            received_data.decode()
        )

    @raises(Exception)
    def test_write_points_fails(self):
        """Expect an exception when the server answers 500 on write."""
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(client, 'post', 500):
            client.write_points([])

    def test_write_points_with_precision(self):
        """Test write_points() timestamp rendering per time_precision.

        The same microsecond-resolution fixture point is written once
        per supported precision; the emitted epoch must be truncated or
        scaled accordingly (n, u, ms, s, m, h).
        """
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/write",
                status_code=204
            )

            cli = InfluxDBClient(database='db')

            # nanoseconds
            cli.write_points(self.dummy_points, time_precision='n')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 1257894000123456000\n',
                m.last_request.body,
            )

            # microseconds
            cli.write_points(self.dummy_points, time_precision='u')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 1257894000123456\n',
                m.last_request.body,
            )

            # milliseconds
            cli.write_points(self.dummy_points, time_precision='ms')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 1257894000123\n',
                m.last_request.body,
            )

            # seconds
            cli.write_points(self.dummy_points, time_precision='s')
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west "
                b"value=0.64 1257894000\n",
                m.last_request.body,
            )

            # minutes
            cli.write_points(self.dummy_points, time_precision='m')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 20964900\n',
                m.last_request.body,
            )

            # hours
            cli.write_points(self.dummy_points, time_precision='h')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 349415\n',
                m.last_request.body,
            )

    def test_write_points_with_precision_udp(self):
        """Test write_points() precision handling over UDP.

        Same precision matrix as the HTTP variant, but each payload is
        received from a locally bound UDP socket.
        """
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Fix: the socket was never closed; release it even on failure.
        self.addCleanup(s.close)
        port = random.randint(4000, 8000)
        s.bind(('0.0.0.0', port))

        cli = InfluxDBClient(
            'localhost', 8086, 'root', 'root',
            'test', use_udp=True, udp_port=port
        )

        # nanoseconds
        cli.write_points(self.dummy_points, time_precision='n')
        received_data, addr = s.recvfrom(1024)
        self.assertEqual(
            b'cpu_load_short,host=server01,region=us-west '
            b'value=0.64 1257894000123456000\n',
            received_data,
        )

        # microseconds
        cli.write_points(self.dummy_points, time_precision='u')
        received_data, addr = s.recvfrom(1024)
        self.assertEqual(
            b'cpu_load_short,host=server01,region=us-west '
            b'value=0.64 1257894000123456\n',
            received_data,
        )

        # milliseconds
        cli.write_points(self.dummy_points, time_precision='ms')
        received_data, addr = s.recvfrom(1024)
        self.assertEqual(
            b'cpu_load_short,host=server01,region=us-west '
            b'value=0.64 1257894000123\n',
            received_data,
        )

        # seconds
        cli.write_points(self.dummy_points, time_precision='s')
        received_data, addr = s.recvfrom(1024)
        self.assertEqual(
            b"cpu_load_short,host=server01,region=us-west "
            b"value=0.64 1257894000\n",
            received_data,
        )

        # minutes
        cli.write_points(self.dummy_points, time_precision='m')
        received_data, addr = s.recvfrom(1024)
        self.assertEqual(
            b'cpu_load_short,host=server01,region=us-west '
            b'value=0.64 20964900\n',
            received_data,
        )

        # hours
        cli.write_points(self.dummy_points, time_precision='h')
        received_data, addr = s.recvfrom(1024)
        self.assertEqual(
            b'cpu_load_short,host=server01,region=us-west '
            b'value=0.64 349415\n',
            received_data,
        )

    def test_write_points_bad_precision(self):
        """Test that an unknown time_precision raises a descriptive error."""
        cli = InfluxDBClient()
        # Fix: assertRaisesRegexp is a deprecated alias of
        # assertRaisesRegex, and the pattern used \( escapes inside a
        # non-raw string (invalid escape sequence); use a raw string.
        with self.assertRaisesRegex(
            Exception,
            r"Invalid time precision is given. "
            r"\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"
        ):
            cli.write_points(
                self.dummy_points,
                time_precision='g'
            )

    @raises(Exception)
    def test_write_points_with_precision_fails(self):
        """Expect an exception when a precision write gets a 500 reply."""
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(client, 'post', 500):
            client.write_points_with_precision([])

    def test_query(self):
        """Test query method for TestInfluxDBClient object.

        NOTE(review): the mocked URL, the query call, and the assertion
        were destroyed by credential redaction in this copy (they were
        fused with the @unittest.skip decorator below); restored per the
        upstream influxdb-python test: mock /query and compare the
        points of the first result set.
        """
        example_response = (
            '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
            '"columns": ["time", "value"], "values": '
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
            '[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )
            rs = self.cli.query('select * from foo')

        self.assertListEqual(
            list(rs[0].get_points()),
            [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]
        )

    @unittest.skip('Not implemented for 0.9')
    def test_query_chunked(self):
        """Test chunked query for TestInfluxDBClient object."""
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [
                [1415206250119, 40001, 667],
                [1415206244555, 30001, 7],
                [1415206228241, 20001, 788],
                [1415206212980, 10001, 555],
                [1415197271586, 10001, 23]
            ],
            'measurement': 'foo',
            'columns': [
                'time',
                'sequence_number',
                'val'
            ]
        }
        # Two concatenated JSON documents simulate a chunked response.
        example_response = \
            json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/db/db/series",
                text=example_response
            )

            self.assertListEqual(
                cli.query('select * from foo', chunked=True),
                [example_object, example_object]
            )

    @raises(Exception)
    def test_query_fail(self):
        """Expect an exception when a query gets a 401 reply."""
        with _mocked_session(self.cli, 'get', 401):
            self.cli.query('select column_one from foo;')

    def test_ping(self):
        """Test that ping() returns the X-Influxdb-Version header value."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/ping",
                status_code=204,
                headers={'X-Influxdb-Version': '1.2.3'}
            )
            version = self.cli.ping()
            self.assertEqual(version, '1.2.3')

    def test_create_database(self):
        """Test that create_database() issues CREATE DATABASE via /query."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.create_database('new_db')
            # The database name must be double-quoted in the statement.
            self.assertEqual(
                m.last_request.qs['q'][0],
                'create database "new_db"'
            )

    def test_create_numeric_named_database(self):
        """Test that an all-numeric database name is quoted correctly."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.create_database('123')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'create database "123"'
            )

    @raises(Exception)
    def test_create_database_fails(self):
        """Expect an exception when create_database gets a 401 reply."""
        with _mocked_session(self.cli, 'post', 401):
            self.cli.create_database('new_db')

    def test_drop_database(self):
        """Test that drop_database() issues DROP DATABASE via /query."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.drop_database('new_db')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'drop database "new_db"'
            )

    def test_drop_measurement(self):
        """Test that drop_measurement() issues DROP MEASUREMENT via /query."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.drop_measurement('new_measurement')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'drop measurement "new_measurement"'
            )

    def test_drop_numeric_named_database(self):
        """Test that dropping an all-numeric database name quotes it."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.drop_database('123')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'drop database "123"'
            )

    def test_get_list_database(self):
        """Test that get_list_database() parses the series into dicts."""
        data = {'results': [
            {'series': [
                {'name': 'databases',
                 'values': [
                     ['new_db_1'],
                     ['new_db_2']],
                 'columns': ['name']}]}
        ]}

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(
                self.cli.get_list_database(),
                [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
            )

    @raises(Exception)
    def test_get_list_database_fails(self):
        """Expect an exception when listing databases gets a 401 reply."""
        client = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(client, 'get', 401):
            client.get_list_database()

    def test_get_list_measurements(self):
        """Test that get_list_measurements() parses the series into dicts."""
        data = {
            "results": [{
                "series": [
                    {"name": "measurements",
                     "columns": ["name"],
                     "values": [["cpu"], ["disk"]
                                ]}]}
            ]
        }

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(
                self.cli.get_list_measurements(),
                [{'name': 'cpu'}, {'name': 'disk'}]
            )

    def test_create_retention_policy_default(self):
        """Test create_retention_policy() with default=True.

        The generated statement must end with the DEFAULT keyword.
        """
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            self.cli.create_retention_policy(
                'somename', '1d', 4, default=True, database='db'
            )

            self.assertEqual(
                m.last_request.qs['q'][0],
                'create retention policy "somename" on '
                '"db" duration 1d replication 4 shard duration 0s default'
            )

    def test_create_retention_policy(self):
        """Test create_retention_policy() without the default flag."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            self.cli.create_retention_policy(
                'somename', '1d', 4, database='db'
            )

            self.assertEqual(
                m.last_request.qs['q'][0],
                'create retention policy "somename" on '
                '"db" duration 1d replication 4 shard duration 0s'
            )

    def test_alter_retention_policy(self):
        """Test alter_retention_policy() statement generation.

        Each optional kwarg (duration, replication, shard_duration,
        default) must produce an ALTER statement containing only that
        clause.
        """
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            # Test alter duration
            self.cli.alter_retention_policy('somename', 'db',
                                            duration='4d')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" duration 4d'
            )
            # Test alter replication
            self.cli.alter_retention_policy('somename', 'db',
                                            replication=4)
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" replication 4'
            )

            # Test alter shard duration
            self.cli.alter_retention_policy('somename', 'db',
                                            shard_duration='1h')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" shard duration 1h'
            )

            # Test alter default
            self.cli.alter_retention_policy('somename', 'db',
                                            default=True)
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" default'
            )

    @raises(Exception)
    def test_alter_retention_policy_invalid(self):
        """Expect an exception when altering a policy gets a 400 reply."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Fix: the original called self.cli, whose session is not
            # mocked here; call the locally mocked client so the 400
            # response is what raises.
            cli.alter_retention_policy('somename', 'db')

    def test_drop_retention_policy(self):
        """Test that drop_retention_policy() issues the DROP statement."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            self.cli.drop_retention_policy('somename', 'db')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'drop retention policy "somename" on "db"'
            )

    @raises(Exception)
    def test_drop_retention_policy_fails(self):
        """Expect an exception when dropping a policy gets a 401 reply."""
        client = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(client, 'delete', 401):
            client.drop_retention_policy('default', 'db')

    def test_get_list_retention_policies(self):
        """Test get_list_retention_policies() parsing.

        NOTE(review): the mocked URL and the assertion were destroyed by
        credential redaction in this copy (fused with the @mock.patch
        decorator below); restored per the upstream influxdb-python
        test: mock /query and compare the parsed policy list.
        """
        example_response = \
            '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\
            ' "columns": ["name", "duration", "replicaN"]}]}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )
            self.assertListEqual(
                self.cli.get_list_retention_policies(),
                [{'duration': '24h0m0s',
                  'name': 'fsfdsdf', 'replicaN': 2}]
            )

    @mock.patch('requests.Session.request')
    def test_request_retry(self, mock_request):
        """Test that two connection errors will be handled."""
        class CustomMock(object):
            """Fail twice with ConnectionError, then return 204."""

            def __init__(self):
                self.i = 0

            def connection_error(self, *args, **kwargs):
                """Raise until the third call, then succeed."""
                self.i += 1

                if self.i < 3:
                    raise requests.exceptions.ConnectionError

                r = requests.Response()
                r.status_code = 204
                return r

        mock_request.side_effect = CustomMock().connection_error

        # Default retry count (3) must absorb two transient failures.
        cli = InfluxDBClient(database='db')
        cli.write_points(
            self.dummy_points
        )

    @mock.patch('requests.Session.request')
    def test_request_retry_raises(self, mock_request):
        """Test that three requests errors will not be handled."""
        class CustomMock(object):
            """Fail three times with HTTPError, then return 200."""

            def __init__(self):
                self.i = 0

            def connection_error(self, *args, **kwargs):
                """Raise until the fourth call, then succeed."""
                self.i += 1

                if self.i < 4:
                    raise requests.exceptions.HTTPError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')

        # Three failures exceed the default retry budget, so it raises.
        with self.assertRaises(requests.exceptions.HTTPError):
            cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_random_request_retry(self, mock_request):
        """Test that a random number of connection errors will be handled."""
        class CustomMock(object):
            """Fail retries-1 times with ConnectionError, then return 204."""

            def __init__(self, retries):
                self.i = 0
                self.retries = retries

            def connection_error(self, *args, **kwargs):
                """Raise while under the configured retry count."""
                self.i += 1

                if self.i < self.retries:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 204
                    return r

        retries = random.randint(1, 5)
        mock_request.side_effect = CustomMock(retries).connection_error

        # The client is configured with exactly enough retries to succeed.
        cli = InfluxDBClient(database='db', retries=retries)
        cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_random_request_retry_raises(self, mock_request):
        """Test a random number of conn errors plus one will not be handled."""
        class CustomMock(object):
            """Fail retries times with ConnectionError, then return 200."""

            def __init__(self, retries):
                self.i = 0
                self.retries = retries

            def connection_error(self, *args, **kwargs):
                """Raise through the configured retry count, then succeed."""
                self.i += 1

                if self.i < self.retries + 1:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        retries = random.randint(1, 5)
        mock_request.side_effect = CustomMock(retries).connection_error

        cli = InfluxDBClient(database='db', retries=retries)

        # One more failure than the retry budget must surface the error.
        with self.assertRaises(requests.exceptions.ConnectionError):
            cli.write_points(self.dummy_points)

    def test_get_list_users(self):
        """Test that get_list_users() parses the user series into dicts.

        The mocked response contains one non-admin user named 'test'
        (the expected value below was redacted to '******' in this copy
        and is restored from the response JSON).
        """
        example_response = (
            '{"results":[{"series":[{"columns":["user","admin"],'
            '"values":[["test",false]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )

            self.assertListEqual(
                self.cli.get_list_users(),
                [{'user': 'test', 'admin': False}]
            )

    def test_get_list_users_empty(self):
        """Test get_list_users() with a series that has no values rows."""
        example_response = (
            '{"results":[{"series":[{"columns":["user","admin"]}]}]}'
        )
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )

            self.assertListEqual(self.cli.get_list_users(), [])

    def test_grant_admin_privileges(self):
        """Test that grant_admin_privileges() issues GRANT ALL PRIVILEGES."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            self.cli.grant_admin_privileges('test')

            self.assertEqual(
                m.last_request.qs['q'][0],
                'grant all privileges to "test"'
            )

    @raises(Exception)
    def test_grant_admin_privileges_invalid(self):
        """Expect an exception when granting admin privs gets a 400 reply."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Fix: the original called self.cli, whose session is not
            # mocked here; call the locally mocked client instead.
            cli.grant_admin_privileges('')

    def test_revoke_admin_privileges(self):
        """Test that revoke_admin_privileges() issues REVOKE ALL PRIVILEGES."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            self.cli.revoke_admin_privileges('test')

            self.assertEqual(
                m.last_request.qs['q'][0],
                'revoke all privileges from "test"'
            )

    @raises(Exception)
    def test_revoke_admin_privileges_invalid(self):
        """Test revoke invalid admin privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # BUG FIX: previously called self.cli instead of the locally
            # mocked `cli`, so the 400-status mock was never exercised.
            cli.revoke_admin_privileges('')

    def test_grant_privilege(self):
        """Verify grant_privilege() issues the expected InfluxQL."""
        response_body = '{"results":[{}]}'

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=response_body
            )
            self.cli.grant_privilege('read', 'testdb', 'test')

            issued_query = mocker.last_request.qs['q'][0]
            self.assertEqual(issued_query, 'grant read on "testdb" to "test"')

    @raises(Exception)
    def test_grant_privilege_invalid(self):
        """Test grant invalid privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # BUG FIX: previously called self.cli instead of the locally
            # mocked `cli`, so the 400-status mock was never exercised.
            cli.grant_privilege('', 'testdb', 'test')

    def test_revoke_privilege(self):
        """Verify revoke_privilege() issues the expected InfluxQL."""
        response_body = '{"results":[{}]}'

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=response_body
            )
            self.cli.revoke_privilege('read', 'testdb', 'test')

            issued_query = mocker.last_request.qs['q'][0]
            self.assertEqual(issued_query,
                             'revoke read on "testdb" from "test"')

    @raises(Exception)
    def test_revoke_privilege_invalid(self):
        """Test revoke invalid privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # BUG FIX: previously called self.cli instead of the locally
            # mocked `cli`, so the 400-status mock was never exercised.
            cli.revoke_privilege('', 'testdb', 'test')

    def test_get_list_privileges(self):
        """Test get list of privs for TestInfluxDBClient object."""
        # Fixed docstring typo ("Tst" -> "Test").
        # Mocked /query reply: one series mapping databases to privileges.
        data = {'results': [
            {'series': [
                {'columns': ['database', 'privilege'],
                 'values': [
                     ['db1', 'READ'],
                     ['db2', 'ALL PRIVILEGES'],
                     ['db3', 'NO PRIVILEGES']]}
            ]}
        ]}

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(
                self.cli.get_list_privileges('test'),
                [{'database': 'db1', 'privilege': 'READ'},
                 {'database': 'db2', 'privilege': 'ALL PRIVILEGES'},
                 {'database': 'db3', 'privilege': 'NO PRIVILEGES'}]
            )

    @raises(Exception)
    def test_get_list_privileges_fails(self):
        """Verify get_list_privileges() raises on an unauthorized reply."""
        client = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(client, 'get', 401):
            client.get_list_privileges('test')

    def test_invalid_port_fails(self):
        """Verify that a non-numeric port string raises ValueError."""
        # '80/redir' cannot be parsed as an integer port.
        with self.assertRaises(ValueError):
            InfluxDBClient('host', '80/redir', 'username', 'password')

    def test_chunked_response(self):
        """Test chunked response for TestInfluxDBClient object."""
        # Fixed docstring typo ("reponse" -> "response").
        example_response = \
            u'{"results":[{"statement_id":0,"series":' \
            '[{"name":"cpu","columns":["fieldKey","fieldType"],"values":' \
            '[["value","integer"]]}],"partial":true}]}\n{"results":' \
            '[{"statement_id":0,"series":[{"name":"iops","columns":' \
            '["fieldKey","fieldType"],"values":[["value","integer"]]}],' \
            '"partial":true}]}\n{"results":[{"statement_id":0,"series":' \
            '[{"name":"load","columns":["fieldKey","fieldType"],"values":' \
            '[["value","integer"]]}],"partial":true}]}\n{"results":' \
            '[{"statement_id":0,"series":[{"name":"memory","columns":' \
            '["fieldKey","fieldType"],"values":[["value","integer"]]}]}]}\n'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )
            response = self.cli.query('show series limit 4 offset 0',
                                      chunked=True, chunk_size=4)
            # assertEqual reports both values on failure, unlike
            # assertTrue(len(...) == 4); repr() over explicit __repr__().
            self.assertEqual(len(response), 4)
            self.assertEqual(repr(response), repr(ResultSet(
                {'series': [{'values': [['value', 'integer']],
                             'name': 'cpu',
                             'columns': ['fieldKey', 'fieldType']},
                            {'values': [['value', 'integer']],
                             'name': 'iops',
                             'columns': ['fieldKey', 'fieldType']},
                            {'values': [['value', 'integer']],
                             'name': 'load',
                             'columns': ['fieldKey', 'fieldType']},
                            {'values': [['value', 'integer']],
                             'name': 'memory',
                             'columns': ['fieldKey', 'fieldType']}]}
            )))
def main(file,
         hostname='localhost',
         port=8086,
         username='******',
         password='******',
         database='freifunk'):
    """Read JSON from *file*, convert it to a series and write it to InfluxDB.

    :param file: path of the JSON file to import
    :param hostname: InfluxDB host
    :param port: InfluxDB port
    :param username: InfluxDB user
    :param password: InfluxDB password
    :param database: database to create/write into
    """
    # Dropped the dead `jsondata = {}` pre-assignment (overwritten at once).
    jsondata = read_jsonfile(file)
    series = create_series(jsondata)

    client = InfluxDBClient(hostname, port, username, password, database)

    print("Create database: " + database)
    try:
        client.create_database(database)
    except InfluxDBClientError:
        # Redundant `pass` after print removed; best-effort is intentional.
        print("Database already existing, skipping creation")

    print("Create a retention policy")
    # Bind the policy name before the try so the write below can never hit
    # an unbound local, regardless of how this block is later rearranged.
    retention_policy = 'freifunk_policy'
    try:
        client.create_retention_policy(retention_policy, '3d', 1, default=True)
    except InfluxDBClientError:
        print("Retention policy existing, skipping creation")

    client.write_points(series, retention_policy=retention_policy)
    print("Data written to influxdb!")
Exemplo n.º 4
0
def start():
    """Opens connections with logger, InfluxDB and MQTT broker."""
    global logger
    global mqtt_client
    global house_data  # declared for parity with other module code; not set here
    global config
    global influx_client
    logger = LoggerClient.open("InfluxDBHub")
    mqtt_client = Utils.getpahoclient(logger, __configure_mqtt)
    config = Utils.getconfig("influxdb", logger)
    influx_client = InfluxDBClient(config["host"], config["port"],
                                   config["user"], config["password"],
                                   config["database"])
    # PEP 8 idiom: `x not in y` instead of `not x in y`; same semantics.
    if {"name": config["database"]} not in influx_client.get_list_database():
        influx_client.create_database(config["database"])
    # Generator instead of a materialized list inside any(); short-circuits.
    if not any(policy["name"] == "raspimon_policy"
               for policy in influx_client.get_list_retention_policies()):
        influx_client.create_retention_policy('raspimon_policy',
                                              config["retention_policy"],
                                              1,
                                              default=True)
    else:
        influx_client.alter_retention_policy(
            'raspimon_policy',
            duration=config["retention_policy"],
            replication=1,
            default=True)
Exemplo n.º 5
0
class InfluxDBPublisher(yaml.YAMLObject, Publisher, metaclass=PublisherMeta):
    """InfluxDB Publisher.

    Publishing service that sends data to an InfluxDB instance.
    """

    yaml_tag = u'!InfluxDBPublisher'

    def __init__(self, host, port, database, username, password):
        self.host = host
        self.port = port
        self.database = database
        self.username = username
        self.password = password

    def activate(self):
        """Connect to InfluxDB; create the database and a one-week default
        retention policy the first time, then switch to the database."""
        self.influxdb_client = InfluxDBClient(self.host, self.port, self.username, self.password, None)
        existing = self.influxdb_client.get_list_database()
        already_present = any(entry['name'] == self.database for entry in existing)
        if not already_present:
            self.influxdb_client.create_database(self.database)
            self.influxdb_client.create_retention_policy('oneweek', '1w', 1, self.database, default=True)
        self.influxdb_client.switch_database(self.database)

    def publish(self, data: list):
        """Write one point per datum: measurement from .name, tag from .tag,
        single 'value' field from .value."""
        for datum in data:
            point = {
                'measurement': datum.name,
                'tags': {'tag': datum.tag},
                'fields': {'value': datum.value},
            }
            self.influxdb_client.write_points([point])
Exemplo n.º 6
0
def main(host='localhost',
         port=8086,
         database='',
         user='',
         password='',
         rtSource='autogen',
         retentionPolicy='history',
         groupTime='1h',
         duration='INF'):
    """Create a downsampling continuous query (and backfill) per measurement.

    For each measurement lacking a CQ, builds a SELECT that keeps the first()
    of string fields and the mean() of numeric fields (plus max() when the
    measurement has more than 15 fields), backfills historical data into the
    target retention policy, then registers the continuous query.
    """
    client = InfluxDBClient(host, port, user, password, database, timeout=None)

    if not list(
            client.query("show retention policies").get_points(
                tags={"name": retentionPolicy})):
        client.create_retention_policy(retentionPolicy,
                                       duration,
                                       1,
                                       database,
                                       default=False,
                                       shard_duration=u'1d')
        print("Retention policy " + retentionPolicy + " created")

    measurements = getMeasurements(client)
    for measurement in measurements:
        if not checkCq(client, database, retentionPolicy, measurement):
            print("--> " + measurement)
            fields = getFields(client, measurement)
            nfields = countFields(fields.get_points())

            line = ""
            for field in fields.get_points():
                key = field["fieldKey"]
                if field["fieldType"] == "string":
                    # Strings cannot be averaged; keep the first value.
                    line += 'first("' + key + '") as "' + key + '",'
                else:
                    # DEDUP FIX: both arms of the old `nfields <= 15` branch
                    # emitted the identical mean() clause; emit it once and
                    # add max() only for wide (>15 field) measurements.
                    line += 'mean("' + key + '") as "' + key + '",'
                    if nfields > 15:
                        line += 'max("' + key + '") as "' + key + '_max",'

            CQNAME = '"' + database + "_" + retentionPolicy + "_" + measurement + '"'
            FROM = '"' + database + '"."' + rtSource + '"."' + measurement + '"'
            INTO = '"' + database + '"."' + retentionPolicy + '"."' + measurement + '"'
            # line[:-1] trims the trailing comma left by the loop above.
            Query = ("SELECT " + line[:-1] + " INTO " + INTO + " FROM " + FROM
                     + " WHERE time < now()-" + groupTime
                     + " GROUP BY time(" + groupTime + "),*")
            QueryCQ = ("CREATE CONTINUOUS QUERY " + CQNAME + " ON " + database
                       + " BEGIN " + Query + " END ")

            print("\tCharging data on " + INTO)
            client.query(Query)

            print("\tCreating CQ: " + CQNAME)
            client.query(QueryCQ)
Exemplo n.º 7
0
class DBManager(object):
    """Manage an InfluxDB connection for the 'varken' database, creating the
    database and its retention policy on first use."""

    def __init__(self, server):
        self.server = server
        self.influx = InfluxDBClient(host=self.server.url,
                                     port=self.server.port,
                                     username=self.server.username,
                                     password=self.server.password,
                                     ssl=self.server.ssl,
                                     database='varken',
                                     verify_ssl=self.server.verify_ssl)
        version = self.influx.request(
            'ping', expected_response_code=204).headers['X-Influxdb-Version']
        self.logger = getLogger()
        self.logger.info('Influxdb version: %s', version)

        known_dbs = {db['name'] for db in self.influx.get_list_database()}
        if 'varken' not in known_dbs:
            self.logger.info("Creating varken database")
            self.influx.create_database('varken')

            self.logger.info("Creating varken retention policy (30d/1h)")
            self.influx.create_retention_policy('varken 30d/1h', '30d', '1',
                                                'varken', False, '1h')

    def write_points(self, data):
        """Write *data* to InfluxDB; server/connection errors are logged and
        the batch is dropped rather than retried."""
        self.logger.debug('Writing Data to InfluxDB %s', data)
        try:
            self.influx.write_points(data)
        except (InfluxDBServerError, ConnectionError) as err:
            self.logger.error(
                'Error writing data to influxdb. Dropping this set of data. '
                'Check your database! Error: %s', err)
Exemplo n.º 8
0
def main():
    """Parse CLI options, then create the database and a 1-day retention
    policy; exit non-zero on any InfluxDB failure."""
    parser = argparse.ArgumentParser(description='DB setup')
    parser.add_argument('--user', help='Username for DB')
    parser.add_argument('--password', help='Password for DB')
    parser.add_argument('--host', help='Host for connecting to DB')
    parser.add_argument('--dbname', help='Database to insert data to')
    parser.add_argument('--port', type=int, help='Port for connecting to DB')
    parser.add_argument('--policyname', help='Policy for DB')
    args = parser.parse_args()

    # Use the parsed namespace directly instead of copying each attribute
    # into a local first.
    client = InfluxDBClient(host=args.host,
                            username=args.user,
                            password=args.password,
                            port=args.port)

    try:
        client.create_database(args.dbname)
        client.create_retention_policy(args.policyname,
                                       '1d',
                                       database=args.dbname,
                                       replication='1',
                                       default=False)
    except Exception as exception:
        logger.error(exception)
        sys.exit(1)

    logger.info(client.get_list_database())
Exemplo n.º 9
0
def main(host='localhost', port=8086):
    """Create the veseventsdb database and an 'awesome_policy' retention
    policy on the given InfluxDB instance."""
    user = '******'
    password = '******'
    dbname = 'veseventsdb'
    dbuser = '******'
    dbuser_password = '******'
    # Sample query and point, kept for parity with the other examples.
    query = 'select value from cpu_load_short;'
    point = {
        "measurement": "cpu_load_short",
        "tags": {"host": "server01", "region": "us-west"},
        "time": "2009-11-10T23:00:00Z",
        "fields": {
            "Float_value": 0.64,
            "Int_value": 3,
            "String_value": "Text",
            "Bool_value": True,
        },
    }
    json_body = [point]

    client = InfluxDBClient(host, port, user, password, dbname)

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Create a retention policy")
    client.create_retention_policy('awesome_policy', '6h', 3, default=True)
Exemplo n.º 10
0
 def create_db(self, host, port, user, password, dbname):
     """Drop any existing *dbname*, recreate it, and attach an infinite
     retention policy.

     Returns True on success, False on any failure (failures are printed
     and logged rather than raised).
     """
     # FIX: normalized the previously inconsistent (5/6/9/10-space)
     # indentation; behavior is unchanged.
     client = InfluxDBClient(host, port, user, password, dbname)
     logger.debug("self =" + str(self))
     try:
         client.drop_database(dbname)
     except Exception as e:
         print("didnt drop old db")
         logger.error("Didn't drop old db:" + str(e))
         return False
     try:
         client.create_database(dbname)
     except Exception as e:
         print("db not created - {0}".format(e))
         return False
     except:
         # Bare except after `except Exception` still catches BaseException
         # subclasses (e.g. KeyboardInterrupt); kept deliberately.
         print("unknown error openning db")
         return False
     print("db created")
     try:
         client.create_retention_policy('infinite retention', 'INF', 3, default=True)
     except Exception as e:
         print("retention policy not set - {0}".format(e))
         logger.error("retention policy not set - {0}".format(e))
         return False
     except:
         print("unknown error openning db")
         return False
     client.close()
     return True
Exemplo n.º 11
0
def setup_db_for_use(host,
                     port,
                     db_name='firefly',
                     retention_duration='1h',
                     retention_policy_name="default_firefly_retention"):
    """
    Sets up an instance of InfluxDB to store data to.

    :param host: the ip which can be used to access the db
    :param port: the port by which the db can be accessed
    :param db_name: name of the databse to use inside InfluxDB
    :param retention_duration: specifies how long to retain the stored information
    :param retention_policy_name: name of the retention policy that is to be created
    :return: an active InfluxDB client
    """
    client = InfluxDBClient(host, port)

    # Create the database only when it is not already listed.
    existing = client.get_list_database()
    if {"name": db_name} not in existing:
        client.create_database(db_name)

    client.switch_database(db_name)
    client.create_retention_policy(retention_policy_name,
                                   retention_duration,
                                   1,
                                   default=True)
    return client
Exemplo n.º 12
0
def prepare(host, port):
    """Create the target database and its 'awesome_policy' retention policy
    using a pooled InfluxDB connection."""
    client = InfluxDBClient(host, port, user, password, dbname, pool_size=10)

    print("Create database: " + dbname)
    client.create_database(dbname)
    print("Create a retention policy")
    client.create_retention_policy('awesome_policy', '3d', 3, default=True)
Exemplo n.º 13
0
def main():
	"""Connect to InfluxDB, create DBNAME and a one-day retention policy."""
	try:
		client = InfluxDBClient(HOST, 8086, USER, PWD, DBNAME)

		client.create_database(DBNAME)

		print("Create retention policy")
		client.create_retention_policy('one_day_only', '1d', 1, default=True)
	except Exception as exc:
		# SYNTAX FIX: the original `try:` had no except/finally clause at
		# all, which is a SyntaxError; report the failure instead.
		print("InfluxDB setup failed: {0}".format(exc))
Exemplo n.º 14
0
def send_json(database, json_data, policy_name, policy_duration):
    """Ensure a retention policy exists on *database*, then write *json_data*.

    Write failures are printed, not raised.
    """
    client = InfluxDBClient(host='xxx')
    client.create_retention_policy(policy_name, policy_duration, '2', database)
    client.switch_database(database)

    try:
        client.write_points(json_data)
    except Exception as err:
        print(err)
Exemplo n.º 15
0
def username(vmanage_session):
    """Fetch vManage user accounts and store them as points in InfluxDB.

    :param vmanage_session: session object exposing get_request();
        presumably an authenticated vManage REST client -- confirm at caller.
    """
    response = vmanage_session.get_request("admin/user")
    items = response.json()['data']

    # login credentials for InfluxDB
    host = 'localhost'
    port = 8086
    USER = '******'
    PASSWORD = '******'
    DBNAME = 'username'

    series = []

    # loop over the API response variable items and create records to be
    # stored in InfluxDB.  DEDUP FIX: the two branches previously repeated
    # the whole point dict; only the fields differ (3-attribute records
    # also carry a description, stored as 'fullname').
    for i in items:
        if len(i) == 3:
            fields = {
                "fullname": str(i['description']),
                "group": str(i['group'])
            }
        else:
            fields = {
                "group": str(i['group'])
            }
        series.append({
            "measurement": "usernames",
            "tags": {
                "username": str(i['userName']),
            },
            #"time": time.strftime('%m/%d/%Y %H:%M:%S',  time.gmtime(i['entry_time']/1000.)),
            "fields": fields
        })
    total_records = len(series)

    client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME)

    print("Create a retention policy")
    retention_policy = 'retention_policy_1'
    client.create_retention_policy(retention_policy, '10d', 3, default=True)

    print("Write points #: {0}".format(total_records))
    client.write_points(series, retention_policy=retention_policy)

    time.sleep(2)
Exemplo n.º 16
0
def main(host='localhost', port=8086, nb_day=15):
    """Instantiate a connection to the backend.

    Generates *nb_day* days of synthetic CPU-idle samples (one every five
    minutes), writes them to DBNAME, runs an aggregate query, then drops
    the database again.

    :param host: InfluxDB host
    :param port: InfluxDB port
    :param nb_day: number of days of history to generate
    """
    # BUG FIX: the parameter was previously clobbered by `nb_day = 15`
    # right here, so callers could never change the generated time span.
    timeinterval_min = 5  # create an event every x minutes
    total_minutes = 1440 * nb_day
    total_records = int(total_minutes / timeinterval_min)
    now = datetime.datetime.today()
    metric = "server_data.cpu_idle"
    series = []

    for i in range(0, total_records):
        past_date = now - datetime.timedelta(minutes=i * timeinterval_min)
        value = random.randint(0, 200)
        hostName = "server-%d" % random.randint(1, 5)
        # NOTE: strftime('%s') is a platform-specific (glibc) extension.
        pointValues = {
            "time": int(past_date.strftime('%s')),
            "measurement": metric,
            "fields": {
                "value": value,
            },
            "tags": {
                "hostName": hostName,
            },
        }
        series.append(pointValues)

    print(series)

    client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME)

    print("Create database: " + DBNAME)
    try:
        client.create_database(DBNAME)
    except InfluxDBClientError:
        # Drop and create
        client.drop_database(DBNAME)
        client.create_database(DBNAME)

    print("Create a retention policy")
    retention_policy = 'server_data'
    client.create_retention_policy(retention_policy, '3d', 3, default=True)

    print("Write points #: {0}".format(total_records))
    client.write_points(series, retention_policy=retention_policy)

    time.sleep(2)

    query = "SELECT MEAN(value) FROM {} WHERE \
            time > now() - 10d GROUP BY time(500m)".format(metric)
    result = client.query(query, database=DBNAME)
    print(result)
    print("Result: {0}".format(result))

    print("Drop database: {}".format(DBNAME))
    client.drop_database(DBNAME)
Exemplo n.º 17
0
class DBManager(object):
    """Manage the InfluxDB connection and schema for the 'varken' database."""

    def __init__(self, server):
        # *server* presumably carries url/port/credentials/ssl settings
        # parsed from varken.ini -- confirm against the caller.
        self.server = server
        self.logger = getLogger()
        # The stock config ships with this placeholder URL; bail out early
        # instead of trying to reach a non-existent host.
        if self.server.url == "influxdb.domain.tld":
            self.logger.critical(
                "You have not configured your varken.ini. Please read Wiki page for configuration"
            )
            exit()
        self.influx = InfluxDBClient(host=self.server.url,
                                     port=self.server.port,
                                     username=self.server.username,
                                     password=self.server.password,
                                     ssl=self.server.ssl,
                                     database='varken',
                                     verify_ssl=self.server.verify_ssl)
        try:
            # A successful /ping answers 204 and reports the server version
            # in the X-Influxdb-Version header.
            version = self.influx.request(
                'ping',
                expected_response_code=204).headers['X-Influxdb-Version']
            self.logger.info('Influxdb version: %s', version)
        except ConnectionError:
            self.logger.critical(
                "Error testing connection to InfluxDB. Please check your url/hostname"
            )
            exit()

        databases = [db['name'] for db in self.influx.get_list_database()]

        if 'varken' not in databases:
            self.logger.info("Creating varken database")
            self.influx.create_database('varken')

            # The database was just created, so this list is normally empty;
            # the membership check guards against a name collision.
            retention_policies = [
                policy['name']
                for policy in self.influx.get_list_retention_policies(
                    database='varken')
            ]
            if 'varken 30d-1h' not in retention_policies:
                self.logger.info("Creating varken retention policy (30d-1h)")
                self.influx.create_retention_policy(name='varken 30d-1h',
                                                    duration='30d',
                                                    replication='1',
                                                    database='varken',
                                                    default=True,
                                                    shard_duration='1h')

    def write_points(self, data):
        """Write *data* (a list of point dicts) to InfluxDB; on server or
        connection errors the batch is logged and dropped, not retried."""
        d = data
        self.logger.debug('Writing Data to InfluxDB %s', d)
        try:
            self.influx.write_points(d)
        except (InfluxDBServerError, ConnectionError) as e:
            self.logger.error(
                'Error writing data to influxdb. Dropping this set of data. '
                'Check your database! Error: %s', e)
def send_stats_to_influxdb(parsed_stats, timestamp):
    """Write parsed DSL stats to the 'dslstats' database, creating it (and a
    52-week retention policy) on first use.

    Exceptions propagate to the caller, exactly as before: the old
    `try/except Exception: raise` wrapper only re-raised and was removed.
    """
    db_client = InfluxDBClient(influx_ip, influx_port, influx_username, influx_password, influx_database)
    # PEP 8 idiom: `x not in y` instead of `not x in y`.
    if {u'name': u'dslstats'} not in db_client.get_list_database():
        db_client.create_database("dslstats")
        db_client.create_retention_policy("dslstats-retention-policy", "52w", "1", default=True)
    # Renamed local from `json`, which shadowed the stdlib json module.
    payload = format_json(parsed_stats, timestamp)
    db_client.write_points(payload)
Exemplo n.º 19
0
def network_detect_task_query(request):
    """Query a network-probe task for the requested hostname/sn.

    Looks for a cached task in InfluxDB first; on a miss, fetches the task
    from the remote service, caches it under a 1-day auto-delete retention
    policy, and returns it as JSON.
    """
    hostname = request.POST.get('hostname', '')
    sn = request.POST.get('sn', '')
    if not sn or not hostname:
        response_data = {'success': False, 'msg': u"未传递主机名和sn号"}
        return HttpResponse(json.dumps(response_data),
                            content_type="application/json")
    client = InfluxDBClient(IP_INFLUXDB, PORT_INFLUXDB, USER_INFLUXDB,
                            PASSWORD_INFLUXDB)
    string_db_name = 'network_detect_task'
    client.create_database(string_db_name)
    string_db_retention_policy = 'auto_delte_1d'
    client.create_retention_policy(string_db_retention_policy,
                                   database=string_db_name,
                                   duration='1d',
                                   replication=REPLICATION_INFLUXDB,
                                   default=True)
    # SECURITY FIX: hostname/sn come straight from the request; bind them as
    # query parameters instead of interpolating them into the InfluxQL
    # string (injection risk).  Measurement name and expiry are constants.
    sql = """select * from %s where time>now() - %s and hostname=$hostname and sn=$sn limit 1;""" % (
        string_db_name, EXPIRE_NETWORK_DETECT_TASK)
    sql_result = client.query(sql,
                              database=string_db_name,
                              bind_params={"hostname": hostname, "sn": sn})
    list_task = list(sql_result.get_points())
    if list_task:
        # Cache hit: return the stored task payload.
        response_data = list_task[0]['data']
        if isinstance(response_data, str):
            response_data = json.loads(response_data)
        return HttpResponse(json.dumps(response_data),
                            content_type="application/json")
    # Cache miss: ask the remote service for a fresh task.
    data = {"hostname": hostname, "sn": sn}
    response_data = do_request(URL_NETWORK_DETECT_TASK, data=data)
    if response_data['success']:
        # Timestamp shifted by -8h (local CST offset) into the point's time.
        localtime = time.strftime(
            '%Y-%m-%dT%H:%M:00Z',
            time.localtime(float(time.time()) - 8 * 60 * 60))
        sql_json = [{
            "measurement": string_db_name,
            "tags": {
                "hostname": hostname,
                "sn": sn,
            },
            "time": localtime,
            "fields": {
                "data": json.dumps(response_data)
            }
        }]
        client.write_points(sql_json,
                            database=string_db_name,
                            retention_policy=string_db_retention_policy)
        return HttpResponse(json.dumps(response_data),
                            content_type="application/json")
    response_data = {'success': False, 'msg': u"获取探测任务失败"}
    return HttpResponse(json.dumps(response_data),
                        content_type="application/json")
Exemplo n.º 20
0
def main(host='localhost', port=8086, nb_day=15):
    """Generate *nb_day* days of synthetic CPU-idle samples, write them to
    DBNAME, query an aggregate, then drop the database.

    :param host: InfluxDB host
    :param port: InfluxDB port
    :param nb_day: number of days of history to generate
    """
    # BUG FIX: `nb_day = 15` used to shadow the parameter here, making it
    # impossible for callers to change the generated time span.
    timeinterval_min = 5  # create an event every x minutes
    total_minutes = 1440 * nb_day
    total_records = int(total_minutes / timeinterval_min)
    now = datetime.datetime.today()
    metric = "server_data.cpu_idle"
    series = []

    for i in range(0, total_records):
        past_date = now - datetime.timedelta(minutes=i * timeinterval_min)
        value = random.randint(0, 200)
        hostName = "server-%d" % random.randint(1, 5)
        # NOTE: strftime('%s') is a platform-specific (glibc) extension.
        pointValues = {
            "time": int(past_date.strftime('%s')),
            "measurement": metric,
            'fields': {
                'value': value,
            },
            'tags': {
                "hostName": hostName,
            },
        }
        series.append(pointValues)

    print(series)

    client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME)

    print("Create database: " + DBNAME)
    try:
        client.create_database(DBNAME)
    except InfluxDBClientError:
        # Drop and create
        client.drop_database(DBNAME)
        client.create_database(DBNAME)

    print("Create a retention policy")
    retention_policy = 'server_data'
    client.create_retention_policy(retention_policy, '3d', 3, default=True)

    print("Write points #: {0}".format(total_records))
    client.write_points(series, retention_policy=retention_policy)

    time.sleep(2)

    query = "SELECT MEAN(value) FROM {} WHERE time > now() - 10d GROUP BY time(500m)".format(metric)
    result = client.query(query, database=DBNAME)
    print(result)
    print("Result: {0}".format(result))

    print("Drop database: {}".format(DBNAME))
    client.drop_database(DBNAME)
Exemplo n.º 21
0
class InfluxDB(DBAdaptor):
    """DBAdaptor backed by InfluxDB: reads connection settings from the
    'influxdb' YAML config and ensures the database plus rp_7d/rp_30d
    retention policies exist."""

    def __init__(self):
        conf = cper.read_yaml_file('influxdb')
        host = conf['host']
        port = conf['port']
        username = conf['username']
        pwd = conf['password']
        self.database = conf['database']
        # Read (and thereby require the presence of) the UDP settings even
        # though this adaptor currently connects over HTTP only.
        use_udp = conf['use_udp']
        udp_port = conf['udp_port']
        self.client = InfluxDBClient(host=host, port=port, username=username, password=pwd, database=self.database)
        # DEAD-CODE FIX: the old `if self.client is None` guard could never
        # trigger -- the constructor either returns a client or raises.
        self.__setting()

    def saveall(self, data):
        """Write a batch of points; print a warning when write_points
        reports failure (it returns a bool)."""
        rsl = self.client.write_points(data)
        if not rsl:
            print('Fail to saveall!')

    def exec_sql(self, sql):
        """Run *sql* and return an iterator over the resulting points."""
        rsl = self.client.query(sql)
        points = rsl.get_points()
        return points

    def __setting(self):
        """
        1\ create database if not existed
        2\ create rp if not existed
        :return:
        """
        rsl = self.client.get_list_database()
        if {'name': self.database} not in rsl:
            self.client.create_database(dbname=self.database)
        # Simplified: a set of policy names replaces the old flag-setting
        # loop with its redundant `else: continue` branch.
        policy_names = {
            policy['name']
            for policy in self.client.get_list_retention_policies(
                database=self.database)
        }
        if 'rp_7d' not in policy_names:
            self.client.create_retention_policy(name='rp_7d', duration='168h', replication='1', database=self.database,
                                                default=True, shard_duration='24h')
        if 'rp_30d' not in policy_names:
            self.client.create_retention_policy(name='rp_30d', duration='720h', replication='1', database=self.database,
                                                default=False, shard_duration='24h')

    def __del__(self):
        pass
Exemplo n.º 22
0
    def init_influxdb(self):
        """Create and return an InfluxDB client for self.influxdb_database.

        Pings the server, verifies or creates the database, and on first
        creation installs a default retention policy named
        '<database> <retention>-<shard>'.  Connection and client errors are
        logged as critical and re-raised.
        """
        client = InfluxDBClient(
            host=self.influxdb_host,
            port=self.influxdb_port,
            username=self.influxdb_user,
            password=self.influxdb_user_pass,
            database=self.influxdb_database,
        )

        try:
            logging.debug('Testing InfluxDB connection')
            # A successful /ping answers 204 and reports the server version
            # in the X-Influxdb-Version header.
            version = client.request(
                'ping',
                expected_response_code=204).headers['X-Influxdb-Version']
            logging.debug(f'Influxdb version: {version}')
        except ConnectionError as e:
            logging.critical(
                f'Error testing connection to InfluxDB. Please check your url/hostname.\nError: {e}'
            )
            raise

        try:
            # NOTE(review): `databases` is used after this try block; that
            # is safe only because the except clause below re-raises.
            databases = [db['name'] for db in client.get_list_database()]
            if self.influxdb_database in databases:
                logging.debug(f'Found database: {self.influxdb_database}')
        except InfluxDBClientError as e:
            logging.critical(
                f'Error getting database list! Please check your InfluxDB configuration.\nError: {e}'
            )
            raise

        if self.influxdb_database not in databases:
            logging.info(f'Creating database: {self.influxdb_database}')
            client.create_database(self.influxdb_database)

            # The database was just created, so this list is normally empty;
            # the membership check guards against a name collision.
            retention_policies = [
                policy['name']
                for policy in client.get_list_retention_policies(
                    database=self.influxdb_database)
            ]
            if f'{self.influxdb_database} {self.influxdb_retention}-{self.influxdb_shard}' not in retention_policies:
                logging.info(
                    f'Creating {self.influxdb_database} retention policy ({self.influxdb_retention}-{self.influxdb_shard})'
                )
                client.create_retention_policy(
                    name=
                    f'{self.influxdb_database} {self.influxdb_retention}-{self.influxdb_shard}',
                    duration=self.influxdb_retention,
                    replication='1',
                    database=self.influxdb_database,
                    default=True,
                    shard_duration=self.influxdb_shard)
        return client
Exemplo n.º 23
0
class dbWrapper():
    """Thin wrapper around InfluxDBClient that (re)creates the connection,
    the database and a default retention policy, and writes data points,
    reconnecting automatically when a write fails."""

    def __init__(self, host, port, user, password, dbname, dbuser, dbpss):
        self.port = port
        self.host = host
        self.user = user
        self.password = password
        self.dbname = dbname
        self.dbuser = dbuser
        self.dbpss = dbpss
        self.tryReconnect()

    def tryReconnect(self):
        """Open a fresh connection, create the database and retention
        policy, then switch to the application DB user."""
        helper.internalLogger.info("Try reconnection to database" +
                                   self.dbname)
        try:
            # Instantiate a connection to the InfluxDB.
            self.client = InfluxDBClient(self.host, self.port, self.user,
                                         self.password, self.dbname)

            helper.internalLogger.info("Create database: " + self.dbname)
            self.client.create_database(self.dbname)

            helper.internalLogger.info("Create a retention policy")
            self.client.create_retention_policy('awesome_policy',
                                                '3d',
                                                3,
                                                default=True)

            # NOTE(review): the original line here was corrupted by
            # credential scrubbing ('"Switch user: "******"Ok ok, ...' is a
            # syntax error collapsing several statements); the switch-user
            # call and KeyboardInterrupt handler below are reconstructed
            # following the other examples in this file.
            helper.internalLogger.info("Switch user: " + self.dbuser)
            self.client.switch_user(self.dbuser, self.dbpss)
        except KeyboardInterrupt:
            print("Ok ok, quitting")
            sys.exit(1)
        except Exception as e:
            e = sys.exc_info()[0]
            helper.internalLogger.error(
                'Unexpected error attempting to access to BD. It will be retried later.'
            )
            helper.einternalLogger.exception(e)

    def addData(self, data):
        """Write a list of InfluxDB point dicts; on unexpected failure log
        the error and schedule a reconnection attempt."""
        try:
            json_body = data
            helper.internalLogger.debug("Adding to DB: {0}".format(data))
            self.client.write_points(json_body)
        except KeyboardInterrupt:
            print("Ok ok, quitting")
            sys.exit(1)
        except Exception as e:
            e = sys.exc_info()[0]
            helper.internalLogger.error('Unexpected error inserting in DB')
            helper.einternalLogger.exception(e)
            self.tryReconnect()
Exemplo n.º 24
0
def loop(ds18b20):
    """Poll a DS18B20 temperature sensor forever: show each reading on the
    attached display, persist it to InfluxDB, and e-mail an alert when the
    temperature exceeds 35 degrees.

    Runs until the process is killed (infinite loop, one cycle per ~3 min).
    """
    # mail settings (credentials scrubbed in the published source)
    smtp_server = "smtp.gmail.com"
    portsmtp = 465
    sender_email = "*****@*****.**"
    receiver_email = ["*****@*****.**"]
    password = "******"

    # database settings
    USER = '******'
    PASSWORD = '******'
    DBNAME = 'temp'
    host = 'localhost'
    hostName = 'pi'
    port = 8086
    metric = "Server Room Temperatures"
    retention_policy = 'awesome_policy'
    grafana_timezone = pytz.utc

    while True:
        # BUG FIX: the original read the sensor up to four times per cycle,
        # so the alert could fire on a different value than was stored and
        # the None-check happened only after the value had been indexed.
        reading = read(ds18b20)
        temperature = reading[0]

        client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME)
        client.create_retention_policy(retention_policy, '30d', 3,
                                       default=True)
        # NOTE(review): InfluxDB normally expects RFC3339/ISO timestamps;
        # this locale-style format is kept from the original -- confirm the
        # server actually accepts it.
        json_temp = {
            "tags": {
                "hostName": hostName
            },
            "measurement": metric,
            "fields": {
                "value": temperature
            },
            "time": datetime.datetime.now(grafana_timezone).strftime("%m/%d/%Y, %H:%M:%S"),
        }

        # Show the current value on the attached display.
        with canvas(device) as draw:
            draw.text((0, 0), str(json_temp['fields']['value']),
                      fill="white", font=font)

        # Keep the last point on disk for post-mortem debugging.
        with open('last_breath', 'w') as fh:
            fh.write(str(json_temp))

        client.write_points([json_temp], retention_policy=retention_policy)

        # Alert when the room gets too hot.
        if reading is not None and temperature > 35:
            message = "Temperatura w serwerowni wynosi {}".format(temperature)

            with smtplib.SMTP_SSL(smtp_server, portsmtp) as server:
                server.login(sender_email, password)
                server.sendmail(sender_email, receiver_email, message)
                server.close()
        time.sleep(180)
def send_to_db(json_body, host='localhost', port=8086):
    """Write *json_body* points to the 'letna' InfluxDB database."""
    username = '******'
    secret = '******'
    target_db = 'letna'

    # Instantiate a connection to the InfluxDB.
    connection = InfluxDBClient(host, port, username, secret, target_db)

    # Make sure the default retention policy exists before writing.
    connection.create_retention_policy('awesome_policy', '3d', 3, default=True)

    response = connection.write_points(json_body)
Exemplo n.º 26
0
def init_influx():
    """ Initializes influx database. """
    print("Connecting to Influx...")
    client = InfluxDBClient(
        host=config.INFLUX_HOST,
        port=config.INFLUX_PORT,
        database=config.INFLUX_DB_NAME,
    )
    print("Connected to Influx!")

    # Create the database and its one-week retention policy (replication 1,
    # attached to the 'listenbrainz' database).
    print("Creating influx database...")
    client.create_database(config.INFLUX_DB_NAME)
    client.create_retention_policy("one_week", "1w", 1, "listenbrainz")
    print("Done!")
Exemplo n.º 27
0
def main(host='localhost', port=8086):
    """End-to-end InfluxDB example: create a database and retention policy,
    write one point as a restricted user, query it back, then drop the
    database as the admin user.

    Two lines in the original were corrupted by credential scrubbing
    ('"Switch user: "******"...' syntax errors collapsing statements); the
    switch_user calls below are reconstructed from the identical example at
    the top of this file.
    """
    user = '******'
    password = '******'
    dbname = 'example'
    dbuser = '******'
    dbuser_password = '******'
    query = 'select value from cpu_load_short;'
    json_body = [
        {
            "name": "cpu_load_short",
            "tags": {
                "host": "server01",
                "region": "us-west"
            },
            "timestamp": "2009-11-10T23:00:00Z",
            "fields": {
                "value": 0.64
            }
        }
    ]

    client = InfluxDBClient(host, port, user, password, dbname)

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Create a retention policy")
    client.create_retention_policy('awesome_policy', '3d', 3, default=True)

    # Switch to the restricted DB user before writing.
    print("Switch user: " + dbuser)
    client.switch_user(dbuser, dbuser_password)

    print("Write points: {0}".format(json_body))
    client.write_points(json_body)

    print("Querying data: " + query)  # fixed "Queying" typo
    result = client.query(query)

    print("Result: {0}".format(result))

    # Switch back to the admin user so we are allowed to drop the database.
    print("Switch user: " + user)
    client.switch_user(user, password)

    print("Drop database: " + dbname)
    client.drop_database(dbname)
Exemplo n.º 28
0
def main(host='localhost', port=8086):
    """Example: create a database, write generated points every 2 seconds
    for 1000 iterations, query them back, then clean up.

    The original block mixed Python-2 print statements with tab
    indentation and contained lines corrupted by credential scrubbing
    (L'"Switch user: "******"...' syntax errors); this version is
    reconstructed as valid Python 3 following the sibling examples.
    """
    user = '******'
    password = '******'
    dbname = 'example'
    dbuser = '******'
    dbuser_password = '******'
    query = 'select * from test'  # table

    client = InfluxDBClient(host, port, user, password, dbname)

    # Create
    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Create a retention policy")
    client.create_retention_policy('awesome_policy', '3d', 3, default=True)

    # Reconstructed: switch to the restricted DB user before writing.
    print("Switch user: " + dbuser)
    client.switch_user(dbuser, dbuser_password)

    print("Update\n")
    # Update: write a fresh batch of points every 2 seconds, 1000 times.
    # BUG FIX: the original called get_Json() twice per iteration, so the
    # printed points could differ from the written ones.
    for _ in range(1000):
        time.sleep(2)
        points = get_Json()
        print("Write points: {0}".format(points))
        client.write_points(points)

    print("\nQuerying data: " + query)  # fixed "Queying" typo
    result = client.query(query)
    print("Result: {0}".format(result))

    print("\nRetrieve\n")
    # Retrieve only the rows whose boolean field is true.
    result = client.query('SELECT * FROM test WHERE BoolVal=true')
    print("Result: {0}".format(result))

    # Reconstructed: switch back to the admin user and drop the database,
    # mirroring the other examples (the original's tail was truncated).
    print("Switch user: " + user)
    client.switch_user(user, password)

    print("Drop database: " + dbname)
    client.drop_database(dbname)
Exemplo n.º 29
0
def main(file, hostname='localhost', port=8086, username='******', password='******', database='freifunk'):
    """Read a JSON file, convert it to Influx series and write them.

    Database and retention-policy creation are idempotent: an
    InfluxDBClientError from either is treated as "already exists".
    """
    # NOTE: the parameter name `file` shadows a builtin but is kept for
    # backward compatibility with existing callers.
    # Removed the dead `jsondata = {}` assignment that was immediately
    # overwritten in the original.
    jsondata = read_jsonfile(file)
    series = create_series(jsondata)

    client = InfluxDBClient(hostname, port, username, password, database)

    print("Create database: " + database)
    try:
        client.create_database(database)
    except InfluxDBClientError:
        print("Database already existing, skipping creation")

    print("Create a retention policy")
    # Hoisted out of the try block so it is unconditionally defined when
    # used in the write_points call below.
    retention_policy = 'freifunk_policy'
    try:
        client.create_retention_policy(retention_policy, '3d', 1, default=True)
    except InfluxDBClientError:
        print("Retention policy existing, skipping creation")

    client.write_points(series, retention_policy=retention_policy)
    print("Data written to influxdb!")
def start():
    """Opens connections with logger, InfluxDB and MQTT broker."""
    global logger
    global mqtt_client
    global house_data
    global config
    global influx_client

    logger = LoggerClient.open("InfluxDBHub")
    mqtt_client = Utils.getpahoclient(logger, __configure_mqtt)
    config = Utils.getconfig("influxdb", logger)
    influx_client = InfluxDBClient(config["host"], config["port"],
                                   config["user"], config["password"],
                                   config["database"])

    # Create the database on first run.
    if {"name": config["database"]} not in influx_client.get_list_database():
        influx_client.create_database(config["database"])

    # Create the retention policy if missing, otherwise refresh its
    # duration from the configuration.
    existing_policies = [p["name"]
                         for p in influx_client.get_list_retention_policies()]
    if "raspimon_policy" in existing_policies:
        influx_client.alter_retention_policy('raspimon_policy',
                                             duration=config["retention_policy"],
                                             replication=1,
                                             default=True)
    else:
        influx_client.create_retention_policy('raspimon_policy',
                                              config["retention_policy"],
                                              1, default=True)
Exemplo n.º 31
0
def init_db():
    """Create the 'alan' database, its retention policies, and the
    continuous queries that downsample raw page hits; return the client."""
    global db

    db = InfluxDBClient('localhost', 8086, 'root', 'root')
    db.create_database('alan', if_not_exists=True)
    db.switch_database('alan')

    # (policy name, duration, is default?)
    policies = [
        ('raw_hits', '1h', True),
        ('workingset', '1h', False),
        ('digest', '1d', False),
        ('trends', '1w', False),
        ('historical', '52w', False),
    ]
    for name, duration, is_default in policies:
        db.create_retention_policy(name, duration, 1, database='alan',
                                   default=is_default)

    # downsample data into counts per 5 for all articles
    # keep for a 1h
    db.query("DROP CONTINUOUS QUERY pageview_5s_count ON alan")
    db.query("CREATE CONTINUOUS QUERY pageview_5s_count ON alan BEGIN SELECT count(\"duration\") INTO alan.workingset.pageview_5s_bins FROM alan.raw_hits.pageview GROUP BY site, section, url, time(5s) END")
    # work out pages/sec for all content, updated per minute
    # keep for a day
    db.query("DROP CONTINUOUS QUERY create_pageview_rate ON alan")
    db.query("CREATE CONTINUOUS QUERY create_pageview_rate ON alan BEGIN SELECT sum(\"count\")/60 INTO alan.digest.pageview_rate FROM alan.workingset.pageview_5s_bins GROUP BY site, section, url, time(1m) END")

    return db
def main(host='localhost', port=8086, client_num=1):
    """Load-test InfluxDB: write NUMBER_OF_MEASUREMENTS points in batches
    using the line protocol and report the achieved insert rate.

    On first run (database absent) also creates the database/retention
    policy and records `top` output to ./system_info for the duration.
    Converted from Python 2 (print statements, xrange, implicit integer
    division) to Python 3.
    """
    print("influxDB test start-------------")
    print("host = {}".format(host))
    print("port = {}".format(port))
    print("LINE PROTOCOL")

    hostnames = []
    for i in range(0, NUMBER_OF_HOSTS):
        hostnames.append(uuid.uuid4().hex)

    user = '******'
    password = '******'
    db_name = 'monasca'
    client = InfluxDBClient(host, port, user, password, db_name)

    running_recording = False
    # BUG FIX: get_list_database() returns dicts like {'name': ...}; the
    # original compared the bare name against that list, so the check was
    # always true and the database was "created" on every run.
    existing_dbs = [db['name'] for db in client.get_list_database()]
    if db_name not in existing_dbs:
        # BUG FIX: the original format string had no {} placeholder, so the
        # database name was never printed.
        print("Create database: {}".format(db_name))
        client.create_database(db_name)
        print("Create a retention policy")
        client.create_retention_policy('awesome_policy', '3d', 3, default=True)
        print("Start recording top output")
        running_recording = True
        top_process = subprocess.Popen("exec top -b -d 1 > " + './' + 'system_info', shell=True)

    db_user = '******'
    db_user_password = '******'

    print("Switch user: {}".format(db_user))
    client.switch_user(db_user, db_user_password)

    # INSERT
    print("Write points: batch_size = {0}".format(NUMBER_PER_BATCH))
    start_time = datetime.utcnow()
    print("Start time: {0}".format(start_time))

    dimension_keys_values_map = {'service': 'monitoring', 'host': 'localhost',
                                 'cloud': 'cloud_test'}

    print("Inserting {0} measurements".format(NUMBER_OF_MEASUREMENTS))

    metric_count = 0
    for i in range(NUMBER_OF_MEASUREMENTS // NUMBER_PER_BATCH):
        batch_set = []
        for j in range(NUMBER_PER_BATCH):
            metric_suffix = metric_count % NUMBER_OF_UNIQUE_METRICS
            metric_name = 'metric_KS_{}_'.format(client_num) + str(metric_suffix)
            value = i * NUMBER_PER_BATCH + j
            dims = dict(dimension_keys_values_map)
            # Cycle through hosts once per full pass over the metric names.
            host_name = hostnames[(metric_count // NUMBER_OF_UNIQUE_METRICS) % NUMBER_OF_HOSTS]
            dims['host'] = host_name
            dimension_hash_string = ','.join(['%s=%s' % (d, dims[d]) for d in dims])
            new_hash_string = REGION + TENANT_ID + metric_name + dimension_hash_string
            # hashlib requires bytes input on Python 3.
            sha1_hash = hashlib.sha1(new_hash_string.encode('utf-8')).hexdigest()
            metric_id = str(sha1_hash)
            line_body = '{0},zone=nova,service=compute,resource_id=34c0ce14-9ce4-4d3d-84a4-172e1ddb26c4,' \
                        'tenant_id=71fea2331bae4d98bb08df071169806d,hostname={1},component=vm,' \
                        'control_plane=ccp,cluster=compute,cloud_name=monasca value={2},' \
                        'metric_id="{3}"'.format(metric_name, host_name, value, str(metric_id))
            batch_set.append(line_body)
            metric_count += 1
        client.write_points(batch_set, batch_size=NUMBER_PER_BATCH,
                            time_precision='ms', protocol='line')
    end_time = datetime.utcnow()
    elapsed = end_time - start_time
    if running_recording:
        os.kill(top_process.pid, 9)
    # Calculate Insert Rate
    print("elapsed time: {0}".format(str(elapsed)))
    print("measurements per sec: {0}".format(str(float(NUMBER_OF_MEASUREMENTS) / elapsed.seconds)))
Exemplo n.º 33
0
# Application settings come from the [DEFAULT] section of the parsed
# configuration file (config_parser is created earlier in the module).
config = config_parser['DEFAULT']


# Client for the remote InfluxDB instance holding particulate-matter data.
# Positional arguments: host, port, user, password, database ('pm'),
# ssl (true when 'influx_remote_https' is the string 'true'), verify_ssl.
myclient = InfluxDBClient(
    config['influx_remote_host'],
    int(config['influx_remote_port']),
    config['influx_remote_user'],
    config['influx_remote_password'],
    'pm',
    config['influx_remote_https'] == 'true',
    True,
    timeout=30
)

# Ensure the database and its retention policies exist. 'pm_policy' keeps
# data forever ('INF') and is the default; 'event_policy' must be selected
# explicitly.
myclient.create_database('pm')
myclient.create_retention_policy('pm_policy', 'INF', 3, default=True)
myclient.create_retention_policy('event_policy', 'INF', 3, default=False)

class PMSeriesHelper(SeriesHelper):
    """Writes particulate-matter samples to the 'particulates' series."""

    class Meta:
        # Declarative configuration consumed by influxdb.SeriesHelper.
        client = myclient             # connection used for commits
        series_name = 'particulates'
        fields = ['pm_25', 'pm_10']   # measured values per point
        tags = ['sensor_id']
        bulk_size = 1                 # commit after every single point
        autocommit = True


def store(data):
    """Write one PM sample to InfluxDB.

    `data` is indexed as [pm2.5, pm10]; the sensor id comes from config.
    """
    PMSeriesHelper(sensor_id=config['sensor_id'], pm_25=data[0], pm_10=data[1])
Exemplo n.º 34
0
class Output(cowrie.core.output.Output):
    """Cowrie output plugin that writes honeypot events to InfluxDB."""

    def __init__(self):
        cowrie.core.output.Output.__init__(self)

    def start(self):
        """Open the InfluxDB connection and ensure the target database and
        its retention policy exist, creating or updating them as needed."""
        # Each connection setting falls back to a default when missing or
        # unparsable in the config file.
        try:
            host = CONFIG.get('output_influx', 'host')
        except Exception:
            host = ''

        try:
            port = CONFIG.getint('output_influx', 'port')
        except Exception:
            port = 8086

        try:
            ssl = CONFIG.getboolean('output_influx', 'ssl')
        except Exception:
            ssl = False

        self.client = None
        try:
            # verify_ssl mirrors the ssl flag: certificates are only
            # verified when HTTPS is in use.
            self.client = InfluxDBClient(host=host, port=port, ssl=ssl, verify_ssl=ssl)
        except InfluxDBClientError as e:
            log.err("output_influx: I/O error({0}): '{1}'".format(
                e.errno, e.strerror))
            return

        if self.client is None:
            log.err("output_influx: cannot instantiate client!")
            return

        # Optional authentication: applied only when both credentials are
        # present in the configuration.
        if (CONFIG.has_option('output_influx', 'username') and
                CONFIG.has_option('output_influx', 'password')):
            username = CONFIG.get('output_influx', 'username')
            password = CONFIG.get('output_influx', 'password', raw=True)
            self.client.switch_user(username, password)

        try:
            dbname = CONFIG.get('output_influx', 'database_name')
        except Exception:
            dbname = 'cowrie'

        retention_policy_duration_default = '12w'
        retention_policy_name = dbname + "_retention_policy"

        if CONFIG.has_option('output_influx', 'retention_policy_duration'):
            retention_policy_duration = CONFIG.get(
                'output_influx', 'retention_policy_duration')

            # Accept only simple durations such as '30d', '24h', '10m', '12w'.
            match = re.search(r'^\d+[dhmw]{1}$', retention_policy_duration)
            if not match:
                log.err(("output_influx: invalid retention policy."
                         "Using default '{}'..").format(
                    retention_policy_duration))
                retention_policy_duration = retention_policy_duration_default
        else:
            retention_policy_duration = retention_policy_duration_default

        database_list = self.client.get_list_database()
        dblist = [str(elem['name']) for elem in database_list]

        if dbname not in dblist:
            # First run: create both the database and its default policy.
            self.client.create_database(dbname)
            self.client.create_retention_policy(
                retention_policy_name, retention_policy_duration, 1,
                database=dbname, default=True)
        else:
            retention_policies_list = self.client.get_list_retention_policies(
                database=dbname)
            rplist = [str(elem['name']) for elem in retention_policies_list]
            if retention_policy_name not in rplist:
                self.client.create_retention_policy(
                    retention_policy_name, retention_policy_duration, 1,
                    database=dbname, default=True)
            else:
                # Policy already exists: refresh its duration from config.
                self.client.alter_retention_policy(
                    retention_policy_name, database=dbname,
                    duration=retention_policy_duration,
                    replication=1, default=True)

        self.client.switch_database(dbname)

    def stop(self):
        """No teardown is performed."""
        pass

    def write(self, entry):
        """Convert one cowrie event dict into an InfluxDB point and write
        it; unknown event ids are logged and skipped."""
        if self.client is None:
            log.err("output_influx: client object is not instantiated")
            return

        # event id
        eventid = entry['eventid']

        # measurement init: the measurement name is the event id with dots
        # replaced by underscores, tagged with session and source IP.
        m = {
            'measurement': eventid.replace('.', '_'),
            'tags': {
                'session': entry['session'],
                'src_ip': entry['src_ip']
            },
            'fields': {
                'sensor': self.sensor
            },
        }

        # event parsing: copy the event-specific fields into the point.
        if eventid in ['cowrie.command.failed',
                       'cowrie.command.input']:
            m['fields'].update({
                'input': entry['input'],
            })

        elif eventid == 'cowrie.session.connect':
            m['fields'].update({
                'protocol': entry['protocol'],
                'src_port': entry['src_port'],
                'dst_port': entry['dst_port'],
                'dst_ip': entry['dst_ip'],
            })

        elif eventid in ['cowrie.login.success', 'cowrie.login.failed']:
            m['fields'].update({
                'username': entry['username'],
                'password': entry['password'],
            })

        elif eventid == 'cowrie.session.file_download':
            # .get() is used here: these keys may be absent from the event.
            m['fields'].update({
                'shasum': entry.get('shasum'),
                'url': entry.get('url'),
                'outfile': entry.get('outfile')
            })

        elif eventid == 'cowrie.session.file_download.failed':
            m['fields'].update({
                'url': entry.get('url')
            })

        elif eventid == 'cowrie.session.file_upload':
            m['fields'].update({
                'shasum': entry.get('shasum'),
                'outfile': entry.get('outfile'),
            })

        elif eventid == 'cowrie.session.closed':
            m['fields'].update({
                'duration': entry['duration']
            })

        elif eventid == 'cowrie.client.version':
            m['fields'].update({
                'version': ','.join(entry['version']),
            })

        elif eventid == 'cowrie.client.kex':
            # SSH key-exchange algorithm lists, flattened to CSV strings.
            m['fields'].update({
                'maccs': ','.join(entry['macCS']),
                'kexalgs': ','.join(entry['kexAlgs']),
                'keyalgs': ','.join(entry['keyAlgs']),
                'compcs': ','.join(entry['compCS']),
                'enccs': ','.join(entry['encCS'])
            })

        elif eventid == 'cowrie.client.size':
            m['fields'].update({
                'height': entry['height'],
                'width': entry['width'],
            })

        elif eventid == 'cowrie.client.var':
            m['fields'].update({
                'name': entry['name'],
                'value': entry['value'],
            })

        elif eventid == 'cowrie.client.fingerprint':
            m['fields'].update({
                'fingerprint': entry['fingerprint']
            })

            # cowrie.direct-tcpip.data, cowrie.direct-tcpip.request
            # cowrie.log.closed
            # are not implemented
        else:
            # other events should be handled
            log.err(
                "output_influx: event '{}' not handled. Skipping..".format(
                    eventid))
            return

        result = self.client.write_points([m])

        if not result:
            log.err("output_influx: error when writing '{}' measurement"
                    "in the db.".format(eventid))
Exemplo n.º 35
0
class TestInfluxDBClient(unittest.TestCase):
    def setUp(self):
        # By default, raise exceptions on warnings
        warnings.simplefilter("error", FutureWarning)

        self.cli = InfluxDBClient("localhost", 8086, "username", "password")
        self.dummy_points = [
            {
                "measurement": "cpu_load_short",
                "tags": {"host": "server01", "region": "us-west"},
                "time": "2009-11-10T23:00:00.123456Z",
                "fields": {"value": 0.64},
            }
        ]

        self.dsn_string = "influxdb://*****:*****@my.host.fr:1886/db"

    def test_scheme(self):
        cli = InfluxDBClient("host", 8086, "username", "password", "database")
        self.assertEqual("http://host:8086", cli._baseurl)

        cli = InfluxDBClient("host", 8086, "username", "password", "database", ssl=True)
        self.assertEqual("https://host:8086", cli._baseurl)

    def test_dsn(self):
        cli = InfluxDBClient.from_DSN("influxdb://192.168.0.1:1886")
        self.assertEqual("http://192.168.0.1:1886", cli._baseurl)

        cli = InfluxDBClient.from_DSN(self.dsn_string)
        self.assertEqual("http://my.host.fr:1886", cli._baseurl)
        self.assertEqual("uSr", cli._username)
        self.assertEqual("pWd", cli._password)
        self.assertEqual("db", cli._database)
        self.assertFalse(cli.use_udp)

        cli = InfluxDBClient.from_DSN("udp+" + self.dsn_string)
        self.assertTrue(cli.use_udp)

        cli = InfluxDBClient.from_DSN("https+" + self.dsn_string)
        self.assertEqual("https://my.host.fr:1886", cli._baseurl)

        cli = InfluxDBClient.from_DSN("https+" + self.dsn_string, **{"ssl": False})
        self.assertEqual("http://my.host.fr:1886", cli._baseurl)

    def test_switch_database(self):
        cli = InfluxDBClient("host", 8086, "username", "password", "database")
        cli.switch_database("another_database")
        self.assertEqual("another_database", cli._database)

    def test_switch_user(self):
        cli = InfluxDBClient("host", 8086, "username", "password", "database")
        cli.switch_user("another_username", "another_password")
        self.assertEqual("another_username", cli._username)
        self.assertEqual("another_password", cli._password)

    def test_write(self):
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204)
            cli = InfluxDBClient(database="db")
            cli.write(
                {
                    "database": "mydb",
                    "retentionPolicy": "mypolicy",
                    "points": [
                        {
                            "measurement": "cpu_load_short",
                            "tags": {"host": "server01", "region": "us-west"},
                            "time": "2009-11-10T23:00:00Z",
                            "fields": {"value": 0.64},
                        }
                    ],
                }
            )

            self.assertEqual(
                m.last_request.body, b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000000000000\n"
            )

    def test_write_points(self):
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204)

            cli = InfluxDBClient(database="db")
            cli.write_points(self.dummy_points)
            self.assertEqual(
                "cpu_load_short,host=server01,region=us-west " "value=0.64 1257894000123456000\n",
                m.last_request.body.decode("utf-8"),
            )

    def test_write_points_toplevel_attributes(self):
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204)

            cli = InfluxDBClient(database="db")
            cli.write_points(self.dummy_points, database="testdb", tags={"tag": "hello"}, retention_policy="somepolicy")
            self.assertEqual(
                "cpu_load_short,host=server01,region=us-west,tag=hello " "value=0.64 1257894000123456000\n",
                m.last_request.body.decode("utf-8"),
            )

    def test_write_points_batch(self):
        dummy_points = [
            {
                "measurement": "cpu_usage",
                "tags": {"unit": "percent"},
                "time": "2009-11-10T23:00:00Z",
                "fields": {"value": 12.34},
            },
            {
                "measurement": "network",
                "tags": {"direction": "in"},
                "time": "2009-11-10T23:00:00Z",
                "fields": {"value": 123.00},
            },
            {
                "measurement": "network",
                "tags": {"direction": "out"},
                "time": "2009-11-10T23:00:00Z",
                "fields": {"value": 12.00},
            },
        ]
        expected_last_body = "network,direction=out,host=server01,region=us-west " "value=12.0 1257894000000000000\n"

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204)
            cli = InfluxDBClient(database="db")
            cli.write_points(
                points=dummy_points, database="db", tags={"host": "server01", "region": "us-west"}, batch_size=2
            )
        self.assertEqual(m.call_count, 2)
        self.assertEqual(expected_last_body, m.last_request.body.decode("utf-8"))

    def test_write_points_udp(self):
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        port = random.randint(4000, 8000)
        s.bind(("0.0.0.0", port))

        cli = InfluxDBClient("localhost", 8086, "root", "root", "test", use_udp=True, udp_port=port)
        cli.write_points(self.dummy_points)

        received_data, addr = s.recvfrom(1024)

        self.assertEqual(
            "cpu_load_short,host=server01,region=us-west " "value=0.64 1257894000123456000\n", received_data.decode()
        )

    def test_write_bad_precision_udp(self):
        cli = InfluxDBClient("localhost", 8086, "root", "root", "test", use_udp=True, udp_port=4444)

        with self.assertRaisesRegexp(Exception, "InfluxDB only supports seconds precision for udp writes"):
            cli.write_points(self.dummy_points, time_precision="ms")

    @raises(Exception)
    def test_write_points_fails(self):
        cli = InfluxDBClient("host", 8086, "username", "password", "db")
        with _mocked_session(cli, "post", 500):
            cli.write_points([])

    def test_write_points_with_precision(self):
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204)

            cli = InfluxDBClient(database="db")

            cli.write_points(self.dummy_points, time_precision="n")
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000123456000\n", m.last_request.body
            )

            cli.write_points(self.dummy_points, time_precision="u")
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000123456\n", m.last_request.body
            )

            cli.write_points(self.dummy_points, time_precision="ms")
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000123\n", m.last_request.body
            )

            cli.write_points(self.dummy_points, time_precision="s")
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000\n", m.last_request.body
            )

            cli.write_points(self.dummy_points, time_precision="m")
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west " b"value=0.64 20964900\n", m.last_request.body
            )

            cli.write_points(self.dummy_points, time_precision="h")
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west " b"value=0.64 349415\n", m.last_request.body
            )

    def test_write_points_bad_precision(self):
        cli = InfluxDBClient()
        with self.assertRaisesRegexp(
            Exception, "Invalid time precision is given. " "\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"
        ):
            cli.write_points(self.dummy_points, time_precision="g")

    @raises(Exception)
    def test_write_points_with_precision_fails(self):
        cli = InfluxDBClient("host", 8086, "username", "password", "db")
        with _mocked_session(cli, "post", 500):
            cli.write_points_with_precision([])

    def test_query(self):
        example_response = (
            '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
            '"columns": ["time", "value"], "values": '
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
            '[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://*****:*****@unittest.skip("Not implemented for 0.9")
    def test_query_chunked(self):
        cli = InfluxDBClient(database="db")
        example_object = {
            "points": [
                [1415206250119, 40001, 667],
                [1415206244555, 30001, 7],
                [1415206228241, 20001, 788],
                [1415206212980, 10001, 555],
                [1415197271586, 10001, 23],
            ],
            "measurement": "foo",
            "columns": ["time", "sequence_number", "val"],
        }
        example_response = json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/db/db/series", text=example_response)

            self.assertListEqual(cli.query("select * from foo", chunked=True), [example_object, example_object])

    @raises(Exception)
    def test_query_fail(self):
        with _mocked_session(self.cli, "get", 401):
            self.cli.query("select column_one from foo;")

    def test_create_database(self):
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.create_database("new_db")
            self.assertEqual(m.last_request.qs["q"][0], 'create database "new_db"')

    def test_create_numeric_named_database(self):
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.create_database("123")
            self.assertEqual(m.last_request.qs["q"][0], 'create database "123"')

    @raises(Exception)
    def test_create_database_fails(self):
        with _mocked_session(self.cli, "post", 401):
            self.cli.create_database("new_db")

    def test_drop_database(self):
        """DROP DATABASE should double-quote the database name."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.drop_database("new_db")
            issued = m.last_request.qs["q"][0]
            self.assertEqual(issued, 'drop database "new_db"')

    def test_drop_numeric_named_database(self):
        """Dropping a numeric-named database must still quote the name."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.drop_database("123")
            issued = m.last_request.qs["q"][0]
            self.assertEqual(issued, 'drop database "123"')

    @raises(Exception)
    def test_drop_database_fails(self):
        """Dropping a database must raise on a 401 response."""
        client = InfluxDBClient("host", 8086, "username", "password", "db")
        with _mocked_session(client, "delete", 401):
            client.drop_database("old_db")

    def test_get_list_database(self):
        """get_list_database should map each row onto a {"name": ...} dict."""
        payload = {
            "results": [
                {"series": [{"name": "databases", "values": [["new_db_1"], ["new_db_2"]], "columns": ["name"]}]}
            ]
        }

        with _mocked_session(self.cli, "get", 200, json.dumps(payload)):
            expected = [{"name": "new_db_1"}, {"name": "new_db_2"}]
            self.assertListEqual(self.cli.get_list_database(), expected)

    @raises(Exception)
    def test_get_list_database_fails(self):
        """Listing databases must raise on a 401 response."""
        client = InfluxDBClient("host", 8086, "username", "password")
        with _mocked_session(client, "get", 401):
            client.get_list_database()

    def test_get_list_series(self):
        """get_list_series should zip the columns with each values row."""
        response_body = (
            '{"results": [{"series": [{"name": "cpu_load_short", "columns": '
            '["_id", "host", "region"], "values": '
            '[[1, "server01", "us-west"]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=response_body)

            expected = [{"name": "cpu_load_short", "tags": [{"host": "server01", "_id": 1, "region": "us-west"}]}]
            self.assertListEqual(self.cli.get_list_series(), expected)

    def test_create_retention_policy_default(self):
        """default=True must append the DEFAULT clause to the statement."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.create_retention_policy("somename", "1d", 4, default=True, database="db")

            issued = m.last_request.qs["q"][0]
            self.assertEqual(
                issued,
                "create retention policy somename on db duration 1d replication 4 default",
            )

    def test_create_retention_policy(self):
        """A non-default retention policy omits the DEFAULT clause."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.create_retention_policy("somename", "1d", 4, database="db")

            issued = m.last_request.qs["q"][0]
            self.assertEqual(
                issued,
                "create retention policy somename on db duration 1d replication 4",
            )

    def test_alter_retention_policy(self):
        """Each keyword argument maps onto its own ALTER clause."""
        cases = [
            ({"duration": "4d"}, "alter retention policy somename on db duration 4d"),
            ({"replication": 4}, "alter retention policy somename on db replication 4"),
            ({"default": True}, "alter retention policy somename on db default"),
        ]

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            for kwargs, expected in cases:
                self.cli.alter_retention_policy("somename", "db", **kwargs)
                self.assertEqual(m.last_request.qs["q"][0], expected)

    @raises(Exception)
    def test_alter_retention_policy_invalid(self):
        """Altering a policy with no clauses must raise on a 400 response.

        Bug fix: the 400 session was mocked on the local ``cli`` but the call
        was made on ``self.cli``, so the mocked failure was never exercised.
        The call now targets the mocked client.
        """
        cli = InfluxDBClient("host", 8086, "username", "password")
        with _mocked_session(cli, "get", 400):
            cli.alter_retention_policy("somename", "db")

    def test_get_list_retention_policies(self):
        """get_list_retention_policies should zip columns with each row.

        Repaired: the extraction had truncated this test's assertion and fused
        the mock URL with the decorator of the following test; both are
        reconstructed here.
        """
        example_response = (
            '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'
            ' "columns": ["name", "duration", "replicaN"]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=example_response)
            self.assertListEqual(
                self.cli.get_list_retention_policies(),
                [{"duration": "24h0m0s", "name": "fsfdsdf", "replicaN": 2}],
            )

    @mock.patch("requests.Session.request")
    def test_request_retry(self, mock_request):
        """Tests that two connection errors will be handled."""

        class CustomMock(object):
            """Raise ConnectionError twice, then return a 204 response."""

            i = 0

            def connection_error(self, *args, **kwargs):
                self.i += 1

                if self.i < 3:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 204
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database="db")
        # Must not raise: the client retries past the two failures.
        cli.write_points(self.dummy_points)

    @mock.patch("requests.Session.request")
    def test_request_retry_raises(self, mock_request):
        """Three consecutive connection errors must propagate to the caller."""

        class FlakySession(object):
            """Fail the first three requests, then return HTTP 200."""

            attempts = 0

            def connection_error(self, *args, **kwargs):
                self.attempts += 1
                if self.attempts >= 4:
                    response = requests.Response()
                    response.status_code = 200
                    return response
                raise requests.exceptions.ConnectionError

        mock_request.side_effect = FlakySession().connection_error

        cli = InfluxDBClient(database="db")

        with self.assertRaises(requests.exceptions.ConnectionError):
            cli.write_points(self.dummy_points)

    def test_get_list_users(self):
        """get_list_users should map the user/admin columns into dicts.

        Repaired: the expected username had been scrubbed to ``"******"`` by
        the extraction, while the mocked response returns ``"test"`` — the
        assertion as written could never pass.
        """
        example_response = '{"results":[{"series":[{"columns":["user","admin"],' '"values":[["test",false]]}]}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=example_response)

            self.assertListEqual(self.cli.get_list_users(), [{"user": "test", "admin": False}])

    def test_get_list_users_empty(self):
        """A series without a values array should yield no users."""
        body = '{"results":[{"series":[{"columns":["user","admin"]}]}]}'
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=body)

            users = self.cli.get_list_users()
            self.assertListEqual(users, [])

    def test_revoke_admin_privileges(self):
        """Revoking admin rights issues REVOKE ALL PRIVILEGES FROM <user>."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.revoke_admin_privileges("test")

            issued = m.last_request.qs["q"][0]
            self.assertEqual(issued, "revoke all privileges from test")

    @raises(Exception)
    def test_revoke_admin_privileges_invalid(self):
        """Revoking with an empty username must raise on a 400 response.

        Bug fix: the 400 session was mocked on the local ``cli`` but the call
        was made on ``self.cli``; the call now targets the mocked client.
        """
        cli = InfluxDBClient("host", 8086, "username", "password")
        with _mocked_session(cli, "get", 400):
            cli.revoke_admin_privileges("")

    def test_grant_privilege(self):
        """Granting a privilege issues GRANT <priv> ON <db> TO <user>."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.grant_privilege("read", "testdb", "test")

            issued = m.last_request.qs["q"][0]
            self.assertEqual(issued, "grant read on testdb to test")

    @raises(Exception)
    def test_grant_privilege_invalid(self):
        """Granting an empty privilege must raise on a 400 response.

        Bug fix: the 400 session was mocked on the local ``cli`` but the call
        was made on ``self.cli``; the call now targets the mocked client.
        """
        cli = InfluxDBClient("host", 8086, "username", "password")
        with _mocked_session(cli, "get", 400):
            cli.grant_privilege("", "testdb", "test")

    def test_revoke_privilege(self):
        """Revoking a privilege issues REVOKE <priv> ON <db> FROM <user>."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.revoke_privilege("read", "testdb", "test")

            issued = m.last_request.qs["q"][0]
            self.assertEqual(issued, "revoke read on testdb from test")

    @raises(Exception)
    def test_revoke_privilege_invalid(self):
        """Revoking an empty privilege must raise on a 400 response.

        Bug fix: the 400 session was mocked on the local ``cli`` but the call
        was made on ``self.cli``; the call now targets the mocked client.
        """
        cli = InfluxDBClient("host", 8086, "username", "password")
        with _mocked_session(cli, "get", 400):
            cli.revoke_privilege("", "testdb", "test")
Exemplo n.º 36
0
class TestInfluxDBClient(unittest.TestCase):
    """Tests for an older (pre-0.9) revision of InfluxDBClient.

    NOTE(review): several spans below were garbled during extraction —
    credentials/URLs were scrubbed to ``*****`` and adjacent lines fused
    together (see test_scheme, test_query, test_get_list_series). The
    original statements cannot be recovered from this view, so the code is
    left byte-identical; only comments were added.
    """

    def setUp(self):
        """Create a client and the sample point used throughout."""
        # By default, raise exceptions on warnings
        warnings.simplefilter('error', FutureWarning)

        self.cli = InfluxDBClient('localhost', 8086, 'username', 'password')
        # Pre-0.9 point schema: "name"/"timestamp" instead of the newer
        # "measurement"/"time" keys.
        self.dummy_points = [
            {
                "name": "cpu_load_short",
                "tags": {
                    "host": "server01",
                    "region": "us-west"
                },
                "timestamp": "2009-11-10T23:00:00Z",
                "fields": {
                    "value": 0.64
                }
            }
        ]

    # NOTE(review): this method is garbled — the scheme/DSN assertions were
    # scrubbed and fused with the skip decorator of the following test; it
    # is not valid Python as-is.
    def test_scheme(self):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        assert cli._baseurl == 'http://*****:*****@host:1886/db')
        assert cli._baseurl == 'http://*****:*****@host:1886/db')
        assert cli.use_udp is True

        cli = InfluxDBClient.from_DSN('https+influxdb://usr:pwd@host:1886/db')
        assert cli._baseurl == 'https://*****:*****@host:1886/db',
                                      **{'ssl': False})
        assert cli._baseurl == 'http://*****:*****@unittest.skip('Not implemented for 0.9')
    def test_write_points_batch(self):
        """Batched writes should report success against a mocked 200."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'post', 200, self.dummy_points):
            assert cli.write_points(
                data=self.dummy_points,
                batch_size=2
            ) is True

    def test_write_points_udp(self):
        """Points written over UDP arrive as one JSON datagram."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Bind a quasi-random local port to receive the datagram.
        port = random.randint(4000, 8000)
        s.bind(('0.0.0.0', port))

        cli = InfluxDBClient(
            'localhost', 8086, 'root', 'root',
            'test', use_udp=True, udp_port=port
        )
        cli.write_points(self.dummy_points)

        received_data, addr = s.recvfrom(1024)

        self.assertDictEqual(
            {
                "points": self.dummy_points,
                "database": "test"
            },
            json.loads(received_data.decode(), strict=True)
        )

    def test_write_bad_precision_udp(self):
        """UDP writes accept only seconds precision."""
        cli = InfluxDBClient(
            'localhost', 8086, 'root', 'root',
            'test', use_udp=True, udp_port=4444
        )

        with self.assertRaisesRegexp(
                Exception,
                "InfluxDB only supports seconds precision for udp writes"
        ):
            cli.write_points(
                self.dummy_points,
                time_precision='ms'
            )

    @raises(Exception)
    def test_write_points_fails(self):
        """A 500 response on write must raise."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'post', 500):
            cli.write_points([])

    def test_write_points_with_precision(self):
        """The requested precision must be forwarded in the JSON payload."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/write"
            )

            cli = InfluxDBClient(database='db')
            cli.write_points(
                self.dummy_points,
                time_precision='n'
            )

            self.assertDictEqual(
                {'points': self.dummy_points,
                 'database': 'db',
                 'precision': 'n',
                 },
                json.loads(m.last_request.body)
            )

    def test_write_points_bad_precision(self):
        """An unknown precision string must raise."""
        cli = InfluxDBClient()
        with self.assertRaisesRegexp(
            Exception,
            "Invalid time precision is given. "
            "\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"
        ):
            cli.write_points(
                self.dummy_points,
                time_precision='g'
            )

    @raises(Exception)
    def test_write_points_with_precision_fails(self):
        """A 500 response on a precision write must raise."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'post', 500):
            cli.write_points_with_precision([])

    def test_query(self):
        """A multi-result response should parse into multiple result sets."""
        example_response = \
            '{"results": [{"series": [{"name": "sdfsdfsdf", ' \
            '"columns": ["time", "value"], "values": ' \
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \
            '[{"name": "cpu_load_short", "columns": ["time", "value"], ' \
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'

        with requests_mock.Mocker() as m:
            # NOTE(review): the mock URL plus this test's assertions were
            # garbled together with the skip decorator of the next test.
            m.register_uri(
                requests_mock.GET,
                "http://*****:*****@unittest.skip('Not implemented for 0.9')
    def test_query_chunked(self):
        """Chunked query: concatenated JSON documents become a list."""
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [
                [1415206250119, 40001, 667],
                [1415206244555, 30001, 7],
                [1415206228241, 20001, 788],
                [1415206212980, 10001, 555],
                [1415197271586, 10001, 23]
            ],
            'name': 'foo',
            'columns': [
                'time',
                'sequence_number',
                'val'
            ]
        }
        example_response = \
            json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/db/db/series",
                text=example_response
            )

            self.assertListEqual(
                cli.query('select * from foo', chunked=True),
                [example_object, example_object]
            )

    @raises(Exception)
    def test_query_fail(self):
        """A 401 response to a query must raise."""
        with _mocked_session(self.cli, 'get', 401):
            self.cli.query('select column_one from foo;')

    def test_create_database(self):
        """Pre-0.9 CREATE DATABASE uses the unquoted database name."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.create_database('new_db')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'create database new_db'
            )

    @raises(Exception)
    def test_create_database_fails(self):
        """Creating a database must raise on a 401 response."""
        with _mocked_session(self.cli, 'post', 401):
            self.cli.create_database('new_db')

    def test_drop_database(self):
        """Pre-0.9 DROP DATABASE uses the unquoted database name."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.drop_database('new_db')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'drop database new_db'
            )

    @raises(Exception)
    def test_drop_database_fails(self):
        """Dropping a database must raise on a 401 response."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'delete', 401):
            cli.drop_database('old_db')

    def test_get_list_database(self):
        """get_list_database maps each row to a {'name': ...} dict."""
        data = {'results': [
            {'series': [
                {'name': 'databases',
                 'values': [
                     ['new_db_1'],
                     ['new_db_2']],
                 'columns': ['name']}]}
        ]}

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(
                self.cli.get_list_database(),
                [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
            )

    @raises(Exception)
    def test_get_list_database_fails(self):
        """Listing databases must raise on a 401 response."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 401):
            cli.get_list_database()

    def test_get_list_series(self):
        """get_list_series zips columns with each values row."""
        example_response = \
            '{"results": [{"series": [{"name": "cpu_load_short", "columns": ' \
            '["_id", "host", "region"], "values": ' \
            '[[1, "server01", "us-west"]]}]}]}'

        with requests_mock.Mocker() as m:
            # NOTE(review): the mock URL and this test's assertion were
            # garbled together with the decorator of the next test.
            m.register_uri(
                requests_mock.GET,
                "http://*****:*****@mock.patch('requests.Session.request')
    def test_request_retry(self, mock_request):
        """Tests that two connection errors will be handled"""

        class CustomMock(object):
            # Raise ConnectionError twice, then return a 200 response.
            i = 0

            def connection_error(self, *args, **kwargs):
                self.i += 1

                if self.i < 3:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')
        # Must not raise: the client retries past the two failures.
        cli.write_points(
            self.dummy_points
        )

    @mock.patch('requests.Session.request')
    def test_request_retry_raises(self, mock_request):
        """Tests that three connection errors will not be handled"""

        class CustomMock(object):
            # Raise ConnectionError three times, then return a 200 response.
            i = 0

            def connection_error(self, *args, **kwargs):
                self.i += 1

                if self.i < 4:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')

        with self.assertRaises(requests.exceptions.ConnectionError):
            cli.write_points(self.dummy_points)
Exemplo n.º 37
0
class Tourbillon(object):

    """create a tourbillon instance reading its configuration from config_file
    """

    def __init__(self, config_file):
        """Load the JSON ``config_file``, configure logging, read plugin
        configuration and connect to InfluxDB.

        :param config_file: path to the agent's JSON configuration file.
        """
        super(Tourbillon, self).__init__()

        # Separate run events: asyncio tasks wait on the first, thread
        # targets on the second (see ``run_event``).
        self._aio_run_event = asyncio.Event()
        self._thr_run_event = threading.Event()
        self._loop = asyncio.get_event_loop()
        self._tasks = []
        self._pluginconfig = {}

        with open(config_file, 'r') as f:
            self._config = json.load(f)

        # Root-logger configuration driven by the config file.
        formatter = logging.Formatter(fmt=self._config['log_format'])
        handler = logging.handlers.WatchedFileHandler(
            self._config['log_file'])
        handler.setFormatter(formatter)
        handler.setLevel(getattr(logging, self._config['log_level']))
        logging.getLogger().addHandler(handler)
        logging.getLogger().setLevel(
            getattr(logging, self._config['log_level']))

        logger.info('Use config file: %s', config_file)

        self._load_plugins_config(os.path.abspath(
                                  os.path.dirname(config_file)))

        self._influxdb = InfluxDBClient(**self._config['database'])
        # Cache existing database names so create_database can skip
        # already-present ones.
        self._databases = [i['name']
                           for i in self._influxdb.get_list_database()]
        print(self._databases)

    def _load_plugins_config(self, tourbillon_conf_dir):
        """Load every ``*.conf`` JSON file from the plugin config dir.

        Each file's basename (without extension) becomes its key in
        ``self._pluginconfig``. Unreadable files are logged and skipped.
        """
        t = Template(self._config['plugins_conf_dir'])
        plugin_conf_dir = t.safe_substitute(
            tourbillon_conf_dir=tourbillon_conf_dir)
        logger.info('Plugin config dir: %s', plugin_conf_dir)
        config_files = glob.glob(os.path.join(plugin_conf_dir,
                                              '*.conf'))
        for file_name in config_files:
            k = os.path.splitext(os.path.basename(file_name))[0]
            with open(file_name, 'r') as f:
                try:
                    self._pluginconfig[k] = json.load(f)
                except Exception:
                    # Fixed: was a bare ``except:`` which also swallowed
                    # SystemExit/KeyboardInterrupt; keep the best-effort
                    # skip-and-log behaviour for broken config files.
                    logger.exception('error loading config file %s', file_name)

    @property
    def config(self):
        """returns a dictionary that contains configuration for each enabled
        plugin"""

        return self._pluginconfig

    @property
    def run_event(self):
        """get the asyncio.Event or threading.Event"""

        # Inspect the caller: coroutines get the asyncio event, plain
        # functions (run in threads) get the threading event.
        cf = inspect.currentframe()
        caller_name = cf.f_back.f_code.co_name
        caller = cf.f_back.f_globals[caller_name]
        if asyncio.iscoroutinefunction(caller) or asyncio.iscoroutine(caller):
            return self._aio_run_event
        else:
            return self._thr_run_event

    def push(self, points, database):
        """write syncronously datapoints to InfluxDB"""

        self._influxdb.write_points(points, database=database)

    def create_database(self, name, duration=None, replication=None,
                        default=True):
        """create syncronously a database and a retention policy
        in the InfluxDB instance"""

        if name not in self._databases:
            self._influxdb.create_database(name)
            logger.info('database %s created successfully', name)

        if duration and replication:
            rps = self._influxdb.get_list_retention_policies(name)
            tourbillon_rp_name = '%s_tourbillon' % name
            duration_in_hours = _to_hours(duration)
            logger.debug('duration_in_hours: %s', duration_in_hours)
            for rp in rps:
                if rp['name'] == tourbillon_rp_name:
                    logger.debug('current rp config: %s', rp)
                    # Nothing to do when the existing policy already matches.
                    if rp['duration'] == duration_in_hours and \
                            rp['replicaN'] == int(replication) and \
                            rp['default'] == default:
                        logger.debug('the retention policy %s already exists',
                                     tourbillon_rp_name)
                        return
                    self._influxdb.alter_retention_policy(
                        tourbillon_rp_name,
                        database=name,
                        duration=duration,
                        replication=replication,
                        default=default
                    )
                    logger.info('retention policy %s altered successfully',
                                tourbillon_rp_name)
                    return
            self._influxdb.create_retention_policy(
                tourbillon_rp_name,
                database=name,
                duration=duration,
                replication=replication,
                default=default
            )
            logger.info('retention policy %s created successfully',
                        tourbillon_rp_name)

    @asyncio.coroutine
    def async_push(self, points, database):
        """write asyncronously datapoints to InfluxDB"""

        yield From(self._loop.run_in_executor(
            None,
            functools.partial(self._influxdb.write_points,
                              points, database=database)))

    @asyncio.coroutine
    def async_create_database(self, name, duration=None, replication=None,
                              default=True):
        """create asyncronously a database and a retention policy
        in the InfluxDB instance"""

        if name not in self._databases:
            yield From(self._loop.run_in_executor(
                None,
                self._influxdb.create_database,
                name))
            logger.info('database %s created successfully', name)

        if duration and replication:
            rps = yield From(self._loop.run_in_executor(
                None,
                self._influxdb.get_list_retention_policies,
                name))
            tourbillon_rp_name = '%s_tourbillon' % name
            duration_in_hours = _to_hours(duration)
            logger.debug('duration_in_hours: %s', duration_in_hours)
            for rp in rps:
                if rp['name'] == tourbillon_rp_name:
                    logger.debug('current rp: %s', rp)
                    # Nothing to do when the existing policy already matches.
                    if rp['duration'] == duration_in_hours and \
                            rp['replicaN'] == int(replication) and \
                            rp['default'] == default:
                        logger.debug('the retention policy %s already exists',
                                     tourbillon_rp_name)
                        return
                    yield From(self._loop.run_in_executor(
                        None,
                        functools.partial(
                            self._influxdb.alter_retention_policy,
                            tourbillon_rp_name,
                            database=name,
                            duration=duration,
                            replication=replication,
                            default=default
                        )
                    ))
                    logger.info('retention policy %s altered successfully',
                                tourbillon_rp_name)
                    return
            yield From(self._loop.run_in_executor(
                None,
                functools.partial(
                    self._influxdb.create_retention_policy,
                    tourbillon_rp_name,
                    database=name,
                    duration=duration,
                    replication=replication,
                    default=default
                )
            ))
            logger.info('retention policy %s created successfully',
                        tourbillon_rp_name)

    def _load_tasks(self):
        """Import the configured plugin modules and schedule their tasks.

        Coroutine functions are scheduled on the event loop; plain functions
        are dispatched to the loop's thread-pool executor.
        """
        if 'plugins' not in self._config:
            # Fixed: logger.warn is a deprecated alias of logger.warning.
            logger.warning('no plugin configured.')
            return
        plugins = self._config['plugins']
        thread_targets_count = 0
        # Fixed: ``asyncio.async(...)`` is a SyntaxError on Python >= 3.7
        # where ``async`` is a keyword; prefer ensure_future when available
        # and fall back to the legacy name via getattr.
        _schedule = getattr(asyncio, 'ensure_future', None) or \
            getattr(asyncio, 'async')
        for module_name, functions in plugins.items():
            logger.debug('search for tasks in module %s', module_name)
            module = import_module(module_name)
            logger.debug('module %s successfully imported', module_name)
            for task_name in functions:
                logger.debug('checking declared task %s', task_name)
                if hasattr(module, task_name):
                    candidate_task = getattr(module, task_name)
                    task_type = ''
                    if asyncio.iscoroutinefunction(candidate_task):
                        self._tasks.append(_schedule(
                            candidate_task(self)))
                        task_type = 'coroutine'
                    else:
                        self._tasks.append(self._loop.run_in_executor(
                            None,
                            candidate_task,
                            self))
                        task_type = 'function'
                        thread_targets_count += 1
                    logger.info('task found: %s.%s, type=%s',
                                module_name, task_name, task_type)
        if thread_targets_count > 0:
            # Leave a little headroom beyond the thread targets themselves.
            self._loop.set_default_executor(ThreadPoolExecutor(
                max_workers=thread_targets_count + 2)
            )
        logger.debug('configured tasks: %s', self._tasks)

    def stop(self):
        """stop the tourbillon agent"""

        self._loop.remove_signal_handler(signal.SIGINT)
        self._loop.remove_signal_handler(signal.SIGTERM)
        logger.info('shutting down tourbillon...')
        self._aio_run_event.clear()
        self._thr_run_event.clear()

    def run(self):
        """start the tourbillon agent"""

        logger.info('starting tourbillon...')
        self._loop.add_signal_handler(signal.SIGINT, self.stop)
        self._loop.add_signal_handler(signal.SIGTERM, self.stop)
        self._load_tasks()
        self._aio_run_event.set()
        self._thr_run_event.set()
        logger.info('tourbillon started')
        self._loop.run_until_complete(asyncio.wait(self._tasks))
        logger.info('tourbillon shutdown completed')
Exemplo n.º 38
0
def main(host='localhost', port=8086):
    """Demonstrate a full InfluxDB round trip with sample Wago meter data.

    Creates the ``example`` database and a retention policy, writes one
    point per measurement, runs a sample query, then drops the database.
    Credentials are placeholders (``'******'``).

    :param host: InfluxDB host name.
    :param port: InfluxDB HTTP port.
    """
    user = '******'
    password = '******'
    dbname = 'example'
    dbuser = '******'
    dbuser_password = '******'
    query = 'select value from cpu_load_short;'

    # One sample value per measurement, all captured at the same instant.
    # NOTE: the Python 2 long literals (1000L, 0L) of the original are
    # plain ints here — the 'L' suffix is a syntax error on Python 3.
    readings = {
        'rTotalActiveEnergy': 0.0,
        'rTotalReactiveEnergy': 0.0,
        'DCPower2': 0.0,
        'DCPower1': 2.0,
        'DCVolt1': 9.0,
        'arVoltage_L_N2': 0.0,
        'arVoltage_L_N1': 9.0,
        'DCVolt2': 0.0,
        'arVoltage_L_N3': 0.0,
        'xRotatingField': 0,
        'rTotalApparentEnergy': 0.0,
        'arFrequency3': 0.0,
        'arFrequency2': 0.0,
        'arFrequency1': 0.0,
        'timestamp': datetime.datetime(2015, 11, 5, 7, 39, 41),
        'rTotalActivePower': 3.0,
        'arCurrent2': 0.0,
        'arCurrent3': 0.0,
        'arCurrent1': 9.0,
        'iSiteCode': 1000,
        'rTotalReactivePower': 3.0,
        'trans': 0,
        'DCCurr2': 0.0,
        'DCCurr1': 10.0,
        'rTotalPowerFactorPF': 0.0,
        'rTotalApparentPower': 0.0,
    }
    json_body = [
        {'measurement': name,
         'tags': {'source': 'wago', 'site_name': 'Nyange'},
         'time': '2015-11-05T07:39:41Z',
         'fields': {'value': value}}
        for name, value in readings.items()
    ]

    client = InfluxDBClient(host, port, user, password, dbname)

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Create a retention policy")
    client.create_retention_policy('awesome_policy', '3d', 3, default=True)

    # NOTE(review): the two switch_user sections were destroyed by
    # credential scrubbing ("..."******"..."); reconstructed to match the
    # canonical influxdb-python tutorial flow.
    print("Switch user: " + dbuser)
    client.switch_user(dbuser, dbuser_password)

    print("Write points: {0}".format(json_body))
    client.write_points(json_body)

    print("Querying data: " + query)
    result = client.query(query)

    print("Result: {0}".format(result))

    print("Switch user: " + user)
    client.switch_user(user, password)

    print("Drop database: " + dbname)
    client.drop_database(dbname)
Exemplo n.º 39
0
    def send_to_influx(self):
        """Collect Ceph stats and write them to the configured InfluxDB.

        Gathers df, daemon and pg-summary statistics and writes them with a
        fresh ``InfluxDBClient``.  Raises a manager health check when no
        server is configured or when sending fails; clears the health
        checks on success.  If the target database does not exist (HTTP
        404) it attempts to create it plus an 8-week retention policy.
        """
        if not self.config['hostname']:
            self.log.error("No Influx server configured, please set one using: "
                           "ceph influx config-set hostname <hostname>")

            self.set_health_checks({
                'MGR_INFLUX_NO_SERVER': {
                    'severity': 'warning',
                    'summary': 'No InfluxDB server configured',
                    'detail': ['Configuration option hostname not set']
                }
            })
            return

        # If influx server has authentication turned off then
        # missing username/password is valid.
        self.log.debug("Sending data to Influx host: %s",
                       self.config['hostname'])
        client = InfluxDBClient(self.config['hostname'], self.config['port'],
                                self.config['username'],
                                self.config['password'],
                                self.config['database'],
                                self.config['ssl'],
                                self.config['verify_ssl'])

        # using influx client get_list_database requires admin privs,
        # instead we'll catch the not found exception and inform the user if
        # db can not be created
        try:
            # df and daemon stats use millisecond precision; pg summary
            # uses the client default.
            df_stats, pools = self.get_df_stats()
            client.write_points(df_stats, 'ms')
            client.write_points(self.get_daemon_stats(), 'ms')
            client.write_points(self.get_pg_summary(pools))
            # All writes succeeded: clear any previously raised checks.
            self.set_health_checks(dict())
        except ConnectionError as e:
            self.log.exception("Failed to connect to Influx host %s:%d",
                               self.config['hostname'], self.config['port'])
            self.set_health_checks({
                'MGR_INFLUX_SEND_FAILED': {
                    'severity': 'warning',
                    'summary': 'Failed to send data to InfluxDB server at %s:%d'
                               ' due to an connection error'
                               % (self.config['hostname'], self.config['port']),
                    'detail': [str(e)]
                }
            })
        except InfluxDBClientError as e:
            if e.code == 404:
                # Database missing: try to create it; data from this round
                # is dropped and will be sent on the next cycle.
                self.log.info("Database '%s' not found, trying to create "
                              "(requires admin privs).  You can also create "
                              "manually and grant write privs to user "
                              "'%s'", self.config['database'],
                              self.config['username'])
                client.create_database(self.config['database'])
                client.create_retention_policy(name='8_weeks', duration='8w',
                                               replication='1', default=True,
                                               database=self.config['database'])
            else:
                self.set_health_checks({
                    'MGR_INFLUX_SEND_FAILED': {
                        'severity': 'warning',
                        'summary': 'Failed to send data to InfluxDB',
                        'detail': [str(e)]
                    }
                })
                raise
Exemplo n.º 40
0
class InfluxDBExporter(object):
    """Buffer seismic metrics as InfluxDB line-protocol strings and flush
    them to the server in large batches.

    Lines accumulate in ``self.data`` and are sent with a raw ``/write``
    request (bypassing the ``write_points`` API) for throughput.  Relies on
    module-level globals: ``q`` (input queue), ``lock``/``last_packet_time``
    and ``shutdown_event``.
    """

    def __init__(self, host, port,
                 dbname, user, pwd,
                 db_management, geohash=None):
        """Create the InfluxDB client and optionally (re)create the database.

        :param db_management: falsy to skip DB setup, else a dict with
            'drop_db' (bool) and 'retention' (days) keys.
        :param geohash: optional mapping channel id -> geohash tag.
        """
        self.host = host
        self.port = port
        self.dbname = dbname
        self.user = user
        self.pwd = pwd
        self.client = None
        # Fix: 'geohash={}' was a mutable default argument shared across
        # instances; default to None and substitute a fresh dict.
        self.geohash = {} if geohash is None else geohash

        self.NB_MAX_TRY_REQUEST = 10  # nb of rqt error before aborting
        # self.TIME_MAX = 1*60.*60.

        # add one item by influxdb line
        self.data = []
        # max nb of lines flushed per request (influxdb docs suggest
        # ~5000-10000 points per batch; this deliberately goes higher)
        self.nb_data_max = 40000

        self.client = InfluxDBClient(host=host, port=port, database=dbname)

        if db_management:
            self.prepare_db(db_management)

    def prepare_db(self, db_management):
        """Optionally drop, then create the database and set retention."""
        if db_management['drop_db']:
            self.drop_db()
        self.create_db()
        self.set_retention_policies(db_management['retention'])

    def drop_db(self, dbname=None):
        """Drop *dbname* (default: the configured database), best effort."""
        if not dbname:
            dbname = self.dbname
        logger.info("Drop %s database." % dbname)
        try:
            self.client.drop_database(dbname)
        except Exception:
            # Fix: narrowed from a bare 'except:' which also swallowed
            # KeyboardInterrupt/SystemExit.  Best-effort semantics kept.
            logger.info("Can't drop %s database (not existing yet ?)."
                        % dbname)

    def create_db(self, dbname=None):
        """Create *dbname* if needed and make it the active database."""
        if not dbname:
            dbname = self.dbname
        logger.info("Open/Create %s database." % dbname)
        # self.client.create_database(dbname, if_not_exists=True)
        self.client.create_database(dbname)
        self.client.switch_database(dbname)

    def set_retention_policies(self, days, dbname=None):
        """Create (or update) the default retention policy of *days* days."""
        if not dbname:
            dbname = self.dbname
        name = "in_days"
        logger.info("Setting %s retention policy on %s database, keep=%d days."
                    % (name, dbname, days))
        try:
            self.client.create_retention_policy(name,
                                                duration="%dd" % days,
                                                replication="1",
                                                database=dbname, default=True)
        except Exception:
            # Policy already exists: alter it instead (was a bare 'except:').
            self.client.alter_retention_policy(name,
                                               database=dbname,
                                               duration="%dd" % days,
                                               replication=1,
                                               default=True)

    def make_stats(self, now):
        """Append queue-size gauge lines (producer and consumer sides)."""
        # nanosecond timestamp: whole seconds and microseconds scaled to ns
        t = timegm(now.utctimetuple()) * 1e9 \
            + now.microsecond * 1e3
        t_str = str(int(t))
        s = "queue,type=producer size=%d " % q.qsize() + t_str
        self.data.append(s)
        s = "queue,type=consumer size=%d " % len(self.data) + t_str
        self.data.append(s)

    def make_line_latency(self, channel, starttime, latency_value):
        """Append one latency measurement line for *channel*."""
        timestamp = starttime.datetime
        t = timegm(timestamp.utctimetuple()) * 1e9 \
            + timestamp.microsecond * 1e3
        try:
            geohash_tag = ",geohash=%s" % self.geohash[channel]
        except KeyError:
            # Fix: narrowed from a bare 'except:' — only a missing channel
            # entry is expected here; tag is simply omitted.
            geohash_tag = ""

        line = "latency,channel=" + channel + \
            geohash_tag + \
            " value=" + "%.1f " % latency_value + \
            str(int(t))
        self.data.append(line)

    def make_line_count(self, channel, starttime, delta, data):
        """Append one count line per sample in *data*."""
        cc = "count,channel=" + channel
        for i, v in enumerate(data):
            timestamp = starttime + i*delta
            t = timegm(timestamp.utctimetuple()) * 1e9 \
                + timestamp.microsecond * 1e3
            c = cc + " value=" + "%.2f " % v + str(int(t))
            self.data.append(c)

    def send_points(self, debug=False):
        """Send points to influxsb

        to speed-up things make our own "data line"
        (bypass influxdb write_points python api)

        Retries up to NB_MAX_TRY_REQUEST times on server/client/connection
        errors before re-raising the last exception.
        """
        data = '\n'.join(self.data[:self.nb_data_max])
        del self.data[:self.nb_data_max]

        headers = self.client._headers
        headers['Content-type'] = 'application/octet-stream'

        nb_try = 0
        while True:
            nb_try += 1
            try:
                self.client.request(url="write",
                                    method='POST',
                                    params={'db': self.client._database},
                                    data=data,
                                    expected_response_code=204,
                                    headers=headers
                                    )
            except (InfluxDBServerError,
                    InfluxDBClientError,
                    requests.exceptions.ConnectionError) as e:
                if nb_try > self.NB_MAX_TRY_REQUEST:
                    raise e
                else:
                    logger.error("Request failed (%s)" % e)
                    logger.error("retrying (%d/%d)" %
                                 (nb_try, self.NB_MAX_TRY_REQUEST))
                    continue
            break

    def manage_data(self, trace):
        """Return True is data have been pushed to influxdb"""
        delta = trace.stats['delta']
        starttime = trace.stats['starttime']
        channel = trace.get_id()
        now = datetime.utcnow()
        nbsamples = len(trace.data)
        last_sample_time = starttime + delta * (nbsamples - 1)

        # data latency: delay between "now" and the trace's last sample
        # (renamed from ambiguous single-letter 'l')
        latency = UTCDateTime(now) - last_sample_time

        lock.acquire()
        last_packet_time[channel] = last_sample_time
        lock.release()

        # do not process 'old' data
        # if latency > self.TIME_MAX:
        #     return

        self.make_line_count(channel,
                             starttime,
                             delta,
                             trace.data)

        self.make_line_latency(channel,
                               starttime + delta * (nbsamples - 1),
                               latency)

        # send data to influxdb if buffer is filled enough
        if len(self.data) > self.nb_data_max:
            now = datetime.utcnow()
            self.make_stats(now)
            logger.debug("Data sent")
            try:
                self.send_points(debug=False)
            except InfluxDBServerError as e:
                self.force_shutdown(e)
            else:
                return True
        else:
            return False

    def run(self):
        """Run unless shutdown signal is received.  """

        # time in seconds
        timeout = 0.1
        max_cumulated_wait_time = 15
        wait_time = 0

        while True:
            try:
                trace = q.get(timeout=timeout)
            except Queue.Empty:
                # process queue before shutdown
                # (is_set() replaces the deprecated camelCase isSet())
                if q.empty() and shutdown_event.is_set():
                    logger.info("%s thread has catched *shutdown_event*" %
                                self.__class__.__name__)
                    sys.exit(0)

                wait_time += timeout
                if wait_time > max_cumulated_wait_time:
                    # force data flush to influxdb
                    # even if data block is not completed
                    logger.info('Timer reached (%ds)' % max_cumulated_wait_time
                                + '. Force data flush to influxdb '
                                + '(bsize=%d/%d)!'
                                % (len(self.data), self.nb_data_max))
                    now = datetime.utcnow()
                    self.make_stats(now)
                    try:
                        self.send_points()
                    except BaseException as e:
                        self.force_shutdown(e)
                    wait_time = 0
            else:
                data_pushed = self.manage_data(trace)
                q.task_done()
                if data_pushed:
                    wait_time = 0
Exemplo n.º 41
0
class TestInfluxDBClient(unittest.TestCase):

    def setUp(self):
        """Initialize an instance of TestInfluxDBClient object."""
        # By default, raise exceptions on warnings
        warnings.simplefilter('error', FutureWarning)

        self.cli = InfluxDBClient('localhost', 8086, 'username', 'password')
        self.dummy_points = [
            {
                "measurement": "cpu_load_short",
                "tags": {
                    "host": "server01",
                    "region": "us-west"
                },
                "time": "2009-11-10T23:00:00.123456Z",
                "fields": {
                    "value": 0.64
                }
            }
        ]

        # Fix: credentials must match what test_dsn asserts ('uSr'/'pWd');
        # the scrubbed placeholder '*****:*****' broke that test.
        self.dsn_string = 'influxdb://uSr:pWd@host:1886/db'

    def test_scheme(self):
        """Verify ssl=True switches the base URL from http to https."""
        plain = InfluxDBClient('host', 8086, 'username', 'password',
                               'database')
        self.assertEqual('http://host:8086', plain._baseurl)

        secure = InfluxDBClient('host', 8086, 'username', 'password',
                                'database', ssl=True)
        self.assertEqual('https://host:8086', secure._baseurl)

    def test_dsn(self):
        """Check client construction from a DSN, incl. udp/https prefixes."""
        client = InfluxDBClient.from_DSN(self.dsn_string)
        self.assertEqual('http://host:1886', client._baseurl)
        self.assertEqual('uSr', client._username)
        self.assertEqual('pWd', client._password)
        self.assertEqual('db', client._database)
        self.assertFalse(client.use_udp)

        udp_client = InfluxDBClient.from_DSN('udp+' + self.dsn_string)
        self.assertTrue(udp_client.use_udp)

        https_client = InfluxDBClient.from_DSN('https+' + self.dsn_string)
        self.assertEqual('https://host:1886', https_client._baseurl)

        forced_http = InfluxDBClient.from_DSN('https+' + self.dsn_string,
                                              ssl=False)
        self.assertEqual('http://host:1886', forced_http._baseurl)

    def test_switch_database(self):
        """switch_database should update the client's active database."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        client.switch_database('another_database')
        self.assertEqual('another_database', client._database)

    def test_switch_user(self):
        """switch_user should replace the stored credentials."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        client.switch_user('another_username', 'another_password')
        self.assertEqual('another_username', client._username)
        self.assertEqual('another_password', client._password)

    def test_write(self):
        """A low-level write should serialize the point to line protocol."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)
            client = InfluxDBClient(database='db')
            point = {"measurement": "cpu_load_short",
                     "tags": {"host": "server01",
                              "region": "us-west"},
                     "time": "2009-11-10T23:00:00Z",
                     "fields": {"value": 0.64}}
            client.write({"database": "mydb",
                          "retentionPolicy": "mypolicy",
                          "points": [point]})

            self.assertEqual(
                mocked.last_request.body,
                b"cpu_load_short,host=server01,region=us-west "
                b"value=0.64 1257894000000000000\n",
            )

    def test_write_points(self):
        """write_points should POST the serialized dummy point."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)

            client = InfluxDBClient(database='db')
            client.write_points(self.dummy_points)

            expected = ('cpu_load_short,host=server01,region=us-west '
                        'value=0.64 1257894000123456000\n')
            self.assertEqual(expected,
                             mocked.last_request.body.decode('utf-8'))

    def test_write_points_toplevel_attributes(self):
        """Top-level database/tags/retention kwargs must reach the line."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)

            client = InfluxDBClient(database='db')
            client.write_points(self.dummy_points,
                                database='testdb',
                                tags={"tag": "hello"},
                                retention_policy="somepolicy")

            expected = (
                'cpu_load_short,host=server01,region=us-west,tag=hello '
                'value=0.64 1257894000123456000\n'
            )
            self.assertEqual(expected,
                             mocked.last_request.body.decode('utf-8'))

    def test_write_points_batch(self):
        """batch_size=2 must split three points across two POST requests."""
        points = [
            {"measurement": "cpu_usage", "tags": {"unit": "percent"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
            {"measurement": "network", "tags": {"direction": "in"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
            {"measurement": "network", "tags": {"direction": "out"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
        ]
        expected_last_body = (
            "network,direction=out,host=server01,region=us-west "
            "value=12.0 1257894000000000000\n"
        )

        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)
            client = InfluxDBClient(database='db')
            client.write_points(points=points,
                                database='db',
                                tags={"host": "server01",
                                      "region": "us-west"},
                                batch_size=2)
        self.assertEqual(2, mocked.call_count)
        self.assertEqual(expected_last_body,
                         mocked.last_request.body.decode('utf-8'))

    def test_write_points_udp(self):
        """write_points over UDP should emit the same line protocol."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Fix: bind to an OS-assigned ephemeral port instead of
        # random.randint(4000, 8000), which could collide with a port
        # already in use and make the test flaky.
        s.bind(('0.0.0.0', 0))
        port = s.getsockname()[1]

        cli = InfluxDBClient(
            'localhost', 8086, 'root', 'root',
            'test', use_udp=True, udp_port=port
        )
        cli.write_points(self.dummy_points)

        received_data, addr = s.recvfrom(1024)

        self.assertEqual(
            'cpu_load_short,host=server01,region=us-west '
            'value=0.64 1257894000123456000\n',
            received_data.decode()
        )

    def test_write_bad_precision_udp(self):
        """UDP writes only support seconds precision; others must raise."""
        client = InfluxDBClient(
            'localhost', 8086, 'root', 'root',
            'test', use_udp=True, udp_port=4444
        )

        expected_message = \
            "InfluxDB only supports seconds precision for udp writes"
        with self.assertRaisesRegexp(Exception, expected_message):
            client.write_points(self.dummy_points, time_precision='ms')

    @raises(Exception)
    def test_write_points_fails(self):
        """A 500 response from the server must surface as an exception."""
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(client, 'post', 500):
            client.write_points([])

    def test_write_points_with_precision(self):
        """Each supported precision must scale the serialized timestamp."""
        # (precision, expected serialized body) in the same order as the
        # original sequential assertions.
        cases = [
            ('n', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 1257894000123456000\n'),
            ('u', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 1257894000123456\n'),
            ('ms', b'cpu_load_short,host=server01,region=us-west '
                   b'value=0.64 1257894000123\n'),
            ('s', b"cpu_load_short,host=server01,region=us-west "
                  b"value=0.64 1257894000\n"),
            ('m', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 20964900\n'),
            ('h', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 349415\n'),
        ]

        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)

            client = InfluxDBClient(database='db')
            for precision, expected_body in cases:
                client.write_points(self.dummy_points,
                                    time_precision=precision)
                self.assertEqual(expected_body, mocked.last_request.body)

    def test_write_points_bad_precision(self):
        """An unknown precision string must raise a descriptive error."""
        cli = InfluxDBClient()
        # Fix: raw strings — '\(' in a normal string literal is an invalid
        # escape sequence (DeprecationWarning on modern Pythons); the regex
        # value itself is unchanged.
        with self.assertRaisesRegexp(
            Exception,
            r"Invalid time precision is given. "
            r"\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"
        ):
            cli.write_points(
                self.dummy_points,
                time_precision='g'
            )

    @raises(Exception)
    def test_write_points_with_precision_fails(self):
        """A 500 response on a precision write must raise."""
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(client, 'post', 500):
            client.write_points_with_precision([])

    def test_query(self):
        """query() should parse a multi-statement JSON query response."""
        example_response = (
            '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
            '"columns": ["time", "value"], "values": '
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
            '[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            # NOTE(review): the endpoint URL and the query/assertion lines
            # below were reconstructed from the upstream influxdb-python
            # test suite — the originals were destroyed by credential
            # scrubbing ("http://*****:*****@...").  Confirm against the
            # project's history.
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )
            rs = self.cli.query('select * from foo')

            self.assertListEqual(
                list(rs[0].get_points()),
                [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]
            )

    @unittest.skip('Not implemented for 0.9')
    def test_query_chunked(self):
        """Chunked query responses should be parsed into a list."""
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [
                [1415206250119, 40001, 667],
                [1415206244555, 30001, 7],
                [1415206228241, 20001, 788],
                [1415206212980, 10001, 555],
                [1415197271586, 10001, 23]
            ],
            'measurement': 'foo',
            'columns': [
                'time',
                'sequence_number',
                'val'
            ]
        }
        example_response = \
            json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/db/db/series",
                text=example_response
            )

            self.assertListEqual(
                cli.query('select * from foo', chunked=True),
                [example_object, example_object]
            )

    @raises(Exception)
    def test_query_fail(self):
        """A 401 response during a query must raise."""
        statement = 'select column_one from foo;'
        with _mocked_session(self.cli, 'get', 401):
            self.cli.query(statement)

    def test_create_database(self):
        """create_database should issue the expected query string."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.create_database('new_db')

            issued = mocked.last_request.qs['q'][0]
            self.assertEqual('create database new_db', issued)

    @raises(Exception)
    def test_create_database_fails(self):
        """A 401 on CREATE DATABASE must raise."""
        with _mocked_session(self.cli, 'post', 401):
            self.cli.create_database('new_db')

    def test_drop_database(self):
        """drop_database should issue the expected query string."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.drop_database('new_db')

            issued = mocked.last_request.qs['q'][0]
            self.assertEqual('drop database new_db', issued)

    @raises(Exception)
    def test_drop_database_fails(self):
        """A 401 on DROP DATABASE must raise."""
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(client, 'delete', 401):
            client.drop_database('old_db')

    def test_get_list_database(self):
        """get_list_database should unpack the series into name dicts."""
        response = {'results': [
            {'series': [
                {'name': 'databases',
                 'values': [['new_db_1'], ['new_db_2']],
                 'columns': ['name']}]}
        ]}

        with _mocked_session(self.cli, 'get', 200, json.dumps(response)):
            expected = [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
            self.assertListEqual(expected, self.cli.get_list_database())

    @raises(Exception)
    def test_get_list_database_fails(self):
        """A 401 while listing databases must raise."""
        client = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(client, 'get', 401):
            client.get_list_database()

    def test_get_list_series(self):
        """get_list_series should turn series values into tag dicts."""
        response_text = \
            '{"results": [{"series": [{"name": "cpu_load_short", "columns": ' \
            '["_id", "host", "region"], "values": ' \
            '[[1, "server01", "us-west"]]}]}]}'

        with requests_mock.Mocker() as mocked:
            mocked.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=response_text
            )

            expected = [{'name': 'cpu_load_short',
                         'tags': [{'host': 'server01', '_id': 1,
                                   'region': 'us-west'}]}]
            self.assertListEqual(expected, self.cli.get_list_series())

    def test_create_retention_policy_default(self):
        """default=True must append 'default' to the CREATE statement."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.create_retention_policy(
                'somename', '1d', 4, default=True, database='db'
            )

            issued = mocked.last_request.qs['q'][0]
            self.assertEqual(
                'create retention policy somename on '
                'db duration 1d replication 4 default',
                issued
            )

    def test_create_retention_policy(self):
        """Without default=True the CREATE statement has no trailer."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.create_retention_policy(
                'somename', '1d', 4, database='db'
            )

            issued = mocked.last_request.qs['q'][0]
            self.assertEqual(
                'create retention policy somename on '
                'db duration 1d replication 4',
                issued
            )

    def test_alter_retention_policy(self):
        """Each optional kwarg maps to its own ALTER clause."""
        # (kwargs, expected query) — duration, replication, default, in
        # the same order as the original sequential assertions.
        cases = [
            ({'duration': '4d'},
             'alter retention policy somename on db duration 4d'),
            ({'replication': 4},
             'alter retention policy somename on db replication 4'),
            ({'default': True},
             'alter retention policy somename on db default'),
        ]

        with requests_mock.Mocker() as mocked:
            mocked.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            for kwargs, expected in cases:
                self.cli.alter_retention_policy('somename', 'db', **kwargs)
                self.assertEqual(expected, mocked.last_request.qs['q'][0])

    @raises(Exception)
    def test_alter_retention_policy_invalid(self):
        """Test that altering a policy fails against a 400 response."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call the locally mocked client. The original called
            # `self.cli`, which bypassed the mocked 400 session entirely,
            # so the test only passed by accident (connection failure).
            cli.alter_retention_policy('somename', 'db')

    def test_get_list_retention_policies(self):
        example_response = \
            '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\
            ' "columns": ["name", "duration", "replicaN"]}]}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://*****:*****@mock.patch('requests.Session.request')
    def test_request_retry(self, mock_request):
        """Tests that two connection errors will be handled"""

        class FlakySession(object):
            # Counts calls so the first two fail and the third succeeds.
            attempts = 0

            def connection_error(self, *args, **kwargs):
                self.attempts += 1

                if self.attempts >= 3:
                    response = requests.Response()
                    response.status_code = 204
                    return response
                raise requests.exceptions.ConnectionError

        mock_request.side_effect = FlakySession().connection_error

        cli = InfluxDBClient(database='db')
        cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_request_retry_raises(self, mock_request):
        """Tests that three connection errors will not be handled"""

        class FlakySession(object):
            # Fails three times; the client gives up before attempt four.
            attempts = 0

            def connection_error(self, *args, **kwargs):
                self.attempts += 1

                if self.attempts >= 4:
                    response = requests.Response()
                    response.status_code = 200
                    return response
                raise requests.exceptions.ConnectionError

        mock_request.side_effect = FlakySession().connection_error

        cli = InfluxDBClient(database='db')

        with self.assertRaises(requests.exceptions.ConnectionError):
            cli.write_points(self.dummy_points)

    def test_get_list_users(self):
        """Test that get_list_users parses the user/admin columns."""
        example_response = (
            '{"results":[{"series":[{"columns":["user","admin"],'
            '"values":[["test",false]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )

            self.assertListEqual(
                self.cli.get_list_users(),
                # Expected name must match the "test" value in the mocked
                # response above (the original literal was redacted to
                # '******' by scraping and could never match).
                [{'user': 'test', 'admin': False}]
            )

    def test_get_list_users_empty(self):
        """Verify get_list_users yields [] when no user rows come back."""
        mocked_response = (
            '{"results":[{"series":[{"columns":["user","admin"]}]}]}'
        )
        with requests_mock.Mocker() as mocker:
            mocker.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=mocked_response
            )

            self.assertListEqual(self.cli.get_list_users(), [])

    def test_revoke_admin_privileges(self):
        """Verify revoking admin rights issues the expected query."""
        mocked_response = '{"results":[{}]}'

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=mocked_response
            )
            self.cli.revoke_admin_privileges('test')

            expected = 'revoke all privileges from test'
            self.assertEqual(mocker.last_request.qs['q'][0], expected)

    @raises(Exception)
    def test_revoke_admin_privileges_invalid(self):
        """Test that revoking admin rights fails against a 400 response."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call the locally mocked client. The original called
            # `self.cli`, which bypassed the mocked 400 session entirely,
            # so the test only passed by accident (connection failure).
            cli.revoke_admin_privileges('')

    def test_grant_privilege(self):
        """Verify granting a privilege issues the expected query."""
        mocked_response = '{"results":[{}]}'

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=mocked_response
            )
            self.cli.grant_privilege('read', 'testdb', 'test')

            expected = 'grant read on testdb to test'
            self.assertEqual(mocker.last_request.qs['q'][0], expected)

    @raises(Exception)
    def test_grant_privilege_invalid(self):
        """Test that granting an empty privilege fails against a 400."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call the locally mocked client. The original called
            # `self.cli`, which bypassed the mocked 400 session entirely,
            # so the test only passed by accident (connection failure).
            cli.grant_privilege('', 'testdb', 'test')

    def test_revoke_privilege(self):
        """Verify revoking a privilege issues the expected query."""
        mocked_response = '{"results":[{}]}'

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=mocked_response
            )
            self.cli.revoke_privilege('read', 'testdb', 'test')

            expected = 'revoke read on testdb from test'
            self.assertEqual(mocker.last_request.qs['q'][0], expected)

    @raises(Exception)
    def test_revoke_privilege_invalid(self):
        """Test that revoking an empty privilege fails against a 400."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call the locally mocked client. The original called
            # `self.cli`, which bypassed the mocked 400 session entirely,
            # so the test only passed by accident (connection failure).
            cli.revoke_privilege('', 'testdb', 'test')
Exemplo n.º 42
0
            "measurement" : "speedtest_metrics" ,
            "tags": {
                "host": "iron.lan",
                "region": "home"
            },
            "fields": {
                "ping": _ping,
                "download" : _download,
                "upload": _upload
            }
        }
    ]

    client = InfluxDBClient(
        os.environ['INFLUXDB_HOST'],
        os.environ['INFLUXDB_PORT'],
        os.environ['INFLUXDB_USER'],
        os.environ['INFLUXDB_PASSWORD'],
        os.environ['INFLUXDB_DATABASE']
    )

    client.create_database(os.environ['INFLUXDB_DATABASE'])
    client.create_retention_policy("speedtest_policy", "30d",1, default=True)
    
    # writing points
    print "===> writing points"
    client.write_points(json_body)

else:
    print "failed to retrieve data"