Example no. 1
1
class TestInfluxDBClient(unittest.TestCase):
    """Set up the TestInfluxDBClient object."""

    def setUp(self):
        """Initialize the client and shared fixtures for every test."""
        # By default, raise exceptions on warnings
        warnings.simplefilter('error', FutureWarning)

        self.cli = InfluxDBClient('localhost', 8086, 'username', 'password')
        # Single point reused by most write tests; the fractional
        # timestamp exercises sub-second precision handling.
        self.dummy_points = [
            {
                "measurement": "cpu_load_short",
                "tags": {
                    "host": "server01",
                    "region": "us-west"
                },
                "time": "2009-11-10T23:00:00.123456Z",
                "fields": {
                    "value": 0.64
                }
            }
        ]

        # Credentials must be 'uSr'/'pWd': test_dsn asserts exactly these
        # values after from_dsn() parses this string.
        self.dsn_string = 'influxdb://uSr:pWd@my.host.fr:1886/db'

    def test_scheme(self):
        """Verify _baseurl for each combination of ssl and path settings."""
        cases = [
            # (extra constructor kwargs, expected base URL)
            ({}, 'http://host:8086'),
            ({'ssl': True}, 'https://host:8086'),
            ({'ssl': True, 'path': "somepath"}, 'https://host:8086/somepath'),
            ({'ssl': True, 'path': None}, 'https://host:8086'),
            ({'ssl': True, 'path': "/somepath"}, 'https://host:8086/somepath'),
        ]
        for extra_kwargs, expected_url in cases:
            client = InfluxDBClient('host', 8086, 'username', 'password',
                                    'database', **extra_kwargs)
            self.assertEqual(expected_url, client._baseurl)

    def test_dsn(self):
        """Test DSN parsing in InfluxDBClient.from_dsn."""
        # Bare host:port DSN: scheme defaults to plain http.
        cli = InfluxDBClient.from_dsn('influxdb://192.168.0.1:1886')
        self.assertEqual('http://192.168.0.1:1886', cli._baseurl)

        # self.dsn_string (see setUp) is expected to embed user 'uSr',
        # password 'pWd' and database 'db' -- these asserts depend on it.
        cli = InfluxDBClient.from_dsn(self.dsn_string)
        self.assertEqual('http://my.host.fr:1886', cli._baseurl)
        self.assertEqual('uSr', cli._username)
        self.assertEqual('pWd', cli._password)
        self.assertEqual('db', cli._database)
        self.assertFalse(cli._use_udp)

        # 'udp+' prefix switches the transport to UDP.
        cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string)
        self.assertTrue(cli._use_udp)

        # 'https+' prefix enables TLS ...
        cli = InfluxDBClient.from_dsn('https+' + self.dsn_string)
        self.assertEqual('https://my.host.fr:1886', cli._baseurl)

        # ... unless an explicit ssl=False kwarg overrides the scheme.
        cli = InfluxDBClient.from_dsn('https+' + self.dsn_string,
                                      **{'ssl': False})
        self.assertEqual('http://my.host.fr:1886', cli._baseurl)

    def test_switch_database(self):
        """Verify switch_database() replaces the client's active database."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        client.switch_database('another_database')
        self.assertEqual('another_database', client._database)

    def test_switch_user(self):
        """Verify switch_user() replaces the stored credentials."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        client.switch_user('another_username', 'another_password')
        self.assertEqual('another_username', client._username)
        self.assertEqual('another_password', client._password)

    def test_write(self):
        """Test write in TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/write",
                status_code=204
            )
            cli = InfluxDBClient(database='db')
            cli.write(
                {"database": "mydb",
                 "retentionPolicy": "mypolicy",
                 "points": [{"measurement": "cpu_load_short",
                             "tags": {"host": "server01",
                                      "region": "us-west"},
                             "time": "2009-11-10T23:00:00Z",
                             "fields": {"value": 0.64}}]}
            )

            self.assertEqual(
                m.last_request.body,
                b"cpu_load_short,host=server01,region=us-west "
                b"value=0.64 1257894000000000000\n",
            )

    def test_write_points(self):
        """Test write points for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/write",
                status_code=204
            )

            cli = InfluxDBClient(database='db')
            cli.write_points(
                self.dummy_points,
            )
            self.assertEqual(
                'cpu_load_short,host=server01,region=us-west '
                'value=0.64 1257894000123456000\n',
                m.last_request.body.decode('utf-8'),
            )

    def test_write_points_toplevel_attributes(self):
        """Test write points attrs for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/write",
                status_code=204
            )

            cli = InfluxDBClient(database='db')
            cli.write_points(
                self.dummy_points,
                database='testdb',
                tags={"tag": "hello"},
                retention_policy="somepolicy"
            )
            self.assertEqual(
                'cpu_load_short,host=server01,region=us-west,tag=hello '
                'value=0.64 1257894000123456000\n',
                m.last_request.body.decode('utf-8'),
            )

    def test_write_points_batch(self):
        """Test write points batch for TestInfluxDBClient object."""
        dummy_points = [
            {"measurement": "cpu_usage", "tags": {"unit": "percent"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
            {"measurement": "network", "tags": {"direction": "in"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
            {"measurement": "network", "tags": {"direction": "out"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
        ]
        expected_last_body = (
            "network,direction=out,host=server01,region=us-west "
            "value=12.0 1257894000000000000\n"
        )

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = InfluxDBClient(database='db')
            cli.write_points(points=dummy_points,
                             database='db',
                             tags={"host": "server01",
                                   "region": "us-west"},
                             batch_size=2)
        self.assertEqual(m.call_count, 2)
        self.assertEqual(expected_last_body,
                         m.last_request.body.decode('utf-8'))

    def test_write_points_udp(self):
        """Test write points over UDP for TestInfluxDBClient object."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Close the socket even if the test fails (it was leaked before).
        self.addCleanup(s.close)
        # Bind port 0 and let the OS pick a free port: choosing a random
        # port in a fixed range can collide with a port already in use
        # and make the test flaky.
        s.bind(('0.0.0.0', 0))
        port = s.getsockname()[1]

        cli = InfluxDBClient(
            'localhost', 8086, 'root', 'root',
            'test', use_udp=True, udp_port=port
        )
        cli.write_points(self.dummy_points)

        received_data, addr = s.recvfrom(1024)

        self.assertEqual(
            'cpu_load_short,host=server01,region=us-west '
            'value=0.64 1257894000123456000\n',
            received_data.decode()
        )

    @raises(Exception)
    def test_write_points_fails(self):
        """Test that write_points raises when the server returns HTTP 500."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'post', 500):
            cli.write_points([])

    def test_write_points_with_precision(self):
        """Test write points with precision for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/write",
                status_code=204
            )

            cli = InfluxDBClient(database='db')

            cli.write_points(self.dummy_points, time_precision='n')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 1257894000123456000\n',
                m.last_request.body,
            )

            cli.write_points(self.dummy_points, time_precision='u')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 1257894000123456\n',
                m.last_request.body,
            )

            cli.write_points(self.dummy_points, time_precision='ms')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 1257894000123\n',
                m.last_request.body,
            )

            cli.write_points(self.dummy_points, time_precision='s')
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west "
                b"value=0.64 1257894000\n",
                m.last_request.body,
            )

            cli.write_points(self.dummy_points, time_precision='m')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 20964900\n',
                m.last_request.body,
            )

            cli.write_points(self.dummy_points, time_precision='h')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 349415\n',
                m.last_request.body,
            )

    def test_write_points_with_precision_udp(self):
        """Test write points with each time precision over UDP."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Close the socket even if the test fails (it was leaked before).
        self.addCleanup(s.close)
        # Bind port 0 and let the OS pick a free port: choosing a random
        # port in a fixed range can collide with a port already in use
        # and make the test flaky.
        s.bind(('0.0.0.0', 0))
        port = s.getsockname()[1]

        cli = InfluxDBClient(
            'localhost', 8086, 'root', 'root',
            'test', use_udp=True, udp_port=port
        )

        # Expected line-protocol timestamp of the dummy point
        # (2009-11-10T23:00:00.123456Z) for each supported precision.
        expected_timestamps = (
            ('n', b'1257894000123456000'),
            ('u', b'1257894000123456'),
            ('ms', b'1257894000123'),
            ('s', b'1257894000'),
            ('m', b'20964900'),
            ('h', b'349415'),
        )
        for precision, timestamp in expected_timestamps:
            cli.write_points(self.dummy_points, time_precision=precision)
            received_data, addr = s.recvfrom(1024)
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 ' + timestamp + b'\n',
                received_data,
            )

    def test_write_points_bad_precision(self):
        """Test write points w/bad precision TestInfluxDBClient object."""
        cli = InfluxDBClient()
        with self.assertRaisesRegexp(
            Exception,
            "Invalid time precision is given. "
            "\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"
        ):
            cli.write_points(
                self.dummy_points,
                time_precision='g'
            )

    @raises(Exception)
    def test_write_points_with_precision_fails(self):
        """Test write points w/precision fail for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'post', 500):
            # NOTE(review): no write_points_with_precision method is visible
            # on the client in this file; if it does not exist at runtime
            # the expected Exception would be an AttributeError rather than
            # an HTTP error -- confirm against the client API.
            cli.write_points_with_precision([])

    def test_query(self):
        """Test query method for TestInfluxDBClient object."""
        example_response = (
            '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
            '"columns": ["time", "value"], "values": '
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
            '[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://*****:*****@unittest.skip('Not implemented for 0.9')
    def test_query_chunked(self):
        """Test chunked query for TestInfluxDBClient object."""
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [
                [1415206250119, 40001, 667],
                [1415206244555, 30001, 7],
                [1415206228241, 20001, 788],
                [1415206212980, 10001, 555],
                [1415197271586, 10001, 23]
            ],
            'measurement': 'foo',
            'columns': [
                'time',
                'sequence_number',
                'val'
            ]
        }
        example_response = \
            json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/db/db/series",
                text=example_response
            )

            self.assertListEqual(
                cli.query('select * from foo', chunked=True),
                [example_object, example_object]
            )

    @raises(Exception)
    def test_query_fail(self):
        """Test query failed for TestInfluxDBClient object."""
        with _mocked_session(self.cli, 'get', 401):
            self.cli.query('select column_one from foo;')

    def test_ping(self):
        """Test ping querying InfluxDB version."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/ping",
                status_code=204,
                headers={'X-Influxdb-Version': '1.2.3'}
            )
            version = self.cli.ping()
            self.assertEqual(version, '1.2.3')

    def test_create_database(self):
        """Test create database for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.create_database('new_db')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'create database "new_db"'
            )

    def test_create_numeric_named_database(self):
        """Test create db w/numeric name for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.create_database('123')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'create database "123"'
            )

    @raises(Exception)
    def test_create_database_fails(self):
        """Test create database fail for TestInfluxDBClient object."""
        with _mocked_session(self.cli, 'post', 401):
            self.cli.create_database('new_db')

    def test_drop_database(self):
        """Test drop database for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.drop_database('new_db')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'drop database "new_db"'
            )

    def test_drop_measurement(self):
        """Test drop measurement for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.drop_measurement('new_measurement')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'drop measurement "new_measurement"'
            )

    def test_drop_numeric_named_database(self):
        """Test drop numeric db for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.drop_database('123')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'drop database "123"'
            )

    def test_get_list_database(self):
        """Test get list of databases for TestInfluxDBClient object."""
        data = {'results': [
            {'series': [
                {'name': 'databases',
                 'values': [
                     ['new_db_1'],
                     ['new_db_2']],
                 'columns': ['name']}]}
        ]}

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(
                self.cli.get_list_database(),
                [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
            )

    @raises(Exception)
    def test_get_list_database_fails(self):
        """Test get list of dbs fail for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 401):
            cli.get_list_database()

    def test_get_list_measurements(self):
        """Test get list of measurements for TestInfluxDBClient object."""
        data = {
            "results": [{
                "series": [
                    {"name": "measurements",
                     "columns": ["name"],
                     "values": [["cpu"], ["disk"]
                                ]}]}
            ]
        }

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(
                self.cli.get_list_measurements(),
                [{'name': 'cpu'}, {'name': 'disk'}]
            )

    def test_create_retention_policy_default(self):
        """Test create default ret policy for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            self.cli.create_retention_policy(
                'somename', '1d', 4, default=True, database='db'
            )

            self.assertEqual(
                m.last_request.qs['q'][0],
                'create retention policy "somename" on '
                '"db" duration 1d replication 4 shard duration 0s default'
            )

    def test_create_retention_policy(self):
        """Test create retention policy for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            self.cli.create_retention_policy(
                'somename', '1d', 4, database='db'
            )

            self.assertEqual(
                m.last_request.qs['q'][0],
                'create retention policy "somename" on '
                '"db" duration 1d replication 4 shard duration 0s'
            )

    def test_alter_retention_policy(self):
        """Test alter retention policy for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            # Test alter duration
            self.cli.alter_retention_policy('somename', 'db',
                                            duration='4d')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" duration 4d'
            )
            # Test alter replication
            self.cli.alter_retention_policy('somename', 'db',
                                            replication=4)
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" replication 4'
            )

            # Test alter shard duration
            self.cli.alter_retention_policy('somename', 'db',
                                            shard_duration='1h')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" shard duration 1h'
            )

            # Test alter default
            self.cli.alter_retention_policy('somename', 'db',
                                            default=True)
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" default'
            )

    @raises(Exception)
    def test_alter_retention_policy_invalid(self):
        """Test invalid alter ret policy for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call through the locally mocked client, not self.cli:
            # self.cli's session is not mocked here, so using it would
            # bypass the 400 response this test is meant to exercise.
            cli.alter_retention_policy('somename', 'db')

    def test_drop_retention_policy(self):
        """Test drop retention policy for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            self.cli.drop_retention_policy('somename', 'db')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'drop retention policy "somename" on "db"'
            )

    @raises(Exception)
    def test_drop_retention_policy_fails(self):
        """Test failed drop ret policy for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'delete', 401):
            cli.drop_retention_policy('default', 'db')

    def test_get_list_retention_policies(self):
        """Test get retention policies for TestInfluxDBClient object."""
        example_response = \
            '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\
            ' "columns": ["name", "duration", "replicaN"]}]}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://*****:*****@mock.patch('requests.Session.request')
    def test_request_retry(self, mock_request):
        """Test that two connection errors will be handled."""
        class CustomMock(object):
            """Create custom mock object for test."""

            def __init__(self):
                self.i = 0

            def connection_error(self, *args, **kwargs):
                """Handle a connection error for the CustomMock object."""
                self.i += 1

                if self.i < 3:
                    raise requests.exceptions.ConnectionError

                r = requests.Response()
                r.status_code = 204
                return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')
        cli.write_points(
            self.dummy_points
        )

    @mock.patch('requests.Session.request')
    def test_request_retry_raises(self, mock_request):
        """Test that three requests errors will not be handled."""
        class CustomMock(object):
            """Create custom mock object for test."""

            def __init__(self):
                self.i = 0

            def connection_error(self, *args, **kwargs):
                """Handle a connection error for the CustomMock object."""
                self.i += 1

                if self.i < 4:
                    raise requests.exceptions.HTTPError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')

        with self.assertRaises(requests.exceptions.HTTPError):
            cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_random_request_retry(self, mock_request):
        """Test that a random number of connection errors will be handled."""
        class CustomMock(object):
            """Create custom mock object for test."""

            def __init__(self, retries):
                self.i = 0
                self.retries = retries

            def connection_error(self, *args, **kwargs):
                """Handle a connection error for the CustomMock object."""
                self.i += 1

                if self.i < self.retries:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 204
                    return r

        retries = random.randint(1, 5)
        mock_request.side_effect = CustomMock(retries).connection_error

        cli = InfluxDBClient(database='db', retries=retries)
        cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_random_request_retry_raises(self, mock_request):
        """Test a random number of conn errors plus one will not be handled."""
        class CustomMock(object):
            """Create custom mock object for test."""

            def __init__(self, retries):
                self.i = 0
                self.retries = retries

            def connection_error(self, *args, **kwargs):
                """Handle a connection error for the CustomMock object."""
                self.i += 1

                if self.i < self.retries + 1:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        retries = random.randint(1, 5)
        mock_request.side_effect = CustomMock(retries).connection_error

        cli = InfluxDBClient(database='db', retries=retries)

        with self.assertRaises(requests.exceptions.ConnectionError):
            cli.write_points(self.dummy_points)

    def test_get_list_users(self):
        """Test get users for TestInfluxDBClient object."""
        # One user named "test" without admin rights, as returned by
        # SHOW USERS.
        example_response = (
            '{"results":[{"series":[{"columns":["user","admin"],'
            '"values":[["test",false]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )

            # The expected user name must match the mocked response above;
            # it had been clobbered by secret masking ('******').
            self.assertListEqual(
                self.cli.get_list_users(),
                [{'user': 'test', 'admin': False}]
            )

    def test_get_list_users_empty(self):
        """Test get empty userlist for TestInfluxDBClient object."""
        example_response = (
            '{"results":[{"series":[{"columns":["user","admin"]}]}]}'
        )
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )

            self.assertListEqual(self.cli.get_list_users(), [])

    def test_grant_admin_privileges(self):
        """Test grant admin privs for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            self.cli.grant_admin_privileges('test')

            self.assertEqual(
                m.last_request.qs['q'][0],
                'grant all privileges to "test"'
            )

    @raises(Exception)
    def test_grant_admin_privileges_invalid(self):
        """Test grant invalid admin privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call through the locally mocked client, not self.cli, so the
            # mocked 400 response is actually exercised.
            cli.grant_admin_privileges('')

    def test_revoke_admin_privileges(self):
        """Test revoke admin privs for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            self.cli.revoke_admin_privileges('test')

            self.assertEqual(
                m.last_request.qs['q'][0],
                'revoke all privileges from "test"'
            )

    @raises(Exception)
    def test_revoke_admin_privileges_invalid(self):
        """Test revoke invalid admin privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call through the locally mocked client, not self.cli, so the
            # mocked 400 response is actually exercised.
            cli.revoke_admin_privileges('')

    def test_grant_privilege(self):
        """Test grant privs for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            self.cli.grant_privilege('read', 'testdb', 'test')

            self.assertEqual(
                m.last_request.qs['q'][0],
                'grant read on "testdb" to "test"'
            )

    @raises(Exception)
    def test_grant_privilege_invalid(self):
        """Test grant invalid privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call through the locally mocked client, not self.cli, so the
            # mocked 400 response is actually exercised.
            cli.grant_privilege('', 'testdb', 'test')

    def test_revoke_privilege(self):
        """Test revoke privs for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text=example_response
            )
            self.cli.revoke_privilege('read', 'testdb', 'test')

            self.assertEqual(
                m.last_request.qs['q'][0],
                'revoke read on "testdb" from "test"'
            )

    @raises(Exception)
    def test_revoke_privilege_invalid(self):
        """Test revoke invalid privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call through the locally mocked client, not self.cli, so the
            # mocked 400 response is actually exercised.
            cli.revoke_privilege('', 'testdb', 'test')

    def test_get_list_privileges(self):
        """Tst get list of privs for TestInfluxDBClient object."""
        data = {'results': [
            {'series': [
                {'columns': ['database', 'privilege'],
                 'values': [
                     ['db1', 'READ'],
                     ['db2', 'ALL PRIVILEGES'],
                     ['db3', 'NO PRIVILEGES']]}
            ]}
        ]}

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(
                self.cli.get_list_privileges('test'),
                [{'database': 'db1', 'privilege': 'READ'},
                 {'database': 'db2', 'privilege': 'ALL PRIVILEGES'},
                 {'database': 'db3', 'privilege': 'NO PRIVILEGES'}]
            )

    @raises(Exception)
    def test_get_list_privileges_fails(self):
        """Test failed get list of privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 401):
            cli.get_list_privileges('test')

    def test_invalid_port_fails(self):
        """Test invalid port fail for TestInfluxDBClient object."""
        with self.assertRaises(ValueError):
            InfluxDBClient('host', '80/redir', 'username', 'password')

    def test_chunked_response(self):
        """Test chunked reponse for TestInfluxDBClient object."""
        example_response = \
            u'{"results":[{"statement_id":0,"series":' \
            '[{"name":"cpu","columns":["fieldKey","fieldType"],"values":' \
            '[["value","integer"]]}],"partial":true}]}\n{"results":' \
            '[{"statement_id":0,"series":[{"name":"iops","columns":' \
            '["fieldKey","fieldType"],"values":[["value","integer"]]}],' \
            '"partial":true}]}\n{"results":[{"statement_id":0,"series":' \
            '[{"name":"load","columns":["fieldKey","fieldType"],"values":' \
            '[["value","integer"]]}],"partial":true}]}\n{"results":' \
            '[{"statement_id":0,"series":[{"name":"memory","columns":' \
            '["fieldKey","fieldType"],"values":[["value","integer"]]}]}]}\n'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )
            response = self.cli.query('show series limit 4 offset 0',
                                      chunked=True, chunk_size=4)
            self.assertTrue(len(response) == 4)
            self.assertEqual(response.__repr__(), ResultSet(
                {'series': [{'values': [['value', 'integer']],
                             'name': 'cpu',
                             'columns': ['fieldKey', 'fieldType']},
                            {'values': [['value', 'integer']],
                             'name': 'iops',
                             'columns': ['fieldKey', 'fieldType']},
                            {'values': [['value', 'integer']],
                             'name': 'load',
                             'columns': ['fieldKey', 'fieldType']},
                            {'values': [['value', 'integer']],
                             'name': 'memory',
                             'columns': ['fieldKey', 'fieldType']}]}
            ).__repr__())
def start():
    """Open connections to the logger, the MQTT broker and InfluxDB.

    Initialises the module-level handles and makes sure the target
    database and the 'raspimon_policy' retention policy exist, keeping
    the policy duration in sync with the configuration.
    """
    global logger, mqtt_client, house_data, config, influx_client

    logger = LoggerClient.open("InfluxDBHub")
    mqtt_client = Utils.getpahoclient(logger, __configure_mqtt)
    config = Utils.getconfig("influxdb", logger)

    influx_client = InfluxDBClient(config["host"], config["port"],
                                   config["user"], config["password"],
                                   config["database"])

    # Create the database on first run.
    if {"name": config["database"]} not in influx_client.get_list_database():
        influx_client.create_database(config["database"])

    # Create the retention policy once, then keep it aligned with config.
    existing = influx_client.get_list_retention_policies()
    if any(rp["name"] == "raspimon_policy" for rp in existing):
        influx_client.alter_retention_policy(
            'raspimon_policy',
            duration=config["retention_policy"],
            replication=1,
            default=True)
    else:
        influx_client.create_retention_policy('raspimon_policy',
                                              config["retention_policy"],
                                              1,
                                              default=True)
Esempio n. 3
0
class InfluxClient(object):
    """Thin convenience wrapper around an InfluxDB client connection.

    Holds one `InfluxDBClient` handle plus the name of the database it
    operates on; all methods simply delegate to the underlying client.
    """

    def connect(self, host='localhost', port=8086, username='', password='', database='default'):
        """Open a connection to an InfluxDB server.

        The target database is created if the server does not know it yet.

        Keyword Arguments:
            host {str} -- hostname of the InfluxDB server
            port {int} -- InfluxDB server port
            username {str} -- InfluxDB login username
            password {str} -- InfluxDB login password
            database {str} -- name of the database to operate on
        """
        self.database = database
        self._db = InfluxDBClient(host, port, username, password, database=database, retries=0)

        # Create the database unless the server already has it.
        known = [entry['name'] for entry in self._db.get_list_database()]
        if self.database not in known:
            self._db.create_database(self.database)

    def write(self, data, protocol='json'):
        """Write data points to the connected database.

        Arguments:
            data -- points in the format expected by InfluxDB
            protocol {str} -- wire protocol, e.g. 'json' or 'line'
        """
        self._db.write_points(data, database=self.database, protocol=protocol)

    def read(self, query, database):
        """Switch to *database*, run *query* and return the result set.

        Arguments:
            query {str} -- InfluxDB query string
            database {str} -- database to query against
        """
        self._db.switch_database(database)
        result = self._db.query(query)
        return result

    def alter_retention_policy(self, name='autogen', database = 'default', duration=None, replication=None, default=None, shard_duration=None):
        """Modify an existing retention policy.

        All arguments are forwarded unchanged to the underlying client.
        """
        self._db.alter_retention_policy(name, database, duration,
                                        replication, default,
                                        shard_duration)

    def get_list_retention_policies(self, database=None):
        """Return the retention policies defined for *database*."""
        return self._db.get_list_retention_policies(database)
def start():
    """Open the logger, MQTT and InfluxDB connections.

    Also ensures the configured database exists and that the
    'raspimon_policy' retention policy matches the configured duration.
    """
    global logger, mqtt_client, house_data, config, influx_client

    logger = LoggerClient.open("InfluxDBHub")
    mqtt_client = Utils.getpahoclient(logger, __configure_mqtt)
    config = Utils.getconfig("influxdb", logger)
    influx_client = InfluxDBClient(config["host"], config["port"],
                                   config["user"], config["password"],
                                   config["database"])

    # First run: the database may not exist yet.
    db_entry = {"name": config["database"]}
    if db_entry not in influx_client.get_list_database():
        influx_client.create_database(config["database"])

    # Create the policy once; afterwards keep its duration up to date.
    policies = influx_client.get_list_retention_policies()
    policy_exists = any(p["name"] == "raspimon_policy" for p in policies)
    if policy_exists:
        influx_client.alter_retention_policy('raspimon_policy',
                                             duration=config["retention_policy"],
                                             replication=1,
                                             default=True)
    else:
        influx_client.create_retention_policy('raspimon_policy',
                                              config["retention_policy"],
                                              1, default=True)
Esempio n. 5
0
class Tourbillon(object):

    """Metrics-collection agent driven by an asyncio event loop.

    Reads its JSON configuration from *config_file*, loads per-plugin
    configuration files, and pushes collected datapoints to InfluxDB.
    Plugin tasks may be coroutines (scheduled on the loop) or plain
    functions (run in a thread-pool executor); a pair of run events
    lets either kind poll for shutdown.

    NOTE(review): the ``yield From(...)`` / ``@asyncio.coroutine`` /
    ``asyncio.async`` style suggests this targets the old trollius-era
    asyncio API — confirm the runtime before modernising.
    """

    def __init__(self, config_file):
        """Load configuration, set up file logging and connect to InfluxDB.

        :param config_file: path to the JSON configuration file.
        """
        super(Tourbillon, self).__init__()

        # Separate run flags: one for coroutine tasks, one for threads.
        self._aio_run_event = asyncio.Event()
        self._thr_run_event = threading.Event()
        self._loop = asyncio.get_event_loop()
        self._tasks = []
        self._pluginconfig = {}

        with open(config_file, 'r') as f:
            self._config = json.load(f)

        # Watched file handler so external log rotation is picked up.
        formatter = logging.Formatter(fmt=self._config['log_format'])
        handler = logging.handlers.WatchedFileHandler(
            self._config['log_file'])
        handler.setFormatter(formatter)
        handler.setLevel(getattr(logging, self._config['log_level']))
        logging.getLogger().addHandler(handler)
        logging.getLogger().setLevel(
            getattr(logging, self._config['log_level']))

        logger.info('Use config file: %s', config_file)

        self._load_plugins_config(os.path.abspath(
                                  os.path.dirname(config_file)))

        # Cache existing database names so create_database() can skip
        # databases that already exist.
        self._influxdb = InfluxDBClient(**self._config['database'])
        self._databases = [i['name']
                           for i in self._influxdb.get_list_database()]
        print(self._databases)

    def _load_plugins_config(self, tourbillon_conf_dir):
        """Read every ``*.conf`` JSON file from the plugin config dir.

        Each file's content is stored in ``self._pluginconfig`` keyed by
        the file's basename without extension.
        """
        t = Template(self._config['plugins_conf_dir'])
        plugin_conf_dir = t.safe_substitute(
            tourbillon_conf_dir=tourbillon_conf_dir)
        logger.info('Plugin config dir: %s', plugin_conf_dir)
        config_files = glob.glob(os.path.join(plugin_conf_dir,
                                              '*.conf'))
        for file_name in config_files:
            k = os.path.splitext(os.path.basename(file_name))[0]
            with open(file_name, 'r') as f:
                try:
                    self._pluginconfig[k] = json.load(f)
                # NOTE(review): bare except is deliberate best-effort
                # loading — a broken config file is logged and skipped.
                except:
                    logger.exception('error loading config file %s', file_name)

    @property
    def config(self):
        """returns a dictionary that contains configuration for each enabled
        plugin"""

        return self._pluginconfig

    @property
    def run_event(self):
        """Return the run event appropriate for the caller.

        Inspects the calling frame: a coroutine caller gets the asyncio
        event, anything else gets the threading event.
        """

        # Resolve the caller function object from its frame to decide
        # which flavour of event it needs.
        cf = inspect.currentframe()
        caller_name = cf.f_back.f_code.co_name
        caller = cf.f_back.f_globals[caller_name]
        if asyncio.iscoroutinefunction(caller) or asyncio.iscoroutine(caller):
            return self._aio_run_event
        else:
            return self._thr_run_event

    def push(self, points, database):
        """Write datapoints to InfluxDB synchronously."""

        self._influxdb.write_points(points, database=database)

    def create_database(self, name, duration=None, replication=None,
                        default=True):
        """Synchronously create a database and a retention policy
        in the InfluxDB instance.

        If a '<name>_tourbillon' retention policy already exists with the
        same duration/replication/default it is left untouched; otherwise
        it is altered, or created when absent.
        """

        if name not in self._databases:
            self._influxdb.create_database(name)
            logger.info('database %s created successfully', name)

        if duration and replication:
            rps = self._influxdb.get_list_retention_policies(name)
            tourbillon_rp_name = '%s_tourbillon' % name
            # InfluxDB reports durations in hours; normalise for compare.
            duration_in_hours = _to_hours(duration)
            logger.debug('duration_in_hours: %s', duration_in_hours)
            for rp in rps:
                if rp['name'] == tourbillon_rp_name:
                    logger.debug('current rp config: %s', rp)
                    if rp['duration'] == duration_in_hours and \
                            rp['replicaN'] == int(replication) and \
                            rp['default'] == default:
                        logger.debug('the retention policy %s already exists',
                                     tourbillon_rp_name)
                        return
                    self._influxdb.alter_retention_policy(
                        tourbillon_rp_name,
                        database=name,
                        duration=duration,
                        replication=replication,
                        default=default
                    )
                    logger.info('retention policy %s altered successfully',
                                tourbillon_rp_name)
                    return
            self._influxdb.create_retention_policy(
                tourbillon_rp_name,
                database=name,
                duration=duration,
                replication=replication,
                default=default
            )
            logger.info('retention policy %s created successfully',
                        tourbillon_rp_name)

    @asyncio.coroutine
    def async_push(self, points, database):
        """Write datapoints to InfluxDB asynchronously (via executor)."""

        yield From(self._loop.run_in_executor(
            None,
            functools.partial(self._influxdb.write_points,
                              points, database=database)))

    @asyncio.coroutine
    def async_create_database(self, name, duration=None, replication=None,
                              default=True):
        """Asynchronously create a database and a retention policy
        in the InfluxDB instance.

        Mirrors create_database(), but every blocking client call is
        dispatched to the loop's default executor.
        """

        if name not in self._databases:
            yield From(self._loop.run_in_executor(
                None,
                self._influxdb.create_database,
                name))
            logger.info('database %s created successfully', name)

        if duration and replication:
            rps = yield From(self._loop.run_in_executor(
                None,
                self._influxdb.get_list_retention_policies,
                name))
            tourbillon_rp_name = '%s_tourbillon' % name
            duration_in_hours = _to_hours(duration)
            logger.debug('duration_in_hours: %s', duration_in_hours)
            for rp in rps:
                if rp['name'] == tourbillon_rp_name:
                    logger.debug('current rp: %s', rp)
                    if rp['duration'] == duration_in_hours and \
                            rp['replicaN'] == int(replication) and \
                            rp['default'] == default:
                        logger.debug('the retention policy %s already exists',
                                     tourbillon_rp_name)
                        return
                    yield From(self._loop.run_in_executor(
                        None,
                        functools.partial(
                            self._influxdb.alter_retention_policy,
                            tourbillon_rp_name,
                            database=name,
                            duration=duration,
                            replication=replication,
                            default=default
                        )
                    ))
                    logger.info('retention policy %s altered successfully',
                                tourbillon_rp_name)
                    return
            yield From(self._loop.run_in_executor(
                None,
                functools.partial(
                    self._influxdb.create_retention_policy,
                    tourbillon_rp_name,
                    database=name,
                    duration=duration,
                    replication=replication,
                    default=default
                )
            ))
            logger.info('retention policy %s created successfully',
                        tourbillon_rp_name)

    def _load_tasks(self):
        """Import configured plugin modules and schedule their tasks.

        Coroutines are scheduled on the loop; plain functions are
        dispatched to the thread-pool executor.
        """
        if 'plugins' not in self._config:
            logger.warn('no plugin configured.')
            return
        plugins = self._config['plugins']
        thread_targets_count = 0
        for module_name, functions in plugins.items():
            logger.debug('search for tasks in module %s', module_name)
            module = import_module(module_name)
            logger.debug('module %s successfully imported', module_name)
            for task_name in functions:
                logger.debug('checking declared task %s', task_name)
                if hasattr(module, task_name):
                    candidate_task = getattr(module, task_name)
                    task_type = ''
                    if asyncio.iscoroutinefunction(candidate_task):
                        self._tasks.append(asyncio.async(
                            candidate_task(self)))
                        task_type = 'coroutine'
                    else:
                        self._tasks.append(self._loop.run_in_executor(
                            None,
                            candidate_task,
                            self))
                        task_type = 'function'
                        thread_targets_count += 1
                    logger.info('task found: %s.%s, type=%s',
                                module_name, task_name, task_type)
        # Size the executor so every thread task gets a worker, plus
        # two spares for the executor-based push/create helpers.
        if thread_targets_count > 0:
            self._loop.set_default_executor(ThreadPoolExecutor(
                max_workers=thread_targets_count + 2)
            )
        logger.debug('configured tasks: %s', self._tasks)

    def stop(self):
        """stop the tourbillon agent"""

        self._loop.remove_signal_handler(signal.SIGINT)
        self._loop.remove_signal_handler(signal.SIGTERM)
        logger.info('shutting down tourbillon...')
        # Clearing both events tells every plugin task to wind down.
        self._aio_run_event.clear()
        self._thr_run_event.clear()

    def run(self):
        """start the tourbillon agent"""

        logger.info('starting tourbillon...')
        self._loop.add_signal_handler(signal.SIGINT, self.stop)
        self._loop.add_signal_handler(signal.SIGTERM, self.stop)
        self._load_tasks()
        self._aio_run_event.set()
        self._thr_run_event.set()
        logger.info('tourbillon started')
        # Block until every scheduled task has finished.
        self._loop.run_until_complete(asyncio.wait(self._tasks))
        logger.info('tourbillon shutdown completed')
Esempio n. 6
0
class DBAgent:
    """Small InfluxDB write/read helper with optional write batching.

    Points are buffered in ``delayWritingData`` until
    ``delayWritingCount`` entries have accumulated, then written in one
    batch; a count of 1 writes each point immediately.
    """

    def __init__(self,
                 dbhost="127.0.0.1",
                 dbport=8086,
                 dbname="airdb",
                 dbusr="",
                 dbpass="",
                 delayWritingCount=1):
        """Connect to InfluxDB and create the database if missing.

        Arguments:
            dbhost/dbport -- server address
            dbname -- database to use (created on first run, with a
                7-day 'autogen' retention policy)
            dbusr/dbpass -- credentials
            delayWritingCount -- batch size; 1 disables batching
        """
        self.delayWritingCount = delayWritingCount
        self.delayWritingData = []  # buffered points awaiting a flush

        self.dbclient = InfluxDBClient(dbhost,
                                       dbport,
                                       dbusr,
                                       dbpass,
                                       dbname,
                                       timeout=3)

        databases = self.dbclient.get_list_database()

        # Create the database (with a 7-day retention) on first run.
        hasDefaultDb = any(db["name"] == dbname for db in databases)
        if not hasDefaultDb:
            print("create the database:", dbname)
            self.dbclient.create_database(dbname)
            self.dbclient.alter_retention_policy('autogen', dbname, '7d', 1,
                                                 True)

    def setCacheCount(self, delayWritingCount):
        """Change the batch size and flush anything already buffered."""
        self.delayWritingCount = delayWritingCount
        self.flushDB()

    def flushDB(self):
        """Write all buffered points to InfluxDB and empty the buffer."""
        if len(self.delayWritingData) > 0:
            self.dbclient.write_points(self.delayWritingData)
            # BUGFIX: this previously called self.delayWritingCount.clear(),
            # which raises AttributeError (an int has no .clear()) and left
            # the buffer un-emptied; the buffered points must be cleared.
            self.delayWritingData.clear()

    def insertData(self, measurement, fields, current_time=None):
        """Queue (or immediately write) one data point.

        Arguments:
            measurement {str} -- measurement name
            fields {dict} -- field name -> value mapping
            current_time -- optional datetime or ISO-format string;
                defaults to the current UTC time

        Raises:
            ValueError -- if *fields* is not a dict
        """
        if current_time is None:
            current_time = datetime.datetime.utcnow().isoformat()
        elif isinstance(current_time, datetime.datetime):
            current_time = current_time.isoformat()

        if not isinstance(fields, dict):
            raise ValueError("fields must be dict type! but get " +
                             str(type(fields)))

        data = {
            "measurement": measurement,
            "time": current_time,
            "fields": fields,
        }

        # Batch mode: buffer, and flush once the batch is full.
        if self.delayWritingCount > 1:
            self.delayWritingData.append(data)
            if len(self.delayWritingData) >= self.delayWritingCount:
                self.flushDB()
        else:
            self.dbclient.write_points([data])

    def getLeastData(self, measurement):
        """Return the most recent point of *measurement*, or None."""
        query = "select * from {} order by time desc limit 1;".format(
            measurement)
        result = self.dbclient.query(query)
        datalist = list(result.get_points(measurement=measurement))
        if len(datalist) == 1:
            return datalist[0]
        else:
            return None
Esempio n. 7
0
                                options.influxdb_pass,
                                options.influxdb_db)
        try:
            logging.info("Creating database %s" % options.influxdb_db)
            client.create_database(options.influxdb_db)
        except Exception, e:
            logging.warn("Unable to create database, does it already exist?")
            # client.create_retention_policy("30d", "4w", 1, options.influxdb_db, True)

    except Exception, e:
        logging.error("Unable to connect to influxdb")
        sys.exit(1)

    if options.update_retention:
        logging.info("Altering retention policies")
        client.alter_retention_policy("default", duration="4w", replication=1, default=True)

    # forces read-only
    options.testmode = True
    settings.env = options.env.lower()

    logging.info("Connecting to appliance in %s, testmode:%s" % (settings.env, options.testmode))
    connection = SolaceAPI(settings.env, testmode=options.testmode)

    """
    Gather client stats, this is quite slow if you have MANY clients!
    """
    if options.clients:
        connection.x = SolaceXMLBuilder("show clients stats")
        connection.x.show.client.name = options.filter
        connection.x.show.client.stats
Esempio n. 8
0
class InfluxDBExporter(object):
    """Buffer seismic-trace samples as InfluxDB line-protocol strings and
    push them to the server in large batches.

    NOTE(review): relies on module-level globals defined elsewhere in the
    file (``q`` queue, ``lock``, ``last_packet_time``, ``shutdown_event``,
    ``logger``) and on a ``force_shutdown`` method not visible here —
    confirm against the rest of the module.
    """
    # NOTE(review): geohash={} is a mutable default argument; it is only
    # read here, but callers should not rely on mutating it.
    def __init__(self, host, port, 
                 dbname, user, pwd, 
                 db_management, geohash={}):
        self.host = host
        self.port = port
        self.dbname = dbname
        self.user = user
        self.pwd = pwd
        self.client = None
        self.geohash = geohash

        self.NB_MAX_TRY_REQUEST = 10  # nb of rqt error before aborting
        # self.TIME_MAX = 1*60.*60.

        # add one item by influxdb line
        self.data = []
        self.nb_data_max = 40000     # no more than 5000 (cf. influxdb doc.)

        self.client = InfluxDBClient(host=host, port=port, database=dbname)

        # Optionally (re)create the database and retention policy.
        if db_management:
            self.prepare_db(db_management)

    def prepare_db(self, db_management):
        """Drop (optionally), create and configure the target database."""
        if db_management['drop_db']:
            self.drop_db()
        self.create_db()
        self.set_retention_policies(db_management['retention'])

    def drop_db(self, dbname=None):
        """Drop *dbname* (defaults to the configured database)."""
        if not dbname:
            dbname = self.dbname
        logger.info("Drop %s database." % dbname)
        # Best effort: dropping a database that does not exist yet fails,
        # which is fine on a first run.
        try:
            self.client.drop_database(dbname)
        except:
            logger.info("Can't drop %s database (not existing yet ?)." 
                        % dbname)

    def create_db(self, dbname=None):
        """Create *dbname* (defaults to the configured one) and switch to it."""
        if not dbname:
            dbname = self.dbname
        logger.info("Open/Create %s database." % dbname)
        # self.client.create_database(dbname, if_not_exists=True)
        self.client.create_database(dbname)
        self.client.switch_database(dbname)

    def set_retention_policies(self, days, dbname=None):
        """Create (or alter, if it already exists) the default 'in_days'
        retention policy keeping *days* days of data."""
        if not dbname:
            dbname = self.dbname
        name = "in_days"
        logger.info("Setting %s retention policy on %s database, keep=%d days."
                    % (name, dbname, days))
        # create fails if the policy exists; fall back to altering it.
        try:
            self.client.create_retention_policy(name, 
                                                duration="%dd" % days, 
                                                replication="1",
                                                database=dbname, default=True)
        except:
            self.client.alter_retention_policy(name,
                                               database=dbname,
                                               duration="%dd" % days,
                                               replication=1,
                                               default=True)

    def make_stats(self, now):
        """Append producer/consumer queue-size measurements to the buffer.

        *now* is a datetime; the timestamp is converted to nanoseconds
        since the epoch as required by the line protocol.
        """
        t = timegm(now.utctimetuple()) * 1e9 \
            + now.microsecond * 1e3
        t_str = str(int(t))
        s = "queue,type=producer size=%d " % q.qsize() + t_str
        self.data.append(s)
        s = "queue,type=consumer size=%d " % len(self.data) + t_str
        self.data.append(s)

    def make_line_latency(self, channel, starttime, latency_value):
        """Append one 'latency' line-protocol point for *channel*."""
        timestamp = starttime.datetime
        t = timegm(timestamp.utctimetuple()) * 1e9 \
            + timestamp.microsecond * 1e3
        # Channels without a configured geohash simply omit the tag.
        try:
            geohash_tag = ",geohash=%s" % self.geohash[channel]
        except:
            geohash_tag = ""

        l = "latency,channel=" + channel + \
            geohash_tag + \
            " value=" + "%.1f " % latency_value + \
            str(int(t))
        self.data.append(l)

    def make_line_count(self, channel, starttime, delta, data):
        """Append one 'count' point per sample in *data*, spaced *delta*
        seconds apart starting at *starttime*."""
        cc = "count,channel=" + channel
        for i, v in enumerate(data):
            timestamp = starttime + i*delta
            t = timegm(timestamp.utctimetuple()) * 1e9 \
                + timestamp.microsecond * 1e3
            c = cc + " value=" + "%.2f " % v + str(int(t))
            self.data.append(c)

    def send_points(self, debug=False):
        """Send points to influxsb

        to speed-up things make our own "data line"
        (bypass influxdb write_points python api)

        Retries up to NB_MAX_TRY_REQUEST times on server/client/network
        errors, then re-raises the last exception.
        """
        # Take (and remove) at most nb_data_max buffered lines.
        data = '\n'.join(self.data[:self.nb_data_max])
        del self.data[:self.nb_data_max]

        headers = self.client._headers
        headers['Content-type'] = 'application/octet-stream'

        nb_try = 0
        while True:
            nb_try += 1
            try:
                self.client.request(url="write", 
                                    method='POST',
                                    params={'db': self.client._database},
                                    data=data,
                                    expected_response_code=204,
                                    headers=headers
                                    )
            except (InfluxDBServerError, 
                    InfluxDBClientError, 
                    requests.exceptions.ConnectionError) as e:
                if nb_try > self.NB_MAX_TRY_REQUEST:
                    raise e
                else:
                    logger.error("Request failed (%s)" % e)
                    logger.error("retrying (%d/%d)" %
                                 (nb_try, self.NB_MAX_TRY_REQUEST))
                    continue
            break

    def manage_data(self, trace):
        """Return True is data have been pushed to influxdb"""
        delta = trace.stats['delta']
        starttime = trace.stats['starttime']
        channel = trace.get_id()
        now = datetime.utcnow()
        nbsamples = len(trace.data)
        last_sample_time = starttime + delta * (nbsamples - 1)

        # Latency = wall-clock now minus the trace's last sample time.
        l = UTCDateTime(now) - last_sample_time

        # last_packet_time is shared with other threads.
        lock.acquire()
        last_packet_time[channel] = last_sample_time
        lock.release()

        # do not process 'old' data
        # if l > self.TIME_MAX:
        #     return

        self.make_line_count(channel,
                             starttime,
                             delta,
                             trace.data)

        self.make_line_latency(channel,
                               starttime + delta * (nbsamples - 1),
                               l)

        # send data to influxdb if buffer is filled enough
        if len(self.data) > self.nb_data_max:
            now = datetime.utcnow()
            self.make_stats(now)
            logger.debug("Data sent")
            try:
                self.send_points(debug=False)
            except InfluxDBServerError as e:
                self.force_shutdown(e)
            else:
                return True
        else:
            return False

    def run(self):
        """Run unless shutdown signal is received.

        Consumes traces from the global queue *q*; flushes the buffer to
        InfluxDB when full, or after max_cumulated_wait_time seconds of
        inactivity. Exits when the queue is empty and *shutdown_event*
        is set.
        """

        # time in seconds
        timeout = 0.1
        max_cumulated_wait_time = 15
        wait_time = 0

        while True:
            try:
                trace = q.get(timeout=timeout)
            except Queue.Empty:
                # process queue before shutdown
                if q.empty() and shutdown_event.isSet():
                    logger.info("%s thread has catched *shutdown_event*" %
                                self.__class__.__name__)
                    sys.exit(0)

                wait_time += timeout
                if wait_time > max_cumulated_wait_time:
                    # force data flush to influxdb
                    # even if data block is not completed
                    logger.info('Timer reached (%ds)' % max_cumulated_wait_time
                                + '. Force data flush to influxdb '
                                + '(bsize=%d/%d)!'
                                % (len(self.data), self.nb_data_max))
                    now = datetime.utcnow()
                    self.make_stats(now)
                    try:
                        self.send_points()
                    except BaseException as e:
                        self.force_shutdown(e)
                    wait_time = 0
            else:
                data_pushed = self.manage_data(trace)
                q.task_done()
                if data_pushed:
                    wait_time = 0
Esempio n. 9
0
class InfluxClient:
    """Provision an InfluxDB instance: databases, users, retention
    policies and downsampling continuous queries.

    The CQ format strings are filled with positional arguments:
    {0}=aggregation time, {1}=target retention policy, {2}=source
    retention policy, {3}=resample interval, {4}=database name,
    {5}=resample-for window (locations only).

    NOTE(review): Python 2 code (print statements, ``e.message``).
    """
    READ_PRIVILEGE = 'read'
    # Downsamples telemetry fields (means/sums) into "telemetry_<time>".
    TELEMETRY_CQ_FMT = """
CREATE CONTINUOUS QUERY "telemetry_{0}_cq" ON "{4}"
RESAMPLE EVERY {3}
BEGIN
    SELECT 
        mean("batteryLevel") AS "batteryLevel", 
        mean("lightLevel") AS "lightLevel", 
        mean("rssi") AS "rssi", 
        mean("sensitivity") AS "sensitivity", 
        sum("singleClick") AS "singleClick", 
        sum("threshold") AS "threshold", 
        sum("doubleTap") AS "doubleTap", 
        mean("temperature") AS "temperature", 
        mean("humidity") as "humidity",
        mean("x") AS "x", 
        mean("y") AS "y", 
        mean("z") AS "z",
        max("history") AS "history",
        MODE("sourceId") AS "sourceId"
    INTO 
        "{1}"."telemetry_{0}"
    FROM 
       "{2}"."telemetry"
    GROUP BY time({0}), trackingId
END
"""

    # Downsamples location scans into "locations_<time>".
    LOCATION_CQ_FMT = """
CREATE CONTINUOUS QUERY "locations_{0}_cq" ON "{4}"
RESAMPLE EVERY {3} FOR {5}
BEGIN
    SELECT 
        mean("rssi") AS "rssi",
        COUNT("rssi") AS "scans",
        COUNT("rssi")/(-mean("rssi")) as "quality",
        MODE("fSourceId") AS "fSourceId",
        MODE("fTrackingId") AS "fTrackingId"
    INTO 
        "{1}"."locations_{0}"
    FROM 
       "{2}"."locations"
    GROUP BY time({0}), trackingId, sourceId
END
"""

    # Downsamples position coordinates into "positions_<time>".
    # NOTE(review): attribute name has a typo (POSTIION) — kept because
    # callers may reference it.
    POSTIION_CQ_FMT = """
CREATE CONTINUOUS QUERY "positions_{0}_cq" ON "{4}"
RESAMPLE EVERY {3} 
BEGIN
    SELECT 
        mean("coord_latitude") AS "coord_latitude",
        mean("coord_longitude") AS "coord_longitude"        
    INTO 
        "{1}"."positions_{0}"
    FROM 
       current_rp.position
    GROUP BY time({0}), trackingId
END
"""

    # Drops a continuous query named "<kind>_<time>_cq".
    REMOVE_CQ_FMT = """
DROP CONTINUOUS QUERY "{0}_{1}_cq" ON "{2}"
    """

    def __init__(self, address, port, user_name, password):
        """Connect to the server; a leading 'http://' in *address* is stripped."""
        self._client = InfluxDBClient(host=address.replace('http://', ''),
                                      port=port,
                                      username=user_name,
                                      password=password)

    def create_database(self, database_name):
        """Create *database_name* on the server."""
        print "Creating database %s" % database_name
        self._client.create_database(database_name)

    def create_user(self, user_name, password, database_name=None):
        """Create a user, ignoring 'already exists'; optionally grant
        read privilege on *database_name*."""
        print "Creating user %s" % user_name
        try:
            self._client.create_user(user_name, password)
        except InfluxDBClientError as e:
            if e.message != 'user already exists':
                raise e

        if database_name is not None:
            self._client.grant_privilege(self.READ_PRIVILEGE, database_name,
                                         user_name)

    def create_retention_policy(self, database_name, policy_name, duration):
        """Create a retention policy, or update its duration if it
        already exists (replication factor is always 1)."""
        print "Creating retention policy %s with duration %s on database %s" % (
            policy_name, duration, database_name)
        try:
            self._client.create_retention_policy(policy_name,
                                                 duration,
                                                 1,
                                                 database=database_name)
        except InfluxDBClientError as e:
            if e.message == 'retention policy already exists':
                print "Updating retention policy %s with duration %s on database %s" % (
                    policy_name, duration, database_name)
                self._client.alter_retention_policy(policy_name, database_name,
                                                    duration, 1)

    def recreate_continuous_query(self, database_name, aggregation_time,
                                  retention_policy, source_retention_policy,
                                  resample_time, resample_for):
        """Drop and re-create the telemetry/locations/positions continuous
        queries for *aggregation_time* on *database_name*."""
        self._execute_query(
            self.REMOVE_CQ_FMT.format('telemetry', aggregation_time,
                                      database_name), database_name)

        self._execute_query(
            self.REMOVE_CQ_FMT.format('locations', aggregation_time,
                                      database_name), database_name)

        self._execute_query(
            self.REMOVE_CQ_FMT.format('positions', aggregation_time,
                                      database_name), database_name)

        self._execute_query(
            self.TELEMETRY_CQ_FMT.format(aggregation_time, retention_policy,
                                         source_retention_policy,
                                         resample_time, database_name),
            database_name)

        self._execute_query(
            self.LOCATION_CQ_FMT.format(aggregation_time, retention_policy,
                                        source_retention_policy, resample_time,
                                        database_name, resample_for),
            database_name)

        self._execute_query(
            self.POSTIION_CQ_FMT.format(aggregation_time, retention_policy,
                                        source_retention_policy, resample_time,
                                        database_name), database_name)

    def _execute_query(self, query, database_name):
        """Run *query* against *database_name*, echoing it first."""
        print "Executing query %s" % query
        self._client.query(query, database=database_name)
Esempio n. 10
0
# Script parameters parsed elsewhere (PARAMS) — NOTE(review): Python 2
# script (print statements).
AGO = PARAMS.AGO
AUTOGEN_DURATION = PARAMS.AUTOGEN_DURATION
GROUP_BY = PARAMS.GROUP_BY
UPDATE = PARAMS.UPDATE
OVERWRITE = PARAMS.OVERWRITE


# Connecting to database and switching to desired database
DB_CONNECTION = InfluxDBClient(SERVER, PORT)
DB_CONNECTION.switch_database(DATABASE)

# Generating the downsampled retention Policy if not exists, if exists
# altering it
# NOTE(review): get_list_retention_policies() is queried twice; harmless
# but could be hoisted into a variable.
if POLICY_NAME in [d['name'] for d in DB_CONNECTION.get_list_retention_policies(database=DATABASE)] and UPDATE:
    print "Policy {pn} already exists, updating...".format(pn=POLICY_NAME)
    DB_CONNECTION.alter_retention_policy(
        POLICY_NAME, duration=POLICY_DURATION, replication=1)
elif POLICY_NAME in [d['name'] for d in DB_CONNECTION.get_list_retention_policies(database=DATABASE)] and not UPDATE:
    print "The policy {pn} already exists and the update flag is set to False".format(pn=POLICY_NAME)
else:
    print "The policy {pn} doesn't exists, creating...".format(pn=POLICY_NAME)
    DB_CONNECTION.create_retention_policy(
        POLICY_NAME, duration=POLICY_DURATION, replication=1)

# Getting all existent measurements and continuous queries in database
MEASUREMENTS = [d['name'] for d in list(
    DB_CONNECTION.query('show measurements;').get_points())]
CQ_LIST = [d['name']
           for d in list(DB_CONNECTION.query('show continuous queries;'))[1]]

''' Looping through all measurements and appying a continuous query
on each key for each measurement for downsampling purposes '''
Esempio n. 11
0
class DatabaseInfluxDB(Database):
    """Poll-result store backed by InfluxDB.

    Latency samples are written to and read from the "icmp-echo"
    measurement; prober and source-destination bookkeeping stays in the
    relational models (``Prober``, ``SrcDst``).
    """

    def __init__(self, db_params=None):
        # Connection parameters; loaded from env in _connect() when None.
        self.db_params = db_params
        # InfluxDBClient instance, created lazily by _connect().
        self.client = None
        super().__init__()
        # Per-instance cache for get_poll_data_by_id(); entries are sized
        # by the length of the returned record list and expire after 60 s.
        self.cache = TTLCache(maxsize=1048576, ttl=60, getsizeof=len)

    def _connect(self):
        """ Connect to the InfluxDB server. Load params if not specified. """
        if not self.db_params:
            self.db_params = env.get_influxdb_params()
        self.client = InfluxDBClient(self.db_params['INFLUXDB_HOST'],
                                     self.db_params['INFLUXDB_PORT'],
                                     self.db_params['INFLUXDB_USER'],
                                     self.db_params['INFLUXDB_PASS'],
                                     self.db_params['INFLUXDB_DB'])
        logging.info("Connected to InfluxDB @ %s",
                     self.db_params['INFLUXDB_HOST'])
        # this does nothing if the DB already exists. I think.
        self.client.create_database('ping')
        self.client.alter_retention_policy('autogen',
                                           duration='4w',
                                           shard_duration='1d')

    @staticmethod
    def get_prober_id_by_name(prober_name: str) -> int:
        """ Returns the ID of the prober with the given name. """
        return Prober.objects.get(name=prober_name).id

    @staticmethod
    def get_src_dst_pairs() -> Iterable[SrcDst]:
        """ Returns all source-destination pairs known to the database. """
        return SrcDst.objects.all()

    @staticmethod
    def get_src_dst_by_id(pair_id) -> SrcDst:
        """ Gets a source-destination pair from the database by ID number. """
        return SrcDst.objects.get(id=pair_id)

    @staticmethod
    @cached(class_cache)
    def src_dst_id(prober_name, dst) -> int:
        """ Gets the ID of the src-dst pair from the DB, maybe creating the entry.

        Create a new src-dst pair in the DB if it does not already exist.

        Returns the ID of src-dst pair.
        """
        objects = SrcDst.objects.filter(prober__name=prober_name, dst=dst)
        if not objects:
            # we need to create the src-dst pair
            prober_id = DatabaseInfluxDB.get_prober_id_by_name(prober_name)
            src_dst = SrcDst()
            src_dst.prober_id = prober_id
            src_dst.dst = dst
            src_dst.save()
            pair_id = src_dst.id
            logging.debug("Added SrcDst ID %i to database", pair_id)
        else:
            result = objects[0]
            pair_id = result.id
        return pair_id

    def get_poll_counts_by_pair(self, prober_name, dst_ip) -> int:
        """ Return the number of polls for a specific pair. """
        if not self.client:
            self._connect()
        query = 'SELECT COUNT(latency) FROM "icmp-echo" WHERE ' + \
                'prober_name=$prober_name AND dst_ip=$dst_ip'
        params = {
            'prober_name': prober_name,
            'dst_ip': dst_ip,
        }
        logging.debug("Querying: %s | %s", query, params)
        result_set = self.client.query(query, bind_params=params, epoch='s')
        points = list(result_set.get_points())
        if not points:
            # No data for this pair yet.
            return 0
        return points[0]['count']

    @cachedmethod(operator.attrgetter('cache'))
    def get_poll_data_by_id(self,
                            pair_id,
                            start=None,
                            end=None,
                            convert_to_datetime=False):
        """ Get poll data from DB for specific src_dst pair.

            Optionally specify the time window with datetime.datetime objects.

            Returns a list of rows from the database.
            Each row is a list with two items: time and latency.
            The time is integer UNIX time, unless convert_to_datetime is True.
            The latency is the number of seconds latency (float).
            A latency value of None indicates a timeout.
        """
        # Default window: the last hour (plus one second of slack).
        if end is None:
            end = datetime.datetime.now()
        if start is None:
            start = end - datetime.timedelta(seconds=3601)
        src_dst = self.get_src_dst_by_id(pair_id)
        prober_name = src_dst.prober.name
        dst_ip = src_dst.dst
        records = self.read_records(prober_name, dst_ip, start, end)
        if convert_to_datetime:
            # Query above uses epoch='s', so 'time' is integer UNIX seconds.
            for record in records:
                record['time'] = datetime.datetime.fromtimestamp(
                    record['time'])
        return records

    @staticmethod
    def calculate_statistics(records) -> dict:
        """ Calculate some statistics for a list of records.

            Returns a dictionary of statistical values for the list of records.
            Statistics := {
                'echos': 1801  # number of echo requests
                'successes': 1234  # count of successful responses
                'timeouts': 567  # count of "no response receive"
                'success_rate': 0.712  # fraction of requests that succeeded
                'mean': 0.123  # average latency, not considering timeouts
                'minimum': 0.001
                'maximum': 0.876  # does not account for infinite timeout
            }
        """
        # NOTE(review): with no successful records, 'minimum'/'maximum'
        # stay at these float sentinels -- callers should check 'successes'
        # before trusting them.
        minimum = sys.float_info.max
        maximum = sys.float_info.min
        total = 0.0
        successes = 0
        timeouts = 0
        mean = 0.0
        success_rate = 0.0
        for record in records:
            latency = record['latency']
            # Both None and the stored TIMEOUT_VALUE sentinel mean "no reply".
            if latency is None or latency == TIMEOUT_VALUE:
                timeouts += 1
                continue
            successes += 1
            total += latency
            if latency < minimum:
                minimum = latency
            if latency > maximum:
                maximum = latency
        if successes:
            mean = total / successes
            success_rate = successes / len(records)
        statistics = {
            'echos': len(records),
            'successes': successes,
            'timeouts': timeouts,
            'success_rate': success_rate,
            'mean': mean,
            'minimum': minimum,
            'maximum': maximum
        }
        return statistics

    def record_poll_data(self, prober_name, dst_ip, send_time,
                         receive_time) -> None:
        """ Record results of a single poll in the database. """
        if not self.client:
            self._connect()
        # do this to make sure there is a record created in the MySQL DB for this pair.
        pair_id = self.src_dst_id(prober_name, dst_ip)
        if receive_time is None:
            # Timeout: store the sentinel instead of a real latency.
            latency = TIMEOUT_VALUE
        else:
            latency = round(receive_time - send_time, LATENCY_PRECISION)
        point = {
            "measurement": "icmp-echo",
            "tags": {
                "probe": "[unimplemented]",
                "prober_name": prober_name,
                "dst_ip": dst_ip
            },
            "time": int(send_time),
            "fields": {
                "latency": latency
            }
        }
        self.client.write_points([point], time_precision='s')

    def last_poll_time_by_pair(self, prober_name, dst_ip) -> datetime.datetime:
        """ Get the last time a particular pair ID was polled.

        Returns a datetime.datetime object.
        """
        if not self.client:
            self._connect()
        query = 'SELECT LAST(*) FROM "icmp-echo" WHERE ' + \
                'prober_name=$prober_name AND dst_ip=$dst_ip'
        params = {
            'prober_name': prober_name,
            'dst_ip': dst_ip,
        }
        logging.debug("Querying: %s | %s", query, params)
        result_set = self.client.query(query, bind_params=params, epoch='s')
        points = list(result_set.get_points())
        if not points:
            # Never polled: return the earliest representable datetime.
            dt = datetime.datetime.min
        else:
            dt = datetime.datetime.fromtimestamp(points[0]['time'])
        return dt

    def read_records(self, prober_name, dst_ip, start_time, end_time) -> List:
        """ Return the list of records from start to end times, inclusive.

            start_time and end_time are datetime.datetime objects.
            If there are no records within the time range, [] is returned.

            Sample return:
                [{'1970-01-01T12:34:56}, 0.0123}, {...}, ...]
        """
        if not self.client:
            self._connect()
        # Keep the integer timestamps around: the query needs them as text,
        # but the debug log below formats them with %i.  (BUG FIX: the
        # original converted them to str first, so the "%i" logging call
        # failed to format.)
        start_ts = int(start_time.timestamp())
        end_ts = int(end_time.timestamp())
        query = 'SELECT "latency" FROM "icmp-echo" WHERE prober_name=$prober_name AND ' + \
                'dst_ip=$dst_ip AND time>=' + str(start_ts) + 's AND time<=' + str(end_ts) + 's'
        params = {
            'prober_name': prober_name,
            'dst_ip': dst_ip,
        }
        logging.debug("Querying: %s | %s", query, params)
        result_set = self.client.query(query, bind_params=params, epoch='s')
        points = list(result_set.get_points())
        logging.debug("Got %i records from %i to %i", len(points), start_ts,
                      end_ts)
        return points
Esempio n. 12
0
class TestInfluxDBClient(unittest.TestCase):
    """Unit tests for InfluxDBClient, using requests-mock (no live server).

    NOTE(review): several spans of this class were mangled by whatever
    extracted this file (redacted literals and two merged lines); they are
    flagged inline below and need to be restored from the original source.
    """

    def setUp(self):
        """Create a default client and the shared test fixtures."""
        # By default, raise exceptions on warnings
        warnings.simplefilter("error", FutureWarning)

        self.cli = InfluxDBClient("localhost", 8086, "username", "password")
        self.dummy_points = [
            {
                "measurement": "cpu_load_short",
                "tags": {"host": "server01", "region": "us-west"},
                "time": "2009-11-10T23:00:00.123456Z",
                "fields": {"value": 0.64},
            }
        ]

        # NOTE(review): the credentials here look redacted ("*****") by the
        # extractor; test_dsn expects them to parse as "uSr"/"pWd", which
        # this value cannot produce -- restore the original DSN.
        self.dsn_string = "influxdb://*****:*****@my.host.fr:1886/db"

    def test_scheme(self):
        """Check _baseurl for http and https client construction."""
        cli = InfluxDBClient("host", 8086, "username", "password", "database")
        self.assertEqual("http://host:8086", cli._baseurl)

        cli = InfluxDBClient("host", 8086, "username", "password", "database", ssl=True)
        self.assertEqual("https://host:8086", cli._baseurl)

    def test_dsn(self):
        """Parse DSN strings, including udp+ and https+ scheme prefixes."""
        # NOTE(review): from_DSN is the older influxdb-python spelling;
        # newer releases name it from_dsn -- confirm the targeted version.
        cli = InfluxDBClient.from_DSN("influxdb://192.168.0.1:1886")
        self.assertEqual("http://192.168.0.1:1886", cli._baseurl)

        cli = InfluxDBClient.from_DSN(self.dsn_string)
        self.assertEqual("http://my.host.fr:1886", cli._baseurl)
        self.assertEqual("uSr", cli._username)
        self.assertEqual("pWd", cli._password)
        self.assertEqual("db", cli._database)
        self.assertFalse(cli.use_udp)

        cli = InfluxDBClient.from_DSN("udp+" + self.dsn_string)
        self.assertTrue(cli.use_udp)

        cli = InfluxDBClient.from_DSN("https+" + self.dsn_string)
        self.assertEqual("https://my.host.fr:1886", cli._baseurl)

        # Explicit keyword arguments override what the DSN scheme implies.
        cli = InfluxDBClient.from_DSN("https+" + self.dsn_string, **{"ssl": False})
        self.assertEqual("http://my.host.fr:1886", cli._baseurl)

    def test_switch_database(self):
        """switch_database updates the client's current database."""
        cli = InfluxDBClient("host", 8086, "username", "password", "database")
        cli.switch_database("another_database")
        self.assertEqual("another_database", cli._database)

    def test_switch_user(self):
        """switch_user updates the stored credentials."""
        cli = InfluxDBClient("host", 8086, "username", "password", "database")
        cli.switch_user("another_username", "another_password")
        self.assertEqual("another_username", cli._username)
        self.assertEqual("another_password", cli._password)

    def test_write(self):
        """write() serializes a point to line protocol in the request body."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204)
            cli = InfluxDBClient(database="db")
            cli.write(
                {
                    "database": "mydb",
                    "retentionPolicy": "mypolicy",
                    "points": [
                        {
                            "measurement": "cpu_load_short",
                            "tags": {"host": "server01", "region": "us-west"},
                            "time": "2009-11-10T23:00:00Z",
                            "fields": {"value": 0.64},
                        }
                    ],
                }
            )

            self.assertEqual(
                m.last_request.body, b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000000000000\n"
            )

    def test_write_points(self):
        """write_points() sends the fixture point as line protocol."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204)

            cli = InfluxDBClient(database="db")
            cli.write_points(self.dummy_points)
            self.assertEqual(
                "cpu_load_short,host=server01,region=us-west " "value=0.64 1257894000123456000\n",
                m.last_request.body.decode("utf-8"),
            )

    def test_write_points_toplevel_attributes(self):
        """Top-level database/tags/retention_policy kwargs are applied."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204)

            cli = InfluxDBClient(database="db")
            cli.write_points(self.dummy_points, database="testdb", tags={"tag": "hello"}, retention_policy="somepolicy")
            self.assertEqual(
                "cpu_load_short,host=server01,region=us-west,tag=hello " "value=0.64 1257894000123456000\n",
                m.last_request.body.decode("utf-8"),
            )

    def test_write_points_batch(self):
        """batch_size=2 splits three points into two POST requests."""
        dummy_points = [
            {
                "measurement": "cpu_usage",
                "tags": {"unit": "percent"},
                "time": "2009-11-10T23:00:00Z",
                "fields": {"value": 12.34},
            },
            {
                "measurement": "network",
                "tags": {"direction": "in"},
                "time": "2009-11-10T23:00:00Z",
                "fields": {"value": 123.00},
            },
            {
                "measurement": "network",
                "tags": {"direction": "out"},
                "time": "2009-11-10T23:00:00Z",
                "fields": {"value": 12.00},
            },
        ]
        expected_last_body = "network,direction=out,host=server01,region=us-west " "value=12.0 1257894000000000000\n"

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204)
            cli = InfluxDBClient(database="db")
            cli.write_points(
                points=dummy_points, database="db", tags={"host": "server01", "region": "us-west"}, batch_size=2
            )
        self.assertEqual(m.call_count, 2)
        self.assertEqual(expected_last_body, m.last_request.body.decode("utf-8"))

    def test_write_points_udp(self):
        """UDP writes deliver the same line protocol over a datagram socket."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Random high port to avoid collisions with other listeners.
        port = random.randint(4000, 8000)
        s.bind(("0.0.0.0", port))

        cli = InfluxDBClient("localhost", 8086, "root", "root", "test", use_udp=True, udp_port=port)
        cli.write_points(self.dummy_points)

        received_data, addr = s.recvfrom(1024)

        self.assertEqual(
            "cpu_load_short,host=server01,region=us-west " "value=0.64 1257894000123456000\n", received_data.decode()
        )

    def test_write_bad_precision_udp(self):
        """Sub-second precision is rejected for UDP writes."""
        cli = InfluxDBClient("localhost", 8086, "root", "root", "test", use_udp=True, udp_port=4444)

        # NOTE(review): assertRaisesRegexp is deprecated and removed in
        # Python 3.12; assertRaisesRegex is the current name.
        with self.assertRaisesRegexp(Exception, "InfluxDB only supports seconds precision for udp writes"):
            cli.write_points(self.dummy_points, time_precision="ms")

    @raises(Exception)
    def test_write_points_fails(self):
        """A 500 response from /write raises."""
        cli = InfluxDBClient("host", 8086, "username", "password", "db")
        with _mocked_session(cli, "post", 500):
            cli.write_points([])

    def test_write_points_with_precision(self):
        """Each time_precision value truncates the timestamp accordingly."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST, "http://localhost:8086/write", status_code=204)

            cli = InfluxDBClient(database="db")

            cli.write_points(self.dummy_points, time_precision="n")
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000123456000\n", m.last_request.body
            )

            cli.write_points(self.dummy_points, time_precision="u")
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000123456\n", m.last_request.body
            )

            cli.write_points(self.dummy_points, time_precision="ms")
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000123\n", m.last_request.body
            )

            cli.write_points(self.dummy_points, time_precision="s")
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west " b"value=0.64 1257894000\n", m.last_request.body
            )

            cli.write_points(self.dummy_points, time_precision="m")
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west " b"value=0.64 20964900\n", m.last_request.body
            )

            cli.write_points(self.dummy_points, time_precision="h")
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west " b"value=0.64 349415\n", m.last_request.body
            )

    def test_write_points_bad_precision(self):
        """An unknown precision string raises with an explanatory message."""
        cli = InfluxDBClient()
        # NOTE(review): non-raw string with \( escapes -- emits an invalid
        # escape-sequence warning on modern Python; should be a raw string.
        # assertRaisesRegexp is also deprecated (removed in 3.12).
        with self.assertRaisesRegexp(
            Exception, "Invalid time precision is given. " "\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"
        ):
            cli.write_points(self.dummy_points, time_precision="g")

    @raises(Exception)
    def test_write_points_with_precision_fails(self):
        """A 500 response fails write_points_with_precision."""
        cli = InfluxDBClient("host", 8086, "username", "password", "db")
        with _mocked_session(cli, "post", 500):
            cli.write_points_with_precision([])

    def test_query(self):
        """query() parses a multi-series JSON response."""
        example_response = (
            '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
            '"columns": ["time", "value"], "values": '
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
            '[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            # NOTE(review): this line is garbled -- the extractor redacted
            # the URL and fused the rest of test_query plus the
            # @unittest.skip decorator of the next method into it. The
            # missing body and decorator must be restored from the original.
            m.register_uri(requests_mock.GET, "http://*****:*****@unittest.skip("Not implemented for 0.9")
    def test_query_chunked(self):
        """Chunked queries concatenate multiple JSON objects (skipped)."""
        cli = InfluxDBClient(database="db")
        example_object = {
            "points": [
                [1415206250119, 40001, 667],
                [1415206244555, 30001, 7],
                [1415206228241, 20001, 788],
                [1415206212980, 10001, 555],
                [1415197271586, 10001, 23],
            ],
            "measurement": "foo",
            "columns": ["time", "sequence_number", "val"],
        }
        example_response = json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/db/db/series", text=example_response)

            self.assertListEqual(cli.query("select * from foo", chunked=True), [example_object, example_object])

    @raises(Exception)
    def test_query_fail(self):
        """A 401 response fails query()."""
        with _mocked_session(self.cli, "get", 401):
            self.cli.query("select column_one from foo;")

    def test_create_database(self):
        """create_database issues CREATE DATABASE with a quoted name."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.create_database("new_db")
            self.assertEqual(m.last_request.qs["q"][0], 'create database "new_db"')

    def test_create_numeric_named_database(self):
        """Numeric database names are quoted too."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.create_database("123")
            self.assertEqual(m.last_request.qs["q"][0], 'create database "123"')

    @raises(Exception)
    def test_create_database_fails(self):
        """A 401 response fails create_database."""
        with _mocked_session(self.cli, "post", 401):
            self.cli.create_database("new_db")

    def test_drop_database(self):
        """drop_database issues DROP DATABASE with a quoted name."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.drop_database("new_db")
            self.assertEqual(m.last_request.qs["q"][0], 'drop database "new_db"')

    def test_drop_numeric_named_database(self):
        """Numeric database names are quoted in DROP DATABASE too."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text='{"results":[{}]}')
            self.cli.drop_database("123")
            self.assertEqual(m.last_request.qs["q"][0], 'drop database "123"')

    @raises(Exception)
    def test_drop_database_fails(self):
        """A 401 response fails drop_database."""
        cli = InfluxDBClient("host", 8086, "username", "password", "db")
        with _mocked_session(cli, "delete", 401):
            cli.drop_database("old_db")

    def test_get_list_database(self):
        """get_list_database converts the series rows into name dicts."""
        data = {
            "results": [
                {"series": [{"name": "databases", "values": [["new_db_1"], ["new_db_2"]], "columns": ["name"]}]}
            ]
        }

        with _mocked_session(self.cli, "get", 200, json.dumps(data)):
            self.assertListEqual(self.cli.get_list_database(), [{"name": "new_db_1"}, {"name": "new_db_2"}])

    @raises(Exception)
    def test_get_list_database_fails(self):
        """A 401 response fails get_list_database."""
        cli = InfluxDBClient("host", 8086, "username", "password")
        with _mocked_session(cli, "get", 401):
            cli.get_list_database()

    def test_get_list_series(self):
        """get_list_series groups column values into per-series tag dicts."""
        example_response = (
            '{"results": [{"series": [{"name": "cpu_load_short", "columns": '
            '["_id", "host", "region"], "values": '
            '[[1, "server01", "us-west"]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=example_response)

            self.assertListEqual(
                self.cli.get_list_series(),
                [{"name": "cpu_load_short", "tags": [{"host": "server01", "_id": 1, "region": "us-west"}]}],
            )

    def test_create_retention_policy_default(self):
        """default=True appends DEFAULT to CREATE RETENTION POLICY."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=example_response)
            self.cli.create_retention_policy("somename", "1d", 4, default=True, database="db")

            self.assertEqual(
                m.last_request.qs["q"][0], "create retention policy somename on " "db duration 1d replication 4 default"
            )

    def test_create_retention_policy(self):
        """CREATE RETENTION POLICY without the DEFAULT clause."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=example_response)
            self.cli.create_retention_policy("somename", "1d", 4, database="db")

            self.assertEqual(
                m.last_request.qs["q"][0], "create retention policy somename on " "db duration 1d replication 4"
            )

    def test_alter_retention_policy(self):
        """Each keyword alters exactly its own clause of the policy."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=example_response)
            # Test alter duration
            self.cli.alter_retention_policy("somename", "db", duration="4d")
            self.assertEqual(m.last_request.qs["q"][0], "alter retention policy somename on db duration 4d")
            # Test alter replication
            self.cli.alter_retention_policy("somename", "db", replication=4)
            self.assertEqual(m.last_request.qs["q"][0], "alter retention policy somename on db replication 4")

            # Test alter default
            self.cli.alter_retention_policy("somename", "db", default=True)
            self.assertEqual(m.last_request.qs["q"][0], "alter retention policy somename on db default")

    @raises(Exception)
    def test_alter_retention_policy_invalid(self):
        """Altering with no clauses given raises on a 400 response."""
        cli = InfluxDBClient("host", 8086, "username", "password")
        with _mocked_session(cli, "get", 400):
            self.cli.alter_retention_policy("somename", "db")

    def test_get_list_retention_policies(self):
        """get_list_retention_policies maps rows into column dicts."""
        example_response = (
            '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'
            ' "columns": ["name", "duration", "replicaN"]}]}]}'
        )

        with requests_mock.Mocker() as m:
            # NOTE(review): this line is garbled -- the extractor redacted
            # the URL and fused the rest of this test plus the @mock.patch
            # decorator of the next method into it; restore from the
            # original source.
            m.register_uri(requests_mock.GET, "http://*****:*****@mock.patch("requests.Session.request")
    def test_request_retry(self, mock_request):
        """Tests that two connection errors will be handled"""

        class CustomMock(object):
            # Number of calls seen so far; first two raise, third succeeds.
            i = 0

            def connection_error(self, *args, **kwargs):
                self.i += 1

                if self.i < 3:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 204
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database="db")
        cli.write_points(self.dummy_points)

    @mock.patch("requests.Session.request")
    def test_request_retry_raises(self, mock_request):
        """Tests that three connection errors will not be handled"""

        class CustomMock(object):
            # Fails three times -- one more than the client will retry.
            i = 0

            def connection_error(self, *args, **kwargs):
                self.i += 1

                if self.i < 4:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database="db")

        with self.assertRaises(requests.exceptions.ConnectionError):
            cli.write_points(self.dummy_points)

    def test_get_list_users(self):
        """get_list_users parses user rows from the response."""
        example_response = '{"results":[{"series":[{"columns":["user","admin"],' '"values":[["test",false]]}]}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=example_response)

            # NOTE(review): "******" looks redacted by the extractor -- the
            # response above yields the user "test", so the expected value
            # here cannot match as written.
            self.assertListEqual(self.cli.get_list_users(), [{"user": "******", "admin": False}])

    def test_get_list_users_empty(self):
        """A series with columns but no values yields an empty user list."""
        example_response = '{"results":[{"series":[{"columns":["user","admin"]}]}]}'
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=example_response)

            self.assertListEqual(self.cli.get_list_users(), [])

    def test_revoke_admin_privileges(self):
        """revoke_admin_privileges issues REVOKE ALL PRIVILEGES."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=example_response)
            self.cli.revoke_admin_privileges("test")

            self.assertEqual(m.last_request.qs["q"][0], "revoke all privileges from test")

    @raises(Exception)
    def test_revoke_admin_privileges_invalid(self):
        """Revoking from an empty username raises on a 400 response."""
        cli = InfluxDBClient("host", 8086, "username", "password")
        with _mocked_session(cli, "get", 400):
            self.cli.revoke_admin_privileges("")

    def test_grant_privilege(self):
        """grant_privilege issues GRANT <priv> ON <db> TO <user>."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=example_response)
            self.cli.grant_privilege("read", "testdb", "test")

            self.assertEqual(m.last_request.qs["q"][0], "grant read on testdb to test")

    @raises(Exception)
    def test_grant_privilege_invalid(self):
        """Granting an empty privilege raises on a 400 response."""
        cli = InfluxDBClient("host", 8086, "username", "password")
        with _mocked_session(cli, "get", 400):
            self.cli.grant_privilege("", "testdb", "test")

    def test_revoke_privilege(self):
        """revoke_privilege issues REVOKE <priv> ON <db> FROM <user>."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET, "http://localhost:8086/query", text=example_response)
            self.cli.revoke_privilege("read", "testdb", "test")

            self.assertEqual(m.last_request.qs["q"][0], "revoke read on testdb from test")

    @raises(Exception)
    def test_revoke_privilege_invalid(self):
        """Revoking an empty privilege raises on a 400 response."""
        cli = InfluxDBClient("host", 8086, "username", "password")
        with _mocked_session(cli, "get", 400):
            self.cli.revoke_privilege("", "testdb", "test")
Esempio n. 13
0
class Tourbillon(object):
    """Tourbillon agent.

    Reads its configuration from ``config_file`` (a JSON document), loads
    metric-collection plugin tasks and writes their datapoints to InfluxDB.

    Note: the async methods were converted from the removed
    ``@asyncio.coroutine``/``yield from`` style to native ``async def``
    (required since Python 3.11); they remain awaitable by all callers.
    """

    def __init__(self, config_file):
        """Initialize the agent from a JSON configuration file.

        :param config_file: path to the main configuration file
        """
        super(Tourbillon, self).__init__()

        # Run events signal plugin tasks that the agent is active:
        # one flavour for coroutine plugins, one for thread-based plugins.
        self._aio_run_event = asyncio.Event()
        self._thr_run_event = threading.Event()
        self._loop = asyncio.get_event_loop()
        self._tasks = []
        self._pluginconfig = {}

        with open(config_file, 'r') as f:
            self._config = json.load(f)

        formatter = logging.Formatter(fmt=self._config['log_format'])
        handler = logging.handlers.WatchedFileHandler(
            self._config['log_file'])
        handler.setFormatter(formatter)
        handler.setLevel(getattr(logging, self._config['log_level']))
        logging.getLogger().addHandler(handler)
        logging.getLogger().setLevel(
            getattr(logging, self._config['log_level']))
        logger.info('Use config file: %s', config_file)

        self._load_plugins_config(os.path.abspath(
            os.path.dirname(config_file)))

        self._influxdb = InfluxDBClient(**self._config['database'])
        self._databases = [
            i['name'] for i in self._influxdb.get_list_database()
        ]

    def _load_plugins_config(self, tourbillon_conf_dir):
        """Load every ``*.conf`` JSON file found in the plugin config dir.

        ``$tourbillon_conf_dir`` in the configured path is substituted with
        the directory that contains the main config file.
        """
        t = Template(self._config['plugins_conf_dir'])
        plugin_conf_dir = t.safe_substitute(
            tourbillon_conf_dir=tourbillon_conf_dir)
        logger.info('Plugin config dir: %s', plugin_conf_dir)
        config_files = glob.glob(os.path.join(plugin_conf_dir, '*.conf'))
        for file_name in config_files:
            k = os.path.splitext(os.path.basename(file_name))[0]
            with open(file_name, 'r') as f:
                try:
                    self._pluginconfig[k] = json.load(f)
                except Exception:
                    # A broken plugin config must not abort agent startup
                    # (was a bare except:, which also swallowed SystemExit).
                    logger.exception('error loading config file %s',
                                     file_name)

    @property
    def config(self):
        """Return the dictionary of per-plugin configuration."""
        return self._pluginconfig

    @property
    def run_event(self):
        """Return the run event matching the caller's flavour.

        Coroutine callers get the asyncio.Event; plain functions get the
        threading.Event.  The caller is identified via frame inspection.
        """
        cf = inspect.currentframe()
        caller_name = cf.f_back.f_code.co_name
        caller = cf.f_back.f_globals[caller_name]
        if asyncio.iscoroutinefunction(caller) or asyncio.iscoroutine(caller):
            return self._aio_run_event
        return self._thr_run_event

    def push(self, points, database):
        """Write datapoints to InfluxDB synchronously."""
        self._influxdb.write_points(points, database=database)

    def create_database(self,
                        name,
                        duration=None,
                        replication=None,
                        default=True):
        """Create a database (and retention policy) synchronously.

        When both ``duration`` and ``replication`` are given, a retention
        policy named ``<name>_tourbillon`` is created, or altered if it
        exists with different settings.
        """
        if name not in self._databases:
            self._influxdb.create_database(name)
            logger.info('database %s created successfully', name)

        if duration and replication:
            rps = self._influxdb.get_list_retention_policies(name)
            tourbillon_rp_name = '%s_tourbillon' % name
            duration_in_hours = _to_hours(duration)
            logger.debug('duration_in_hours: %s', duration_in_hours)
            for rp in rps:
                if rp['name'] == tourbillon_rp_name:
                    logger.debug('current rp config: %s', rp)
                    if rp['duration'] == duration_in_hours and \
                            rp['replicaN'] == int(replication) and \
                            rp['default'] == default:
                        logger.debug('the retention policy %s already exists',
                                     tourbillon_rp_name)
                        return
                    self._influxdb.alter_retention_policy(
                        tourbillon_rp_name,
                        database=name,
                        duration=duration,
                        replication=replication,
                        default=default)
                    logger.info('retention policy %s altered successfully',
                                tourbillon_rp_name)
                    return
            self._influxdb.create_retention_policy(tourbillon_rp_name,
                                                   database=name,
                                                   duration=duration,
                                                   replication=replication,
                                                   default=default)
            logger.info('retention policy %s created successfully',
                        tourbillon_rp_name)

    async def async_push(self, points, database):
        """Write datapoints to InfluxDB asynchronously (executor thread)."""
        await self._loop.run_in_executor(
            None,
            functools.partial(self._influxdb.write_points,
                              points,
                              database=database))

    async def async_create_database(self,
                                    name,
                                    duration=None,
                                    replication=None,
                                    default=True):
        """Asynchronous variant of :meth:`create_database`."""
        if name not in self._databases:
            await self._loop.run_in_executor(
                None, self._influxdb.create_database, name)
            logger.info('database %s created successfully', name)

        if duration and replication:
            rps = await self._loop.run_in_executor(
                None, self._influxdb.get_list_retention_policies, name)
            tourbillon_rp_name = '%s_tourbillon' % name
            duration_in_hours = _to_hours(duration)
            logger.debug('duration_in_hours: %s', duration_in_hours)
            for rp in rps:
                if rp['name'] == tourbillon_rp_name:
                    logger.debug('current rp: %s', rp)
                    if rp['duration'] == duration_in_hours and \
                            rp['replicaN'] == int(replication) and \
                            rp['default'] == default:
                        logger.debug('the retention policy %s already exists',
                                     tourbillon_rp_name)
                        return
                    await self._loop.run_in_executor(
                        None,
                        functools.partial(
                            self._influxdb.alter_retention_policy,
                            tourbillon_rp_name,
                            database=name,
                            duration=duration,
                            replication=replication,
                            default=default))
                    logger.info('retention policy %s altered successfully',
                                tourbillon_rp_name)
                    return
            await self._loop.run_in_executor(
                None,
                functools.partial(self._influxdb.create_retention_policy,
                                  tourbillon_rp_name,
                                  database=name,
                                  duration=duration,
                                  replication=replication,
                                  default=default))
            logger.info('retention policy %s created successfully',
                        tourbillon_rp_name)

    def _load_tasks(self):
        """Import the configured plugin modules and schedule their tasks."""
        if 'plugins' not in self._config:
            logger.warning('no plugin configured.')
            return
        plugins = self._config['plugins']
        thread_targets_count = 0
        for module_name, functions in plugins.items():
            logger.debug('search for tasks in module %s', module_name)
            module = import_module(module_name)
            logger.debug('module %s successfully imported', module_name)
            for task_name in functions:
                logger.debug('checking declared task %s', task_name)
                if hasattr(module, task_name):
                    candidate_task = getattr(module, task_name)
                    if asyncio.iscoroutinefunction(candidate_task):
                        # asyncio.async() was removed (async is a keyword);
                        # ensure_future is its documented replacement.
                        self._tasks.append(asyncio.ensure_future(
                            candidate_task(self)))
                        task_type = 'coroutine'
                    else:
                        self._tasks.append(
                            self._loop.run_in_executor(None, candidate_task,
                                                       self))
                        task_type = 'function'
                        thread_targets_count += 1
                    logger.info('task found: %s.%s, type=%s', module_name,
                                task_name, task_type)
        if thread_targets_count > 0:
            # Leave two spare workers for async_push/async_create_database,
            # which also run on the default executor.
            self._loop.set_default_executor(
                ThreadPoolExecutor(max_workers=thread_targets_count + 2))
        logger.debug('configured tasks: %s', self._tasks)

    def stop(self):
        """Stop the tourbillon agent."""
        self._loop.remove_signal_handler(signal.SIGINT)
        self._loop.remove_signal_handler(signal.SIGTERM)
        logger.info('shutting down tourbillon...')
        self._aio_run_event.clear()
        self._thr_run_event.clear()

    def run(self):
        """Start the tourbillon agent and block until all tasks complete."""
        logger.info('starting tourbillon...')
        self._loop.add_signal_handler(signal.SIGINT, self.stop)
        self._loop.add_signal_handler(signal.SIGTERM, self.stop)
        self._load_tasks()
        self._aio_run_event.set()
        self._thr_run_event.set()
        logger.info('tourbillon started')
        self._loop.run_until_complete(asyncio.wait(self._tasks))
        logger.info('tourbillon shutdown completed')
Esempio n. 14
0
class Output(cowrie.core.output.Output):
    """
    influx output

    Writes cowrie events to InfluxDB as measurements; when an ipstack API
    key is configured, connection events are enriched with geolocation.
    """
    def start(self):
        """Connect to InfluxDB and ensure database and retention policy exist."""
        host = CowrieConfig().get('output_influx', 'host', fallback='')
        port = CowrieConfig().getint('output_influx', 'port', fallback=8086)
        ssl = CowrieConfig().getboolean('output_influx', 'ssl', fallback=False)

        self.client = None

        self.ipstack_api_key = None

        try:
            self.client = InfluxDBClient(host=host,
                                         port=port,
                                         ssl=ssl,
                                         verify_ssl=ssl)
        except InfluxDBClientError as e:
            log.err("output_influx: I/O error({0}): '{1}'".format(
                e.errno, e.strerror))
            return

        if self.client is None:
            log.err("output_influx: cannot instantiate client!")
            return

        if (CowrieConfig().has_option('output_influx', 'username')
                and CowrieConfig().has_option('output_influx', 'password')):
            username = CowrieConfig().get('output_influx', 'username')
            # raw=True: passwords may contain '%', which configparser would
            # otherwise try to interpolate
            password = CowrieConfig().get('output_influx',
                                          'password',
                                          raw=True)
            self.client.switch_user(username, password)

        try:
            dbname = CowrieConfig().get('output_influx', 'database_name')
        except Exception:
            dbname = 'cowrie'

        retention_policy_duration_default = '12w'
        retention_policy_name = dbname + "_retention_policy"

        if CowrieConfig().has_option('output_influx',
                                     'retention_policy_duration'):
            retention_policy_duration = CowrieConfig().get(
                'output_influx', 'retention_policy_duration')

            # InfluxDB duration literal: an integer followed by d/h/m/w
            match = re.search(r'^\d+[dhmw]{1}$', retention_policy_duration)
            if not match:
                # Report the default actually applied (previously this
                # logged the invalid value, and the two string fragments
                # ran together without a space).
                log.err(
                    ("output_influx: invalid retention policy. "
                     "Using default '{}'..").format(
                         retention_policy_duration_default))
                retention_policy_duration = retention_policy_duration_default
        else:
            retention_policy_duration = retention_policy_duration_default

        database_list = self.client.get_list_database()
        dblist = [str(elem['name']) for elem in database_list]

        if dbname not in dblist:
            self.client.create_database(dbname)
            self.client.create_retention_policy(retention_policy_name,
                                                retention_policy_duration,
                                                1,
                                                database=dbname,
                                                default=True)
        else:
            # Database already exists: create the policy if it is missing,
            # otherwise bring it in line with the configured duration.
            retention_policies_list = self.client.get_list_retention_policies(
                database=dbname)
            rplist = [str(elem['name']) for elem in retention_policies_list]
            if retention_policy_name not in rplist:
                self.client.create_retention_policy(retention_policy_name,
                                                    retention_policy_duration,
                                                    1,
                                                    database=dbname,
                                                    default=True)
            else:
                self.client.alter_retention_policy(
                    retention_policy_name,
                    database=dbname,
                    duration=retention_policy_duration,
                    replication=1,
                    default=True)

        self.client.switch_database(dbname)

        if CowrieConfig().has_option("ipstack_api", "ipstack_api_key"):
            self.ipstack_api_key = CowrieConfig().get("ipstack_api",
                                                      "ipstack_api_key")

    def stop(self):
        """Nothing to clean up."""
        pass

    def write(self, entry):
        """Convert a cowrie event dict into a measurement and write it."""
        if self.client is None:
            log.err("output_influx: client object is not instantiated")
            return

        # event id
        eventid = entry['eventid']

        # measurement init
        m = {
            'measurement': eventid.replace('.', '_'),
            'tags': {
                'session': entry['session'],
                'src_ip': entry['src_ip']
            },
            'fields': {
                'sensor': self.sensor
            },
        }

        # event parsing
        if eventid in ['cowrie.command.failed', 'cowrie.command.input']:
            m['fields'].update({
                'input': entry['input'],
            })

        elif eventid == 'cowrie.session.connect':
            if self.ipstack_api_key is not None:
                url = "http://api.ipstack.com/" + entry[
                    'src_ip'] + "?access_key=" + self.ipstack_api_key + "&format=1"
                try:
                    response = requests.get(url)
                    response = json.loads(response.text)
                    # Geolocation attributes are shared by fields and tags;
                    # compute them once (geohash was encoded twice before).
                    geo = {
                        'geohash':
                        geohash.encode(response['latitude'],
                                       response['longitude']),
                        'city':
                        response['city'],
                        'region_code':
                        response['region_code'],
                        'country_code':
                        response['country_code'],
                        'continent_code':
                        response['continent_code'],
                    }
                    m['fields'].update({
                        'protocol':
                        entry['protocol'],
                        'src_ip':
                        entry['src_ip'],
                        'src_port':
                        entry['src_port'],
                        'dst_port':
                        entry['dst_port'],
                        'dst_ip':
                        entry['dst_ip'],
                        'lat':
                        response['latitude'],
                        'long':
                        response['longitude'],
                    })
                    m['fields'].update(geo)
                    m['tags'].update(geo)
                except requests.exceptions.RequestException as e:
                    # Geolocation lookup failed: log it and fall back to the
                    # plain connection data.
                    log.err("output_influx: I/O error({0}): '{1}'".format(
                        e.errno, e.strerror))
                    m['fields'].update({
                        'protocol': entry['protocol'],
                        'src_ip': entry['src_ip'],
                        'src_port': entry['src_port'],
                        'dst_port': entry['dst_port'],
                        'dst_ip': entry['dst_ip'],
                    })

            else:
                m['fields'].update({
                    'protocol': entry['protocol'],
                    'src_ip': entry['src_ip'],
                    'src_port': entry['src_port'],
                    'dst_port': entry['dst_port'],
                    'dst_ip': entry['dst_ip'],
                })

        elif eventid in ['cowrie.login.success', 'cowrie.login.failed']:
            m['fields'].update({
                'username': entry['username'],
                'password': entry['password'],
            })

        elif eventid == 'cowrie.session.file_download':
            m['fields'].update({
                'shasum': entry.get('shasum'),
                'url': entry.get('url'),
                'outfile': entry.get('outfile')
            })

        elif eventid == 'cowrie.session.file_download.failed':
            m['fields'].update({'url': entry.get('url')})

        elif eventid == 'cowrie.session.file_upload':
            m['fields'].update({
                'shasum': entry.get('shasum'),
                'outfile': entry.get('outfile'),
            })

        elif eventid == 'cowrie.session.closed':
            m['fields'].update({'duration': entry['duration']})

        elif eventid == 'cowrie.client.version':
            m['fields'].update({
                'version': ','.join(entry['version']),
            })

        elif eventid == 'cowrie.client.kex':
            m['fields'].update({
                'maccs': ','.join(entry['macCS']),
                'kexalgs': ','.join(entry['kexAlgs']),
                'keyalgs': ','.join(entry['keyAlgs']),
                'compcs': ','.join(entry['compCS']),
                'enccs': ','.join(entry['encCS'])
            })

        elif eventid == 'cowrie.client.size':
            m['fields'].update({
                'height': entry['height'],
                'width': entry['width'],
            })

        elif eventid == 'cowrie.client.var':
            m['fields'].update({
                'name': entry['name'],
                'value': entry['value'],
            })

        elif eventid == 'cowrie.client.fingerprint':
            m['fields'].update({'fingerprint': entry['fingerprint']})

            # cowrie.direct-tcpip.data, cowrie.direct-tcpip.request
            # cowrie.log.closed
            # are not implemented
        else:
            # other events should be handled
            log.err("output_influx: event '{}' not handled. Skipping..".format(
                eventid))
            return

        result = self.client.write_points([m])

        if not result:
            # fixed: the two fragments previously concatenated without a space
            log.err("output_influx: error when writing '{}' measurement "
                    "in the db.".format(eventid))
                                options.influxdb_db)
        try:
            logging.info("Creating database %s" % options.influxdb_db)
            client.create_database(options.influxdb_db)
        except Exception, e:
            logging.warn("Unable to create database, does it already exist?")
            # client.create_retention_policy("30d", "4w", 1, options.influxdb_db, True)

    except Exception, e:
        logging.error("Unable to connect to influxdb")
        sys.exit(1)

    if options.update_retention:
        logging.info("Altering retention policies")
        client.alter_retention_policy("default",
                                      duration="4w",
                                      replication=1,
                                      default=True)

    # forces read-only
    options.testmode = True
    settings.env = options.env.lower()

    logging.info("Connecting to appliance in %s, testmode:%s" %
                 (settings.env, options.testmode))
    connection = SolaceAPI(settings.env, testmode=options.testmode)
    """
    Gather client stats, this is quite slow if you have MANY clients!
    """
    if options.clients:
        connection.x = SolaceXMLBuilder("show clients stats")
        connection.x.show.client.name = options.filter
Esempio n. 16
0
class Output(cowrie.core.output.Output):
    """
    influx output: write cowrie events to an InfluxDB database.
    """

    def __init__(self):
        cowrie.core.output.Output.__init__(self)

    def start(self):
        """Connect to InfluxDB and ensure database and retention policy exist."""
        try:
            host = CONFIG.get('output_influx', 'host')
        except Exception:
            host = ''

        try:
            port = CONFIG.getint('output_influx', 'port')
        except Exception:
            port = 8086

        try:
            ssl = CONFIG.getboolean('output_influx', 'ssl')
        except Exception:
            ssl = False

        self.client = None
        try:
            self.client = InfluxDBClient(host=host, port=port, ssl=ssl, verify_ssl=ssl)
        except InfluxDBClientError as e:
            log.err("output_influx: I/O error({0}): '{1}'".format(
                e.errno, e.strerror))
            return

        if self.client is None:
            log.err("output_influx: cannot instantiate client!")
            return

        if (CONFIG.has_option('output_influx', 'username') and
                CONFIG.has_option('output_influx', 'password')):
            username = CONFIG.get('output_influx', 'username')
            # raw=True: passwords may contain '%', which configparser would
            # otherwise try to interpolate
            password = CONFIG.get('output_influx', 'password', raw=True)
            self.client.switch_user(username, password)

        try:
            dbname = CONFIG.get('output_influx', 'database_name')
        except Exception:
            dbname = 'cowrie'

        retention_policy_duration_default = '12w'
        retention_policy_name = dbname + "_retention_policy"

        if CONFIG.has_option('output_influx', 'retention_policy_duration'):
            retention_policy_duration = CONFIG.get(
                'output_influx', 'retention_policy_duration')

            # InfluxDB duration literal: an integer followed by d/h/m/w
            match = re.search(r'^\d+[dhmw]{1}$', retention_policy_duration)
            if not match:
                # Report the default actually applied (previously this logged
                # the invalid value, and the two string fragments ran
                # together without a space).
                log.err(("output_influx: invalid retention policy. "
                         "Using default '{}'..").format(
                    retention_policy_duration_default))
                retention_policy_duration = retention_policy_duration_default
        else:
            retention_policy_duration = retention_policy_duration_default

        database_list = self.client.get_list_database()
        dblist = [str(elem['name']) for elem in database_list]

        if dbname not in dblist:
            self.client.create_database(dbname)
            self.client.create_retention_policy(
                retention_policy_name, retention_policy_duration, 1,
                database=dbname, default=True)
        else:
            # Database already exists: create the policy if it is missing,
            # otherwise bring it in line with the configured duration.
            retention_policies_list = self.client.get_list_retention_policies(
                database=dbname)
            rplist = [str(elem['name']) for elem in retention_policies_list]
            if retention_policy_name not in rplist:
                self.client.create_retention_policy(
                    retention_policy_name, retention_policy_duration, 1,
                    database=dbname, default=True)
            else:
                self.client.alter_retention_policy(
                    retention_policy_name, database=dbname,
                    duration=retention_policy_duration,
                    replication=1, default=True)

        self.client.switch_database(dbname)

    def stop(self):
        """Nothing to clean up."""
        pass

    def write(self, entry):
        """Convert a cowrie event dict into a measurement and write it."""
        if self.client is None:
            log.err("output_influx: client object is not instantiated")
            return

        # event id
        eventid = entry['eventid']

        # measurement init
        m = {
            'measurement': eventid.replace('.', '_'),
            'tags': {
                'session': entry['session'],
                'src_ip': entry['src_ip']
            },
            'fields': {
                'sensor': self.sensor
            },
        }

        # event parsing
        if eventid in ['cowrie.command.failed',
                       'cowrie.command.input']:
            m['fields'].update({
                'input': entry['input'],
            })

        elif eventid == 'cowrie.session.connect':
            m['fields'].update({
                'protocol': entry['protocol'],
                'src_port': entry['src_port'],
                'dst_port': entry['dst_port'],
                'dst_ip': entry['dst_ip'],
            })

        elif eventid in ['cowrie.login.success', 'cowrie.login.failed']:
            m['fields'].update({
                'username': entry['username'],
                'password': entry['password'],
            })

        elif eventid == 'cowrie.session.file_download':
            m['fields'].update({
                'shasum': entry.get('shasum'),
                'url': entry.get('url'),
                'outfile': entry.get('outfile')
            })

        elif eventid == 'cowrie.session.file_download.failed':
            m['fields'].update({
                'url': entry.get('url')
            })

        elif eventid == 'cowrie.session.file_upload':
            m['fields'].update({
                'shasum': entry.get('shasum'),
                'outfile': entry.get('outfile'),
            })

        elif eventid == 'cowrie.session.closed':
            m['fields'].update({
                'duration': entry['duration']
            })

        elif eventid == 'cowrie.client.version':
            m['fields'].update({
                'version': ','.join(entry['version']),
            })

        elif eventid == 'cowrie.client.kex':
            m['fields'].update({
                'maccs': ','.join(entry['macCS']),
                'kexalgs': ','.join(entry['kexAlgs']),
                'keyalgs': ','.join(entry['keyAlgs']),
                'compcs': ','.join(entry['compCS']),
                'enccs': ','.join(entry['encCS'])
            })

        elif eventid == 'cowrie.client.size':
            m['fields'].update({
                'height': entry['height'],
                'width': entry['width'],
            })

        elif eventid == 'cowrie.client.var':
            m['fields'].update({
                'name': entry['name'],
                'value': entry['value'],
            })

        elif eventid == 'cowrie.client.fingerprint':
            m['fields'].update({
                'fingerprint': entry['fingerprint']
            })

            # cowrie.direct-tcpip.data, cowrie.direct-tcpip.request
            # cowrie.log.closed
            # are not implemented
        else:
            # other events should be handled
            log.err(
                "output_influx: event '{}' not handled. Skipping..".format(
                    eventid))
            return

        result = self.client.write_points([m])

        if not result:
            # fixed: the two fragments previously concatenated without a space
            log.err("output_influx: error when writing '{}' measurement "
                    "in the db.".format(eventid))
Esempio n. 17
0
                            database=INFLUXDB_DATABASE)

    client.create_database(INFLUXDB_DATABASE)

    try:
        # Ensure we can connect. Wait for 2 minutes for WSP to startup.
        SESSION.get(PROXY_BASE_URL, timeout=120)
        configuration = get_configuration()

        # set up our default retention policies if we have that configured
        try:
            client.create_retention_policy("default_retention", "1w", "1",
                                           INFLUXDB_DATABASE, True)
        except InfluxDBClientError:
            LOG.info("Updating retention policy to {}...".format("1w"))
            client.alter_retention_policy("default_retention",
                                          INFLUXDB_DATABASE, "1w", "1", True)
        try:
            client.create_retention_policy("downsample_retention",
                                           RETENTION_DUR, "1",
                                           INFLUXDB_DATABASE, False)
        except InfluxDBClientError:
            LOG.info(
                "Updating retention policy to {}...".format(RETENTION_DUR))
            client.alter_retention_policy("downsample_retention",
                                          INFLUXDB_DATABASE, RETENTION_DUR,
                                          "1", False)

        # set up continuous queries that will downsample our metric data periodically
        create_continuous_query(DRIVE_PARAMS, "disks")
        create_continuous_query(SYSTEM_PARAMS, "system")
        create_continuous_query(VOLUME_PARAMS, "volumes")
Esempio n. 18
0
class TestInfluxDBClient(unittest.TestCase):
    def setUp(self):
        """Create the shared client and fixture data used by every test."""
        # Promote FutureWarning to an error so deprecated usage fails loudly.
        warnings.simplefilter('error', FutureWarning)

        self.cli = InfluxDBClient('localhost', 8086, 'username', 'password')
        point = {
            "measurement": "cpu_load_short",
            "tags": {"host": "server01", "region": "us-west"},
            "time": "2009-11-10T23:00:00.123456Z",
            "fields": {"value": 0.64},
        }
        self.dummy_points = [point]

        self.dsn_string = 'influxdb://*****:*****@my.host.fr:1886/db'

    def test_scheme(self):
        """The base URL scheme follows the ssl flag."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        self.assertEqual('http://host:8086', client._baseurl)

        client = InfluxDBClient(
            'host', 8086, 'username', 'password', 'database', ssl=True)
        self.assertEqual('https://host:8086', client._baseurl)

    def test_dsn(self):
        """from_DSN parses scheme, credentials, host, port and database."""
        client = InfluxDBClient.from_DSN('influxdb://192.168.0.1:1886')
        self.assertEqual('http://192.168.0.1:1886', client._baseurl)

        client = InfluxDBClient.from_DSN(self.dsn_string)
        self.assertEqual('http://my.host.fr:1886', client._baseurl)
        self.assertEqual('uSr', client._username)
        self.assertEqual('pWd', client._password)
        self.assertEqual('db', client._database)
        self.assertFalse(client.use_udp)

        # A 'udp+' prefix switches the transport to UDP.
        client = InfluxDBClient.from_DSN('udp+' + self.dsn_string)
        self.assertTrue(client.use_udp)

        # An 'https+' prefix enables TLS ...
        client = InfluxDBClient.from_DSN('https+' + self.dsn_string)
        self.assertEqual('https://my.host.fr:1886', client._baseurl)

        # ... unless explicitly overridden via a keyword argument.
        client = InfluxDBClient.from_DSN('https+' + self.dsn_string,
                                         ssl=False)
        self.assertEqual('http://my.host.fr:1886', client._baseurl)

    def test_switch_database(self):
        """switch_database replaces the client's active database."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        client.switch_database('another_database')
        self.assertEqual('another_database', client._database)

    def test_switch_user(self):
        """switch_user replaces both stored credentials."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        client.switch_user('another_username', 'another_password')
        self.assertEqual('another_username', client._username)
        self.assertEqual('another_password', client._password)

    def test_write(self):
        """write() serialises a raw write payload into line protocol."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)
            client = InfluxDBClient(database='db')
            payload = {
                "database": "mydb",
                "retentionPolicy": "mypolicy",
                "points": [{
                    "measurement": "cpu_load_short",
                    "tags": {"host": "server01", "region": "us-west"},
                    "time": "2009-11-10T23:00:00Z",
                    "fields": {"value": 0.64},
                }],
            }
            client.write(payload)

            self.assertEqual(
                mocked.last_request.body,
                b"cpu_load_short,host=server01,region=us-west "
                b"value=0.64 1257894000000000000\n",
            )

    def test_write_points(self):
        """write_points posts the fixture point as line protocol."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)

            client = InfluxDBClient(database='db')
            client.write_points(self.dummy_points)
            self.assertEqual(
                'cpu_load_short,host=server01,region=us-west '
                'value=0.64 1257894000123456000\n',
                mocked.last_request.body.decode('utf-8'),
            )

    def test_write_points_toplevel_attributes(self):
        """Top-level database/tags/retention_policy are merged into the write."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)

            client = InfluxDBClient(database='db')
            client.write_points(self.dummy_points,
                                database='testdb',
                                tags={"tag": "hello"},
                                retention_policy="somepolicy")
            # The extra tag is appended to the per-point tag set.
            self.assertEqual(
                'cpu_load_short,host=server01,region=us-west,tag=hello '
                'value=0.64 1257894000123456000\n',
                mocked.last_request.body.decode('utf-8'),
            )

    def test_write_points_batch(self):
        """batch_size=2 splits three points into exactly two POSTs."""
        dummy_points = [
            {"measurement": "cpu_usage", "tags": {"unit": "percent"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
            {"measurement": "network", "tags": {"direction": "in"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
            {"measurement": "network", "tags": {"direction": "out"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}},
        ]
        expected_last_body = (
            "network,direction=out,host=server01,region=us-west "
            "value=12.0 1257894000000000000\n")

        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)
            client = InfluxDBClient(database='db')
            client.write_points(points=dummy_points,
                                database='db',
                                tags={"host": "server01",
                                      "region": "us-west"},
                                batch_size=2)
        # Two batches: [2 points, 1 point]; the mock records both calls.
        self.assertEqual(mocked.call_count, 2)
        self.assertEqual(expected_last_body,
                         mocked.last_request.body.decode('utf-8'))

    def test_write_points_udp(self):
        """write_points over UDP sends the line protocol as a datagram."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            # Bind to an OS-chosen ephemeral port instead of a random one in
            # [4000, 8000] that may already be in use (flaky failure).
            s.bind(('0.0.0.0', 0))
            port = s.getsockname()[1]

            cli = InfluxDBClient('localhost',
                                 8086,
                                 'root',
                                 'root',
                                 'test',
                                 use_udp=True,
                                 udp_port=port)
            cli.write_points(self.dummy_points)

            received_data, addr = s.recvfrom(1024)
        finally:
            # The original leaked the socket; always release it.
            s.close()

        self.assertEqual(
            'cpu_load_short,host=server01,region=us-west '
            'value=0.64 1257894000123456000\n', received_data.decode())

    def test_write_bad_precision_udp(self):
        """UDP writes reject any precision other than seconds."""
        cli = InfluxDBClient('localhost',
                             8086,
                             'root',
                             'root',
                             'test',
                             use_udp=True,
                             udp_port=4444)

        # assertRaisesRegexp is a deprecated alias (removed in Python 3.12);
        # use assertRaisesRegex.
        with self.assertRaisesRegex(
                Exception,
                "InfluxDB only supports seconds precision for udp writes"):
            cli.write_points(self.dummy_points, time_precision='ms')

    @raises(Exception)
    def test_write_points_fails(self):
        """An HTTP 500 from the server surfaces as an exception."""
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(client, 'post', 500):
            client.write_points([])

    def test_write_points_with_precision(self):
        """The timestamp is encoded at each supported precision.

        The six copy-pasted call/assert pairs of the original are collapsed
        into a data-driven loop; subTest keeps per-precision failure reports.
        """
        # (precision flag, expected line-protocol body) for the fixture
        # point timestamped "2009-11-10T23:00:00.123456Z".
        cases = [
            ('n', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 1257894000123456000\n'),
            ('u', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 1257894000123456\n'),
            ('ms', b'cpu_load_short,host=server01,region=us-west '
                   b'value=0.64 1257894000123\n'),
            ('s', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 1257894000\n'),
            ('m', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 20964900\n'),
            ('h', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 349415\n'),
        ]

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)

            cli = InfluxDBClient(database='db')

            for precision, expected in cases:
                with self.subTest(time_precision=precision):
                    cli.write_points(self.dummy_points,
                                     time_precision=precision)
                    self.assertEqual(expected, m.last_request.body)

    def test_write_points_bad_precision(self):
        """An unsupported precision flag raises before any request is made."""
        cli = InfluxDBClient()
        # Fixes two issues: assertRaisesRegexp is a deprecated alias, and
        # '\(' in a non-raw string is an invalid escape sequence
        # (DeprecationWarning, SyntaxWarning since 3.12) -- use a raw string.
        with self.assertRaisesRegex(
                Exception, r"Invalid time precision is given. "
                r"\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"):
            cli.write_points(self.dummy_points, time_precision='g')

    @raises(Exception)
    def test_write_points_with_precision_fails(self):
        """A 500 response makes write_points_with_precision raise."""
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(client, 'post', 500):
            client.write_points_with_precision([])

    def test_query(self):
        """Query two series and check the parsed result sets."""
        example_response = (
            '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
            '"columns": ["time", "value"], "values": '
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
            '[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}')

        with requests_mock.Mocker() as m:
            # NOTE(review): the line below is corrupted -- a credential
            # scrubber replaced the mock URL and swallowed the remainder of
            # this test plus the blank lines before the next method, fusing
            # its '@unittest.skip' decorator onto this line. Restore from the
            # upstream influxdb-python test suite before running.
            m.register_uri(requests_mock.GET,
                           "http://*****:*****@unittest.skip('Not implemented for 0.9')
    def test_query_chunked(self):
        """Chunked responses are parsed into a list of result objects."""
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [[1415206250119, 40001, 667], [1415206244555, 30001, 7],
                       [1415206228241, 20001,
                        788], [1415206212980, 10001, 555],
                       [1415197271586, 10001, 23]],
            'measurement':
            'foo',
            'columns': ['time', 'sequence_number', 'val']
        }
        # Two concatenated JSON documents emulate a chunked response body.
        example_response = \
            json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/db/db/series",
                           text=example_response)

            self.assertListEqual(cli.query('select * from foo', chunked=True),
                                 [example_object, example_object])

    @raises(Exception)
    def test_query_fail(self):
        """An HTTP 401 makes query() raise."""
        with _mocked_session(self.cli, 'get', 401):
            self.cli.query('select column_one from foo;')

    def test_create_database(self):
        """create_database issues a quoted CREATE DATABASE statement."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text='{"results":[{}]}')
            self.cli.create_database('new_db')
            self.assertEqual('create database "new_db"',
                             mocked.last_request.qs['q'][0])

    def test_create_numeric_named_database(self):
        """Purely numeric database names are still quoted correctly."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text='{"results":[{}]}')
            self.cli.create_database('123')
            self.assertEqual('create database "123"',
                             mocked.last_request.qs['q'][0])

    @raises(Exception)
    def test_create_database_fails(self):
        """A 401 response makes create_database raise."""
        with _mocked_session(self.cli, 'post', 401):
            self.cli.create_database('new_db')

    def test_drop_database(self):
        """drop_database issues a quoted DROP DATABASE statement."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text='{"results":[{}]}')
            self.cli.drop_database('new_db')
            self.assertEqual('drop database "new_db"',
                             mocked.last_request.qs['q'][0])

    def test_drop_numeric_named_database(self):
        """Numeric database names are quoted in DROP DATABASE too."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text='{"results":[{}]}')
            self.cli.drop_database('123')
            self.assertEqual('drop database "123"',
                             mocked.last_request.qs['q'][0])

    def test_get_list_database(self):
        """get_list_database maps the 'databases' series to name dicts."""
        data = {'results': [{'series': [{
            'name': 'databases',
            'values': [['new_db_1'], ['new_db_2']],
            'columns': ['name'],
        }]}]}

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            expected = [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
            self.assertListEqual(self.cli.get_list_database(), expected)

    @raises(Exception)
    def test_get_list_database_fails(self):
        """A 401 response makes get_list_database raise."""
        client = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(client, 'get', 401):
            client.get_list_database()

    def test_create_retention_policy_default(self):
        """default=True appends the DEFAULT clause to the statement."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text='{"results":[{}]}')
            self.cli.create_retention_policy('somename', '1d', 4,
                                             default=True, database='db')

            self.assertEqual(
                'create retention policy "somename" on '
                '"db" duration 1d replication 4 default',
                mocked.last_request.qs['q'][0])

    def test_create_retention_policy(self):
        """Without default=True no DEFAULT clause is emitted."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text='{"results":[{}]}')
            self.cli.create_retention_policy('somename', '1d', 4,
                                             database='db')

            self.assertEqual(
                'create retention policy "somename" on '
                '"db" duration 1d replication 4',
                mocked.last_request.qs['q'][0])

    def test_alter_retention_policy(self):
        """Each keyword argument maps to its own ALTER clause."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text='{"results":[{}]}')
            # duration only
            self.cli.alter_retention_policy('somename', 'db', duration='4d')
            self.assertEqual(
                'alter retention policy "somename" on "db" duration 4d',
                mocked.last_request.qs['q'][0])
            # replication only
            self.cli.alter_retention_policy('somename', 'db', replication=4)
            self.assertEqual(
                'alter retention policy "somename" on "db" replication 4',
                mocked.last_request.qs['q'][0])
            # default only
            self.cli.alter_retention_policy('somename', 'db', default=True)
            self.assertEqual(
                'alter retention policy "somename" on "db" default',
                mocked.last_request.qs['q'][0])

    @raises(Exception)
    def test_alter_retention_policy_invalid(self):
        """An HTTP 400 makes alter_retention_policy raise."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Bug fix: the original called self.cli (whose session is NOT
            # mocked here), so the 400 path was never exercised and the
            # @raises(Exception) was satisfied by an unrelated failure.
            cli.alter_retention_policy('somename', 'db')

    def test_drop_retention_policy(self):
        """drop_retention_policy issues the quoted DROP statement."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text='{"results":[{}]}')
            self.cli.drop_retention_policy('somename', 'db')
            self.assertEqual('drop retention policy "somename" on "db"',
                             mocked.last_request.qs['q'][0])

    @raises(Exception)
    def test_drop_retention_policy_fails(self):
        """A 401 response makes drop_retention_policy raise."""
        client = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(client, 'delete', 401):
            client.drop_retention_policy('default', 'db')

    def test_get_list_retention_policies(self):
        """List retention policies parsed from the mocked response."""
        example_response = \
            '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\
            ' "columns": ["name", "duration", "replicaN"]}]}]}'

        with requests_mock.Mocker() as m:
            # NOTE(review): the line below is corrupted -- a credential
            # scrubber replaced the mock URL and swallowed the rest of this
            # test, fusing the next method's '@mock.patch' decorator onto
            # this line. Restore from the upstream influxdb-python test
            # suite before running.
            m.register_uri(requests_mock.GET,
                           "http://*****:*****@mock.patch('requests.Session.request')
    def test_request_retry(self, mock_request):
        """Tests that two connection errors will be handled"""
        class CustomMock(object):
            # Counts how many times the patched request has been invoked.
            i = 0

            def connection_error(self, *args, **kwargs):
                self.i += 1

                # Fail the first two attempts, succeed on the third --
                # the client's retry logic must absorb both errors.
                if self.i < 3:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 204
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')
        cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_request_retry_raises(self, mock_request):
        """Tests that three connection errors will not be handled"""
        attempts = {'count': 0}

        # Fail the first three attempts; the client only retries twice,
        # so the third ConnectionError must propagate to the caller.
        def flaky_request(*args, **kwargs):
            attempts['count'] += 1
            if attempts['count'] < 4:
                raise requests.exceptions.ConnectionError
            response = requests.Response()
            response.status_code = 200
            return response

        mock_request.side_effect = flaky_request

        cli = InfluxDBClient(database='db')

        with self.assertRaises(requests.exceptions.ConnectionError):
            cli.write_points(self.dummy_points)

    def test_get_list_users(self):
        """get_list_users parses user names and admin flags."""
        example_response = (
            '{"results":[{"series":[{"columns":["user","admin"],'
            '"values":[["test",false]]}]}]}')

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)

            # Fix: the mocked response names the user "test"; the previous
            # expected value ('******') was a credential-scrubbing artifact
            # and could never match.
            self.assertListEqual(self.cli.get_list_users(), [{
                'user': 'test',
                'admin': False
            }])

    def test_get_list_users_empty(self):
        """A series without values yields an empty user list."""
        response = '{"results":[{"series":[{"columns":["user","admin"]}]}]}'
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text=response)

            self.assertListEqual(self.cli.get_list_users(), [])

    def test_grant_admin_privileges(self):
        """grant_admin_privileges issues GRANT ALL PRIVILEGES TO."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text='{"results":[{}]}')
            self.cli.grant_admin_privileges('test')

            self.assertEqual('grant all privileges to test',
                             mocked.last_request.qs['q'][0])

    @raises(Exception)
    def test_grant_admin_privileges_invalid(self):
        """An HTTP 400 makes grant_admin_privileges raise."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Bug fix: the original called self.cli, bypassing the mocked
            # session attached to the local client.
            cli.grant_admin_privileges('')

    def test_revoke_admin_privileges(self):
        """revoke_admin_privileges issues REVOKE ALL PRIVILEGES FROM."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text='{"results":[{}]}')
            self.cli.revoke_admin_privileges('test')

            self.assertEqual('revoke all privileges from test',
                             mocked.last_request.qs['q'][0])

    @raises(Exception)
    def test_revoke_admin_privileges_invalid(self):
        """An HTTP 400 makes revoke_admin_privileges raise."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Bug fix: the original called self.cli, bypassing the mocked
            # session attached to the local client.
            cli.revoke_admin_privileges('')

    def test_grant_privilege(self):
        """grant_privilege issues GRANT <priv> ON <db> TO <user>."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text='{"results":[{}]}')
            self.cli.grant_privilege('read', 'testdb', 'test')

            self.assertEqual('grant read on testdb to test',
                             mocked.last_request.qs['q'][0])

    @raises(Exception)
    def test_grant_privilege_invalid(self):
        """An HTTP 400 makes grant_privilege raise."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Bug fix: the original called self.cli, bypassing the mocked
            # session attached to the local client.
            cli.grant_privilege('', 'testdb', 'test')

    def test_revoke_privilege(self):
        """revoke_privilege issues REVOKE <priv> ON <db> FROM <user>."""
        with requests_mock.Mocker() as mocked:
            mocked.register_uri(requests_mock.GET,
                                "http://localhost:8086/query",
                                text='{"results":[{}]}')
            self.cli.revoke_privilege('read', 'testdb', 'test')

            self.assertEqual('revoke read on testdb from test',
                             mocked.last_request.qs['q'][0])

    @raises(Exception)
    def test_revoke_privilege_invalid(self):
        """An HTTP 400 makes revoke_privilege raise."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Bug fix: the original called self.cli, bypassing the mocked
            # session attached to the local client.
            cli.revoke_privilege('', 'testdb', 'test')

    def test_get_list_privileges(self):
        """get_list_privileges zips columns with each values row."""
        data = {'results': [{'series': [{
            'columns': ['database', 'privilege'],
            'values': [['db1', 'READ'],
                       ['db2', 'ALL PRIVILEGES'],
                       ['db3', 'NO PRIVILEGES']],
        }]}]}

        expected = [
            {'database': 'db1', 'privilege': 'READ'},
            {'database': 'db2', 'privilege': 'ALL PRIVILEGES'},
            {'database': 'db3', 'privilege': 'NO PRIVILEGES'},
        ]
        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(self.cli.get_list_privileges('test'),
                                 expected)

    @raises(Exception)
    def test_get_list_privileges_fails(self):
        """A 401 response makes get_list_privileges raise."""
        client = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(client, 'get', 401):
            client.get_list_privileges('test')

    def test_invalid_port_fails(self):
        """A non-numeric port string is rejected at construction time."""
        with self.assertRaises(ValueError):
            InfluxDBClient('host', '80/redir', 'username', 'password')
# --- Esempio n. 19 (example separator from the source aggregation) ---
class InfluxDBExporter(object):
    """Buffer line-protocol points and push them in batches to InfluxDB.

    Args:
        host, port: InfluxDB server location.
        dbname: target database name.
        user, pwd: credentials kept for callers; note the client below is
            built without them (connects unauthenticated) -- preserved as-is.
        flushtime: flush interval, stored for callers of this exporter.
        db_management: falsy to skip database preparation, otherwise a dict
            with 'drop_db' (bool) and 'retention' (days, int) keys.
        geohash: optional mapping stored on the instance (semantics defined
            by callers -- not used in this class).
    """

    def __init__(self,
                 host,
                 port,
                 dbname,
                 user,
                 pwd,
                 flushtime,
                 db_management,
                 geohash=None):
        self.host = host
        self.port = port
        self.dbname = dbname
        self.user = user
        self.pwd = pwd
        self.flushtime = flushtime
        self.client = None
        # Bug fix: the original used 'geohash={}', a mutable default
        # argument shared by every instance constructed without one.
        self.geohash = geohash if geohash is not None else {}

        # holds 'point' to be send to influxdb (1 by line)
        self.data = []

        # max batch size to send:  no more than 5000 (cf. influxdb doc.)
        self.nb_data_max = 5000

        # nb of rqt error before aborting
        self.NB_MAX_TRY_REQUEST = 10

        # get influxdb client
        self.client = InfluxDBClient(host=host, port=port, database=dbname)

        if db_management:
            self.prepare_db(db_management)

    def prepare_db(self, db_management):
        """(Re)create the database and apply its retention policy."""
        if db_management['drop_db']:
            self.drop_db()
        self.create_db()
        self.set_retention_policies(db_management['retention'])

    def drop_db(self, dbname=None):
        """Drop *dbname* (default: self.dbname); best effort, never raises."""
        if not dbname:
            dbname = self.dbname
        logger.info("Drop %s database." % dbname)
        try:
            self.client.drop_database(dbname)
        except Exception as e:
            # Deliberately non-fatal: the database may not exist yet.
            logger.warning(e)
            logger.warning("Can't drop %s database (not existing yet ?)." %
                           dbname)

    def create_db(self, dbname=None):
        """Create *dbname* (default: self.dbname) and switch the client to it.

        Raises:
            Exception: if creation or switching fails.
        """
        if not dbname:
            dbname = self.dbname
        logger.info("Open/Create %s database." % dbname)
        try:
            self.client.create_database(dbname)
        except Exception as e:
            raise Exception("Can't create database %s (%s)!" % (dbname, e))

        try:
            self.client.switch_database(dbname)
        except Exception:
            raise Exception("Can't switch to database %s" % dbname)

    def set_retention_policies(self, days, dbname=None):
        """Install the default 'in_days' policy keeping *days* days of data.

        Falls back to altering the policy when it already exists.
        """
        if not dbname:
            dbname = self.dbname
        name = "in_days"
        logger.info(
            "Setting %s retention policy on %s database, keep=%d days." %
            (name, dbname, days))
        try:
            self.client.create_retention_policy(name,
                                                duration="%dd" % days,
                                                replication="1",
                                                database=dbname,
                                                default=True)
        except Exception:
            logger.info("Policy already exists. Changing to new policy !")
            self.client.alter_retention_policy(name,
                                               database=dbname,
                                               duration="%dd" % days,
                                               replication=1,
                                               default=True)

    def send_points(self, debug=False):
        """ Send all data points to influxdb

        To speed-up things make our own "data line"
        (bypass influxdb write_points python api)

        Retries up to NB_MAX_TRY_REQUEST times on client/server/connection
        errors before re-raising the last one.
        """
        data = '\n'.join(self.data[:self.nb_data_max])
        del self.data[:self.nb_data_max]

        # Bug fix: copy before mutating -- the original added
        # 'Content-type' directly into the client's shared _headers dict,
        # leaking the override into every later request of this client.
        headers = dict(self.client._headers)
        headers['Content-type'] = 'application/octet-stream'

        nb_try = 0
        while True:
            nb_try += 1
            try:
                self.client.request(url="write",
                                    method='POST',
                                    params={'db': self.client._database},
                                    data=data,
                                    expected_response_code=204,
                                    headers=headers)
            except (InfluxDBServerError, InfluxDBClientError,
                    requests.exceptions.ConnectionError) as e:
                if nb_try > self.NB_MAX_TRY_REQUEST:
                    # Bare raise preserves the original traceback
                    # ('raise e' re-raised from this frame).
                    raise
                else:
                    logger.error("Request failed (%s)" % e)
                    logger.error("retrying (%d/%d)" %
                                 (nb_try, self.NB_MAX_TRY_REQUEST))
                    continue
            break

    def run(self):
        """Hook for subclasses; the base exporter does nothing."""
        pass
# --- Esempio n. 20 (example separator from the source aggregation) ---
class TestInfluxDBClient(unittest.TestCase):
    """Set up the TestInfluxDBClient object."""
    def setUp(self):
        """Initialize an instance of TestInfluxDBClient object."""
        # By default, raise exceptions on warnings
        warnings.simplefilter('error', FutureWarning)

        self.cli = InfluxDBClient('localhost', 8086, 'username', 'password')

        # A single well-known point reused by most write tests.
        point = {
            "measurement": "cpu_load_short",
            "tags": {"host": "server01", "region": "us-west"},
            "time": "2009-11-10T23:00:00.123456Z",
            "fields": {"value": 0.64},
        }
        self.dummy_points = [point]

        self.dsn_string = 'influxdb://*****:*****@my.host.fr:1886/db'

    def test_scheme(self):
        """Test that host/ssl/path combinations build the right base URL."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        self.assertEqual('http://host:8086', cli._baseurl)

        cli = InfluxDBClient('host',
                             8086,
                             'username',
                             'password',
                             'database',
                             ssl=True)
        self.assertEqual('https://host:8086', cli._baseurl)

        cli = InfluxDBClient('host',
                             8086,
                             'username',
                             'password',
                             'database',
                             ssl=True,
                             path="somepath")
        self.assertEqual('https://host:8086/somepath', cli._baseurl)

        # path=None must behave exactly like omitting the path
        # (edge case present in the canonical test-suite).
        cli = InfluxDBClient('host',
                             8086,
                             'username',
                             'password',
                             'database',
                             ssl=True,
                             path=None)
        self.assertEqual('https://host:8086', cli._baseurl)

        cli = InfluxDBClient('host',
                             8086,
                             'username',
                             'password',
                             'database',
                             ssl=True,
                             path="/somepath")
        self.assertEqual('https://host:8086/somepath', cli._baseurl)

    def test_dsn(self):
        """Test that from_dsn parses scheme, credentials, host and db."""
        cli = InfluxDBClient.from_dsn('influxdb://192.168.0.1:1886')
        self.assertEqual('http://192.168.0.1:1886', cli._baseurl)

        cli = InfluxDBClient.from_dsn(self.dsn_string)
        self.assertEqual('http://my.host.fr:1886', cli._baseurl)
        self.assertEqual('uSr', cli._username)
        self.assertEqual('pWd', cli._password)
        self.assertEqual('db', cli._database)
        self.assertFalse(cli._use_udp)

        # A udp+ prefix switches the transport to UDP.
        cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string)
        self.assertTrue(cli._use_udp)

        # An https+ prefix enables TLS ...
        cli = InfluxDBClient.from_dsn('https+' + self.dsn_string)
        self.assertEqual('https://my.host.fr:1886', cli._baseurl)

        # ... unless an explicit ssl=False keyword overrides it.
        cli = InfluxDBClient.from_dsn('https+' + self.dsn_string, ssl=False)
        self.assertEqual('http://my.host.fr:1886', cli._baseurl)

    def test_switch_database(self):
        """Test switch database in TestInfluxDBClient object."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        client.switch_database('another_database')
        self.assertEqual('another_database', client._database)

    def test_switch_user(self):
        """Test switch user in TestInfluxDBClient object."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        client.switch_user('another_username', 'another_password')
        self.assertEqual('another_username', client._username)
        self.assertEqual('another_password', client._password)

    def test_write(self):
        """Test write in TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = InfluxDBClient(database='db')
            payload = {
                "database": "mydb",
                "retentionPolicy": "mypolicy",
                "points": [{
                    "measurement": "cpu_load_short",
                    "tags": {"host": "server01", "region": "us-west"},
                    "time": "2009-11-10T23:00:00Z",
                    "fields": {"value": 0.64},
                }],
            }
            cli.write(payload)

            # The point must be serialized to line protocol with a
            # nanosecond timestamp.
            expected = (b"cpu_load_short,host=server01,region=us-west "
                        b"value=0.64 1257894000000000000\n")
            self.assertEqual(m.last_request.body, expected)

    def test_write_points(self):
        """Test write points for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)

            client = InfluxDBClient(database='db')
            client.write_points(self.dummy_points)

            expected = ('cpu_load_short,host=server01,region=us-west '
                        'value=0.64 1257894000123456000\n')
            self.assertEqual(expected, m.last_request.body.decode('utf-8'))

    def test_write_points_toplevel_attributes(self):
        """Test write points attrs for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)

            client = InfluxDBClient(database='db')
            # Top-level tags must be merged into every point's tag set.
            client.write_points(
                self.dummy_points,
                database='testdb',
                tags={"tag": "hello"},
                retention_policy="somepolicy",
            )
            expected = (
                'cpu_load_short,host=server01,region=us-west,tag=hello '
                'value=0.64 1257894000123456000\n')
            self.assertEqual(expected, m.last_request.body.decode('utf-8'))

    def test_write_points_batch(self):
        """Test write points batch for TestInfluxDBClient object."""
        def _point(measurement, tags, value):
            # All fixture points share the same timestamp.
            return {
                "measurement": measurement,
                "tags": tags,
                "time": "2009-11-10T23:00:00Z",
                "fields": {"value": value},
            }

        dummy_points = [
            _point("cpu_usage", {"unit": "percent"}, 12.34),
            _point("network", {"direction": "in"}, 123.00),
            _point("network", {"direction": "out"}, 12.00),
        ]
        expected_last_body = (
            "network,direction=out,host=server01,region=us-west "
            "value=12.0 1257894000000000000\n")

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = InfluxDBClient(database='db')
            cli.write_points(points=dummy_points,
                             database='db',
                             tags={"host": "server01",
                                   "region": "us-west"},
                             batch_size=2)
        # Three points with batch_size=2 -> exactly two HTTP writes,
        # the last batch holding only the final point.
        self.assertEqual(m.call_count, 2)
        self.assertEqual(expected_last_body,
                         m.last_request.body.decode('utf-8'))

    def test_write_points_udp(self):
        """Test write points UDP for TestInfluxDBClient object."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Close the socket even if the test fails, so repeated runs do
        # not leak file descriptors or keep the port bound.
        self.addCleanup(s.close)
        port = random.randint(4000, 8000)
        s.bind(('0.0.0.0', port))

        cli = InfluxDBClient('localhost',
                             8086,
                             'root',
                             'root',
                             'test',
                             use_udp=True,
                             udp_port=port)
        cli.write_points(self.dummy_points)

        received_data, addr = s.recvfrom(1024)

        self.assertEqual(
            'cpu_load_short,host=server01,region=us-west '
            'value=0.64 1257894000123456000\n', received_data.decode())

    @raises(Exception)
    def test_write_points_fails(self):
        """Test write points fail for TestInfluxDBClient object."""
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        # A 500 response from the server must surface as an exception.
        with _mocked_session(client, 'post', 500):
            client.write_points([])

    def test_write_points_with_precision(self):
        """Test write points with precision for TestInfluxDBClient object."""
        # (time_precision, expected request body) pairs, checked in order.
        cases = [
            ('n', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 1257894000123456000\n'),
            ('u', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 1257894000123456\n'),
            ('ms', b'cpu_load_short,host=server01,region=us-west '
                   b'value=0.64 1257894000123\n'),
            ('s', b"cpu_load_short,host=server01,region=us-west "
                  b"value=0.64 1257894000\n"),
            ('m', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 20964900\n'),
            ('h', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 349415\n'),
        ]

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)

            cli = InfluxDBClient(database='db')

            for precision, expected in cases:
                cli.write_points(self.dummy_points,
                                 time_precision=precision)
                self.assertEqual(expected, m.last_request.body)

    def test_write_points_with_precision_udp(self):
        """Test write points with precision for TestInfluxDBClient object."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Close the socket even if an assertion fails, so repeated runs
        # do not leak file descriptors or keep the port bound.
        self.addCleanup(s.close)
        port = random.randint(4000, 8000)
        s.bind(('0.0.0.0', port))

        cli = InfluxDBClient('localhost',
                             8086,
                             'root',
                             'root',
                             'test',
                             use_udp=True,
                             udp_port=port)

        # (time_precision, expected UDP payload) pairs, checked in order.
        cases = [
            ('n', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 1257894000123456000\n'),
            ('u', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 1257894000123456\n'),
            ('ms', b'cpu_load_short,host=server01,region=us-west '
                   b'value=0.64 1257894000123\n'),
            ('s', b"cpu_load_short,host=server01,region=us-west "
                  b"value=0.64 1257894000\n"),
            ('m', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 20964900\n'),
            ('h', b'cpu_load_short,host=server01,region=us-west '
                  b'value=0.64 349415\n'),
        ]

        for precision, expected in cases:
            cli.write_points(self.dummy_points, time_precision=precision)
            received_data, addr = s.recvfrom(1024)
            self.assertEqual(expected, received_data)

    def test_write_points_bad_precision(self):
        """Test write points w/bad precision TestInfluxDBClient object."""
        cli = InfluxDBClient()
        # Raw strings: '\(' inside a normal string literal is an invalid
        # escape sequence (DeprecationWarning today, SyntaxError in
        # future Python versions) — and this regex needs literal parens.
        with self.assertRaisesRegexp(
                Exception, r"Invalid time precision is given. "
                r"\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"):
            cli.write_points(self.dummy_points, time_precision='g')

    @raises(Exception)
    def test_write_points_with_precision_fails(self):
        """Test write points w/precision fail for TestInfluxDBClient object."""
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        # A 500 response must surface as an exception.
        with _mocked_session(client, 'post', 500):
            client.write_points_with_precision([])

    def test_query(self):
        """Test query method for TestInfluxDBClient object."""
        example_response = (
            '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
            '"columns": ["time", "value"], "values": '
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
            '[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}')

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://*****:*****@unittest.skip('Not implemented for 0.9')
    def test_query_chunked(self):
        """Test chunked query for TestInfluxDBClient object."""
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [
                [1415206250119, 40001, 667],
                [1415206244555, 30001, 7],
                [1415206228241, 20001, 788],
                [1415206212980, 10001, 555],
                [1415197271586, 10001, 23],
            ],
            'measurement': 'foo',
            'columns': ['time', 'sequence_number', 'val'],
        }
        # The chunked endpoint streams concatenated JSON documents.
        example_response = \
            json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/db/db/series",
                           text=example_response)

            self.assertListEqual(cli.query('select * from foo', chunked=True),
                                 [example_object, example_object])

    @raises(Exception)
    def test_query_fail(self):
        """Test query failed for TestInfluxDBClient object."""
        # A 401 Unauthorized response must propagate as an exception.
        mocked = _mocked_session(self.cli, 'get', 401)
        with mocked:
            self.cli.query('select column_one from foo;')

    def test_ping(self):
        """Test ping querying InfluxDB version."""
        with requests_mock.Mocker() as m:
            # The server advertises its version via a response header.
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/ping",
                status_code=204,
                headers={'X-Influxdb-Version': '1.2.3'},
            )
            self.assertEqual(self.cli.ping(), '1.2.3')

    def test_create_database(self):
        """Test create database for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.create_database('new_db')
            issued = m.last_request.qs['q'][0]
            self.assertEqual(issued, 'create database "new_db"')

    def test_create_numeric_named_database(self):
        """Test create db w/numeric name for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            # Numeric names must still be double-quoted in the query.
            self.cli.create_database('123')
            issued = m.last_request.qs['q'][0]
            self.assertEqual(issued, 'create database "123"')

    @raises(Exception)
    def test_create_database_fails(self):
        """Test create database fail for TestInfluxDBClient object."""
        mocked = _mocked_session(self.cli, 'post', 401)
        with mocked:
            self.cli.create_database('new_db')

    def test_drop_database(self):
        """Test drop database for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.drop_database('new_db')
            issued = m.last_request.qs['q'][0]
            self.assertEqual(issued, 'drop database "new_db"')

    def test_drop_measurement(self):
        """Test drop measurement for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.drop_measurement('new_measurement')
            issued = m.last_request.qs['q'][0]
            self.assertEqual(issued, 'drop measurement "new_measurement"')

    def test_drop_numeric_named_database(self):
        """Test drop numeric db for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            # Numeric names must still be double-quoted in the query.
            self.cli.drop_database('123')
            issued = m.last_request.qs['q'][0]
            self.assertEqual(issued, 'drop database "123"')

    def test_get_list_database(self):
        """Test get list of databases for TestInfluxDBClient object."""
        data = {
            'results': [{
                'series': [{
                    'name': 'databases',
                    'values': [['new_db_1'], ['new_db_2']],
                    'columns': ['name']
                }]
            }]
        }

        expected = [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(self.cli.get_list_database(), expected)

    @raises(Exception)
    def test_get_list_database_fails(self):
        """Test get list of dbs fail for TestInfluxDBClient object."""
        client = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(client, 'get', 401):
            client.get_list_database()

    def test_get_list_measurements(self):
        """Test get list of measurements for TestInfluxDBClient object."""
        data = {
            "results": [{
                "series": [{
                    "name": "measurements",
                    "columns": ["name"],
                    "values": [["cpu"], ["disk"]]
                }]
            }]
        }

        expected = [{'name': 'cpu'}, {'name': 'disk'}]
        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(self.cli.get_list_measurements(), expected)

    def test_create_retention_policy_default(self):
        """Test create default ret policy for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.create_retention_policy(
                'somename', '1d', 4, default=True, database='db')

            # default=True must append the "default" clause.
            expected = ('create retention policy "somename" on '
                        '"db" duration 1d replication 4 default')
            self.assertEqual(m.last_request.qs['q'][0], expected)

    def test_create_retention_policy(self):
        """Test create retention policy for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.create_retention_policy(
                'somename', '1d', 4, database='db')

            expected = ('create retention policy "somename" on '
                        '"db" duration 1d replication 4')
            self.assertEqual(m.last_request.qs['q'][0], expected)

    def test_alter_retention_policy(self):
        """Test alter retention policy for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')

            # Each keyword alters exactly one clause of the policy.
            self.cli.alter_retention_policy('somename', 'db', duration='4d')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" duration 4d')

            self.cli.alter_retention_policy('somename', 'db', replication=4)
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" replication 4')

            self.cli.alter_retention_policy('somename', 'db', default=True)
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" default')

    @raises(Exception)
    def test_alter_retention_policy_invalid(self):
        """Test invalid alter ret policy for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call through the locally mocked client: the original
            # called self.cli, so the 400 mock was never exercised and
            # the test passed for the wrong reason.
            cli.alter_retention_policy('somename', 'db')

    def test_drop_retention_policy(self):
        """Test drop retention policy for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.drop_retention_policy('somename', 'db')
            issued = m.last_request.qs['q'][0]
            self.assertEqual(issued,
                             'drop retention policy "somename" on "db"')

    @raises(Exception)
    def test_drop_retention_policy_fails(self):
        """Test failed drop ret policy for TestInfluxDBClient object."""
        client = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(client, 'delete', 401):
            client.drop_retention_policy('default', 'db')

    def test_get_list_retention_policies(self):
        """Test get retention policies for TestInfluxDBClient object."""
        example_response = \
            '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\
            ' "columns": ["name", "duration", "replicaN"]}]}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://*****:*****@mock.patch('requests.Session.request')
    def test_request_retry(self, mock_request):
        """Test that two connection errors will be handled."""
        class FlakyRequest(object):
            """Fail twice with ConnectionError, then return 204."""
            def __init__(self):
                self.calls = 0

            def __call__(self, *args, **kwargs):
                self.calls += 1
                if self.calls < 3:
                    raise requests.exceptions.ConnectionError
                response = requests.Response()
                response.status_code = 204
                return response

        mock_request.side_effect = FlakyRequest()

        cli = InfluxDBClient(database='db')
        # Must succeed: the default retry budget covers two failures.
        cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_request_retry_raises(self, mock_request):
        """Test that three requests errors will not be handled."""
        class FlakyRequest(object):
            """Raise HTTPError on the first three calls, then succeed."""
            def __init__(self):
                self.calls = 0

            def __call__(self, *args, **kwargs):
                self.calls += 1
                if self.calls < 4:
                    raise requests.exceptions.HTTPError
                response = requests.Response()
                response.status_code = 200
                return response

        mock_request.side_effect = FlakyRequest()

        cli = InfluxDBClient(database='db')

        # Three failures exhaust the retry budget before the success.
        with self.assertRaises(requests.exceptions.HTTPError):
            cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_random_request_retry(self, mock_request):
        """Test that a random number of connection errors will be handled."""
        class FlakyRequest(object):
            """Raise ConnectionError until ``retries`` calls are made."""
            def __init__(self, retries):
                self.calls = 0
                self.retries = retries

            def __call__(self, *args, **kwargs):
                self.calls += 1
                if self.calls < self.retries:
                    raise requests.exceptions.ConnectionError
                response = requests.Response()
                response.status_code = 204
                return response

        retries = random.randint(1, 5)
        mock_request.side_effect = FlakyRequest(retries)

        # The retry budget matches the failure count, so this succeeds.
        cli = InfluxDBClient(database='db', retries=retries)
        cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_random_request_retry_raises(self, mock_request):
        """Test a random number of conn errors plus one will not be handled."""
        class FlakyRequest(object):
            """Raise ConnectionError for ``retries`` calls, then succeed."""
            def __init__(self, retries):
                self.calls = 0
                self.retries = retries

            def __call__(self, *args, **kwargs):
                self.calls += 1
                if self.calls < self.retries + 1:
                    raise requests.exceptions.ConnectionError
                response = requests.Response()
                response.status_code = 200
                return response

        retries = random.randint(1, 5)
        mock_request.side_effect = FlakyRequest(retries)

        cli = InfluxDBClient(database='db', retries=retries)

        # One more failure than the retry budget -> the error escapes.
        with self.assertRaises(requests.exceptions.ConnectionError):
            cli.write_points(self.dummy_points)

    def test_get_list_users(self):
        """Test get users for TestInfluxDBClient object."""
        example_response = (
            '{"results":[{"series":[{"columns":["user","admin"],'
            '"values":[["test",false]]}]}]}')

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)

            # The mocked response lists exactly one non-admin user
            # "test"; the original expectation had been mangled to
            # '******' by a credential scrubber and could never match.
            self.assertListEqual(self.cli.get_list_users(), [{
                'user': 'test',
                'admin': False
            }])

    def test_get_list_users_empty(self):
        """Test get empty userlist for TestInfluxDBClient object."""
        # A series with columns but no "values" key means no users.
        example_response = (
            '{"results":[{"series":[{"columns":["user","admin"]}]}]}')

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response,
            )
            self.assertListEqual(self.cli.get_list_users(), [])

    def test_grant_admin_privileges(self):
        """Test grant admin privs for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.grant_admin_privileges('test')

            issued = m.last_request.qs['q'][0]
            self.assertEqual(issued, 'grant all privileges to "test"')

    @raises(Exception)
    def test_grant_admin_privileges_invalid(self):
        """Test grant invalid admin privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call through the locally mocked client: the original
            # called self.cli, so the 400 mock was never exercised.
            cli.grant_admin_privileges('')

    def test_revoke_admin_privileges(self):
        """Test revoke admin privs for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.revoke_admin_privileges('test')

            issued = m.last_request.qs['q'][0]
            self.assertEqual(issued, 'revoke all privileges from "test"')

    @raises(Exception)
    def test_revoke_admin_privileges_invalid(self):
        """Test revoke invalid admin privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call through the locally mocked client: the original
            # called self.cli, so the 400 mock was never exercised.
            cli.revoke_admin_privileges('')

    def test_grant_privilege(self):
        """Test grant privs for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.grant_privilege('read', 'testdb', 'test')

            issued = m.last_request.qs['q'][0]
            self.assertEqual(issued, 'grant read on "testdb" to "test"')

    @raises(Exception)
    def test_grant_privilege_invalid(self):
        """Test grant invalid privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # Call through the locally mocked client: the original
            # called self.cli, so the 400 mock was never exercised.
            cli.grant_privilege('', 'testdb', 'test')

    def test_revoke_privilege(self):
        """Test revoke privs for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.revoke_privilege('read', 'testdb', 'test')

            issued = m.last_request.qs['q'][0]
            self.assertEqual(issued, 'revoke read on "testdb" from "test"')

    @raises(Exception)
    def test_revoke_privilege_invalid(self):
        """Test revoke invalid privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            # BUG FIX: call the locally mocked client. The original called
            # ``self.cli``, which bypassed the 400-status mock entirely.
            cli.revoke_privilege('', 'testdb', 'test')

    def test_get_list_privileges(self):
        """Test get list of privs for TestInfluxDBClient object."""
        # (fixed docstring typo: "Tst" -> "Test")
        data = {
            'results': [{
                'series': [{
                    'columns': ['database', 'privilege'],
                    'values': [['db1', 'READ'], ['db2', 'ALL PRIVILEGES'],
                               ['db3', 'NO PRIVILEGES']]
                }]
            }]
        }

        # Each (database, privilege) row should map to a dict keyed by
        # column name.
        expected = [
            {'database': 'db1', 'privilege': 'READ'},
            {'database': 'db2', 'privilege': 'ALL PRIVILEGES'},
            {'database': 'db3', 'privilege': 'NO PRIVILEGES'},
        ]

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(self.cli.get_list_privileges('test'),
                                 expected)

    @raises(Exception)
    def test_get_list_privileges_fails(self):
        """A 401 response while listing privileges raises an exception."""
        client = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(client, 'get', 401):
            client.get_list_privileges('test')

    def test_invalid_port_fails(self):
        """A non-numeric port string is rejected at construction time."""
        self.assertRaises(
            ValueError,
            InfluxDBClient, 'host', '80/redir', 'username', 'password',
        )

    def test_chunked_response(self):
        """Test chunked response for TestInfluxDBClient object."""
        # (fixed docstring typo: "reponse" -> "response")
        # Four newline-separated JSON chunks; the first three are marked
        # "partial", the last completes the statement.
        example_response = \
            u'{"results":[{"statement_id":0,"series":' \
            '[{"name":"cpu","columns":["fieldKey","fieldType"],"values":' \
            '[["value","integer"]]}],"partial":true}]}\n{"results":' \
            '[{"statement_id":0,"series":[{"name":"iops","columns":' \
            '["fieldKey","fieldType"],"values":[["value","integer"]]}],' \
            '"partial":true}]}\n{"results":[{"statement_id":0,"series":' \
            '[{"name":"load","columns":["fieldKey","fieldType"],"values":' \
            '[["value","integer"]]}],"partial":true}]}\n{"results":' \
            '[{"statement_id":0,"series":[{"name":"memory","columns":' \
            '["fieldKey","fieldType"],"values":[["value","integer"]]}]}]}\n'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)
            response = self.cli.query('show series limit 4 offset 0',
                                      chunked=True,
                                      chunk_size=4)
            # assertEqual gives a useful failure message, unlike the
            # original assertTrue(len(response) == 4).
            self.assertEqual(len(response), 4)
            self.assertEqual(
                response.__repr__(),
                ResultSet({
                    'series': [{
                        'values': [['value', 'integer']],
                        'name': 'cpu',
                        'columns': ['fieldKey', 'fieldType']
                    }, {
                        'values': [['value', 'integer']],
                        'name': 'iops',
                        'columns': ['fieldKey', 'fieldType']
                    }, {
                        'values': [['value', 'integer']],
                        'name': 'load',
                        'columns': ['fieldKey', 'fieldType']
                    }, {
                        'values': [['value', 'integer']],
                        'name': 'memory',
                        'columns': ['fieldKey', 'fieldType']
                    }]
                }).__repr__())
Esempio n. 21
0
    # NOTE(review): this fragment is the body of a set-up routine whose
    # ``def`` line is not present in this excerpt; ``config`` is presumably
    # a parsed configuration mapping -- confirm against the caller.
    from influxdb import InfluxDBClient
    influxClient = InfluxDBClient(host=config["influxdb"]["host"],
                                  port=config["influxdb"]["port"],
                                  username=config["influxdb"]["user"],
                                  password=config["influxdb"]["pass"])

    # Only create and configure the database when it does not exist yet.
    createDb = True
    for db in influxClient.get_list_database():
        if db["name"] == config["influxdb"]["database"]:
            createDb = False
            break
    if createDb:
        influxClient.create_database(config["influxdb"]["database"])
        influxClient.switch_database(config["influxdb"]["database"])
        # Shorten the default "autogen" policy and add longer-lived
        # policies for downsampled data.
        influxClient.alter_retention_policy("autogen",
                                            duration="2h",
                                            replication=1,
                                            shard_duration="1h")
        influxClient.create_retention_policy("one_week",
                                             duration="1w",
                                             replication=1,
                                             shard_duration='24h')
        influxClient.create_retention_policy(
            "one_year",
            database=config["influxdb"]["database"],
            duration="365d",
            replication=1,
            shard_duration='1w')
        # Continuous query downsamples "lacrosse" readings to 3-minute
        # means stored under the "one_week" retention policy.
        influxClient.create_continuous_query(
            "three_min",
            'SELECT mean(T) as "T", mean(RH) as "RH", mean(AH) as "AH", mean(DEW) as "DEW" INTO "one_week"."lacrosse" from "lacrosse" GROUP BY time(3m),*'
        )
Esempio n. 22
0
class InfluxDBManager:
    """Thin wrapper around InfluxDBClient for the 'S4G' database.

    Ensures the database and its default retention policy exist on
    construction, and offers helpers to write/read time-series data and to
    convert plain Python structures into InfluxDB JSON point bodies.
    """

    def __init__(self):
        """Connect to InfluxDB and ensure database/retention policy exist."""
        # ``Constants`` is a project-level settings object providing
        # influx_host and influx_retention.
        self.influx_db = InfluxDBClient(Constants.influx_host,
                                        8086,
                                        database='S4G')
        self.influx_db.create_database('S4G')
        try:
            self.influx_db.create_retention_policy("raw_data_retention",
                                                   Constants.influx_retention,
                                                   replication="1",
                                                   database="S4G",
                                                   default=True)
        except Exception as e:
            # If the policy already exists, update its duration instead of
            # failing; any other error is re-raised implicitly by falling
            # through without handling.
            if "retention policy already exists" in str(e):
                self.influx_db.alter_retention_policy(
                    "raw_data_retention",
                    duration=Constants.influx_retention,
                    database="S4G",
                    default=True)

    def write(self, json_body):
        """Write a list of InfluxDB JSON points.

        Returns the client's write result on success, False on error, and
        None when ``json_body`` is None.
        """
        try:
            if json_body is not None:
                r = self.influx_db.write_points(json_body)
                print(r)
                return r
        except Exception as e:
            print("error " + str(e))
            logger.error("error writing influx " + str(e))
            return False

    def read(self,
             measurement_name,
             field,
             instance_id=None,
             start_time=None,
             end_time=None):
        """Query ``field`` from ``measurement_name`` as [timestamp, value] rows.

        ``start_time``/``end_time`` may be epoch numbers or datetimes (see
        get_time_string). Returns [] when nothing matches or on any error.
        """
        logger.debug("influx read " + str(measurement_name) + " " +
                     str(field) + " " + str(instance_id) + " " +
                     str(start_time) + " " + str(end_time))
        data = []
        where_count = 0
        try:
            if measurement_name is not None:
                q = None
                result = None
                try:
                    # NOTE(review): the query is built by plain string
                    # concatenation; inputs are assumed trusted here --
                    # confirm callers never pass user-controlled values.
                    q = "select " + str(field) + " from " + str(
                        measurement_name)
                    if instance_id or start_time or end_time:
                        q += " where"
                    if instance_id:
                        q += " instance_id='" + str(instance_id) + "'"
                        where_count += 1
                    start_time = self.get_time_string(start_time)
                    if start_time:
                        if where_count > 0:
                            q += " and"
                        q += " time>='" + str(start_time) + "'"
                        where_count += 1
                    end_time = self.get_time_string(end_time)
                    if end_time:
                        if where_count > 0:
                            q += " and"
                        q += " time<='" + str(end_time) + "'"
                        where_count += 1
                    q += ";"
                    logger.info(q)
                except Exception as e:
                    logger.error("error forming read query " +
                                 str(measurement_name) + " " + str(e))
                if q:
                    result = self.influx_db.query(q)
                if result:
                    for r in result.get_points():
                        # InfluxDB returns RFC3339 timestamps; convert to
                        # integer epoch seconds.
                        t = int(
                            datetime.datetime.strptime(str(
                                r["time"]), "%Y-%m-%dT%H:%M:%SZ").timestamp())
                        v = r[str(field)]
                        try:
                            v = float(v)
                        except Exception as e:
                            # Non-numeric values are kept as-is.
                            pass
                        if isinstance(v, str) and "," in v:
                            # Comma-separated field values become lists.
                            v = v.split(",")
                        data.append([t, v])
        except Exception as e:
            logger.error("error reading influx " + str(e) + " " +
                         str(measurement_name))
        return data

    def get_time_string(self, time):
        """Convert an epoch number or datetime to an RFC3339 string.

        Returns None for falsy or unsupported inputs.
        """
        t = None
        if time:
            if isinstance(time, int) or isinstance(time, float):
                t = datetime.datetime.fromtimestamp(time).strftime(
                    "%Y-%m-%dT%H:%M:%SZ")
            elif isinstance(time, datetime.datetime):
                t = time.strftime("%Y-%m-%dT%H:%M:%SZ")
        return t

    def timeseries_list_to_influx_json(self, data, measurement_name, field,
                                       instance_id):
        """Convert [[epoch, value], ...] rows into InfluxDB JSON points."""
        json_body = []
        for t, v in data:
            json_body.append({
                "measurement":
                measurement_name,
                "tags": {
                    "instance_id": instance_id
                },
                "time":
                datetime.datetime.fromtimestamp(t).strftime(
                    "%Y-%m-%dT%H:%M:%SZ"),
                "fields": {
                    field: v
                }
            })
        print(len(json_body))
        return json_body

    def timeseries_dict_to_influx_json(self, data, measurement_name,
                                       instance_id):
        """Convert {epoch: fields_dict} items into InfluxDB JSON points.

        Unlike timeseries_list_to_influx_json, each value is used directly
        as the point's ``fields`` mapping.
        """
        json_body = []
        for t, v in data.items():
            json_body.append({
                "measurement":
                measurement_name,
                "tags": {
                    "instance_id": instance_id
                },
                "time":
                datetime.datetime.fromtimestamp(t).strftime(
                    "%Y-%m-%dT%H:%M:%SZ"),
                "fields":
                v
            })
        print(len(json_body))
        return json_body
Esempio n. 23
0
class Output(cowrie.core.output.Output):
    """
    influx output

    Cowrie output plugin that maps honeypot events onto InfluxDB
    measurements (one measurement per event id).
    """
    def start(self):
        """Connect to InfluxDB and ensure database/retention policy exist."""
        host = CowrieConfig().get("output_influx", "host", fallback="")
        port = CowrieConfig().getint("output_influx", "port", fallback=8086)
        ssl = CowrieConfig().getboolean("output_influx", "ssl", fallback=False)

        self.client = None
        try:
            self.client = InfluxDBClient(host=host,
                                         port=port,
                                         ssl=ssl,
                                         verify_ssl=ssl)
        except InfluxDBClientError as e:
            log.msg(f"output_influx: I/O error({e.code}): '{e.message}'")
            return

        if self.client is None:
            log.msg("output_influx: cannot instantiate client!")
            return

        # Optional authentication: only switch user when both credentials
        # are configured.
        if CowrieConfig().has_option("output_influx",
                                     "username") and CowrieConfig().has_option(
                                         "output_influx", "password"):
            username = CowrieConfig().get("output_influx", "username")
            password = CowrieConfig().get("output_influx",
                                          "password",
                                          raw=True)
            self.client.switch_user(username, password)

        try:
            dbname = CowrieConfig().get("output_influx", "database_name")
        except Exception:
            dbname = "cowrie"

        retention_policy_duration_default = "12w"
        retention_policy_name = dbname + "_retention_policy"

        if CowrieConfig().has_option("output_influx",
                                     "retention_policy_duration"):
            retention_policy_duration = CowrieConfig().get(
                "output_influx", "retention_policy_duration")

            match = re.search(r"^\d+[dhmw]{1}$", retention_policy_duration)
            if not match:
                # BUG FIX: the message previously interpolated the invalid
                # configured value instead of the default actually being
                # applied (and lacked a space after "policy.").
                log.msg(
                    ("output_influx: invalid retention policy. "
                     "Using default '{}'..").format(
                         retention_policy_duration_default))
                retention_policy_duration = retention_policy_duration_default
        else:
            retention_policy_duration = retention_policy_duration_default

        database_list = self.client.get_list_database()
        dblist = [str(elem["name"]) for elem in database_list]

        if dbname not in dblist:
            # Fresh database: create it together with its default policy.
            self.client.create_database(dbname)
            self.client.create_retention_policy(
                retention_policy_name,
                retention_policy_duration,
                1,
                database=dbname,
                default=True,
            )
        else:
            # Existing database: create or update the retention policy.
            retention_policies_list = self.client.get_list_retention_policies(
                database=dbname)
            rplist = [str(elem["name"]) for elem in retention_policies_list]
            if retention_policy_name not in rplist:
                self.client.create_retention_policy(
                    retention_policy_name,
                    retention_policy_duration,
                    1,
                    database=dbname,
                    default=True,
                )
            else:
                self.client.alter_retention_policy(
                    retention_policy_name,
                    database=dbname,
                    duration=retention_policy_duration,
                    replication=1,
                    default=True,
                )

        self.client.switch_database(dbname)

    def stop(self):
        """Nothing to tear down; the client needs no explicit close."""
        pass

    def write(self, entry):
        """Translate one cowrie event dict into an InfluxDB point and write it.

        Unhandled event ids are logged and skipped.
        """
        if self.client is None:
            log.msg("output_influx: client object is not instantiated")
            return

        # event id
        eventid = entry["eventid"]

        # measurement init
        m = {
            "measurement": eventid.replace(".", "_"),
            "tags": {
                "session": entry["session"],
                "src_ip": entry["src_ip"]
            },
            "fields": {
                "sensor": self.sensor
            },
        }

        # event parsing
        if eventid in ["cowrie.command.failed", "cowrie.command.input"]:
            m["fields"].update({
                "input": entry["input"],
            })

        elif eventid == "cowrie.session.connect":
            m["fields"].update({
                "protocol": entry["protocol"],
                "src_port": entry["src_port"],
                "dst_port": entry["dst_port"],
                "dst_ip": entry["dst_ip"],
            })

        elif eventid in ["cowrie.login.success", "cowrie.login.failed"]:
            m["fields"].update({
                "username": entry["username"],
                "password": entry["password"],
            })

        elif eventid == "cowrie.session.file_download":
            m["fields"].update({
                "shasum": entry.get("shasum"),
                "url": entry.get("url"),
                "outfile": entry.get("outfile"),
            })

        elif eventid == "cowrie.session.file_download.failed":
            m["fields"].update({"url": entry.get("url")})

        elif eventid == "cowrie.session.file_upload":
            m["fields"].update({
                "shasum": entry.get("shasum"),
                "outfile": entry.get("outfile"),
            })

        elif eventid == "cowrie.session.closed":
            m["fields"].update({"duration": entry["duration"]})

        elif eventid == "cowrie.client.version":
            m["fields"].update({
                "version": ",".join(entry["version"]),
            })

        elif eventid == "cowrie.client.kex":
            m["fields"].update({
                "maccs": ",".join(entry["macCS"]),
                "kexalgs": ",".join(entry["kexAlgs"]),
                "keyalgs": ",".join(entry["keyAlgs"]),
                "compcs": ",".join(entry["compCS"]),
                "enccs": ",".join(entry["encCS"]),
            })

        elif eventid == "cowrie.client.size":
            m["fields"].update({
                "height": entry["height"],
                "width": entry["width"],
            })

        elif eventid == "cowrie.client.var":
            m["fields"].update({
                "name": entry["name"],
                "value": entry["value"],
            })

        elif eventid == "cowrie.client.fingerprint":
            m["fields"].update({"fingerprint": entry["fingerprint"]})

            # cowrie.direct-tcpip.data, cowrie.direct-tcpip.request
            # cowrie.log.closed
            # are not implemented
        else:
            # other events should be handled
            log.msg(
                f"output_influx: event '{eventid}' not handled. Skipping..")
            return

        result = self.client.write_points([m])

        if not result:
            log.msg("output_influx: error when writing '{}' measurement"
                    "in the db.".format(eventid))
Esempio n. 24
0
class Output(cowrie.core.output.Output):
    """Cowrie output plugin that writes honeypot events to InfluxDB."""

    def __init__(self):
        cowrie.core.output.Output.__init__(self)

    def start(self):
        """Connect to InfluxDB and ensure database/retention policy exist."""
        # BUG FIX (3x below): bare ``except:`` also swallowed
        # SystemExit/KeyboardInterrupt; narrowed to ``except Exception``.
        try:
            host = CONFIG.get('output_influx', 'host')
        except Exception:
            host = ''

        try:
            port = CONFIG.getint('output_influx', 'port')
        except Exception:
            port = 8086

        self.client = None
        try:
            self.client = InfluxDBClient(host=host, port=port)
        except InfluxDBClientError as e:
            log.err("output_influx: I/O error({0}): '{1}'".format(
                e.errno, e.strerror))
            return

        if self.client is None:
            log.err("output_influx: cannot instantiate client!")
            return

        # Optional authentication: only switch user when both credentials
        # are configured.
        if (CONFIG.has_option('output_influx', 'username')
                and CONFIG.has_option('output_influx', 'password')):
            username = CONFIG.get('output_influx', 'username')
            password = CONFIG.get('output_influx', 'password', raw=True)
            self.client.switch_user(username, password)

        try:
            dbname = CONFIG.get('output_influx', 'database_name')
        except Exception:
            dbname = 'cowrie'

        retention_policy_duration_default = '12w'
        retention_policy_name = dbname + "_retention_policy"

        if CONFIG.has_option('output_influx', 'retention_policy_duration'):
            retention_policy_duration = CONFIG.get(
                'output_influx', 'retention_policy_duration')

            # BUG FIX: raw string -- '\d' in a plain string is an invalid
            # escape sequence (DeprecationWarning, SyntaxError in future).
            match = re.search(r'^\d+[dhmw]{1}$', retention_policy_duration)
            if not match:
                # BUG FIX: the message previously interpolated the invalid
                # configured value instead of the default actually applied
                # (and lacked a space after "policy.").
                log.err(
                    ("output_influx: invalid retention policy. "
                     "Using default '{}'..").format(
                         retention_policy_duration_default))
                retention_policy_duration = retention_policy_duration_default
        else:
            retention_policy_duration = retention_policy_duration_default

        database_list = self.client.get_list_database()
        dblist = [str(elem['name']) for elem in database_list]

        if dbname not in dblist:
            # Fresh database: create it together with its default policy.
            self.client.create_database(dbname)
            self.client.create_retention_policy(retention_policy_name,
                                                retention_policy_duration,
                                                1,
                                                database=dbname,
                                                default=True)
        else:
            # Existing database: create or update the retention policy.
            retention_policies_list = self.client.get_list_retention_policies(
                database=dbname)
            rplist = [str(elem['name']) for elem in retention_policies_list]
            if retention_policy_name not in rplist:
                self.client.create_retention_policy(retention_policy_name,
                                                    retention_policy_duration,
                                                    1,
                                                    database=dbname,
                                                    default=True)
            else:
                self.client.alter_retention_policy(
                    retention_policy_name,
                    database=dbname,
                    duration=retention_policy_duration,
                    replication=1,
                    default=True)

        self.client.switch_database(dbname)

    def stop(self):
        """Nothing to tear down; the client needs no explicit close."""
        pass

    def write(self, entry):
        """Translate one cowrie event dict into an InfluxDB point and write it.

        Unhandled event ids are logged and skipped.
        """
        if self.client is None:
            log.err("output_influx: client object is not instantiated")
            return

        # event id
        eventid = entry['eventid']

        # measurement init
        m = {
            'measurement': eventid.replace('.', '_'),
            'tags': {
                'session': entry['session'],
                'src_ip': entry['src_ip']
            },
            'fields': {
                'sensor': self.sensor
            },
        }

        # event parsing
        if eventid in ['cowrie.command.failed', 'cowrie.command.input']:
            m['fields'].update({
                'input': entry['input'],
            })

        elif eventid == 'cowrie.session.connect':
            m['fields'].update({
                'protocol': entry['protocol'],
                'src_port': entry['src_port'],
                'dst_port': entry['dst_port'],
                'dst_ip': entry['dst_ip'],
            })

        elif eventid in ['cowrie.login.success', 'cowrie.login.failed']:
            m['fields'].update({
                'username': entry['username'],
                'password': entry['password'],
            })

        elif eventid == 'cowrie.session.file_download':
            m['fields'].update({
                'shasum': entry.get('shasum'),
                'url': entry.get('url'),
                'outfile': entry.get('outfile')
            })

        elif eventid == 'cowrie.session.file_download.failed':
            m['fields'].update({'url': entry.get('url')})

        elif eventid == 'cowrie.session.file_upload':
            m['fields'].update({
                'shasum': entry.get('shasum'),
                'outfile': entry.get('outfile'),
            })

        elif eventid == 'cowrie.session.closed':
            m['fields'].update({'duration': entry['duration']})

        elif eventid == 'cowrie.client.version':
            # NOTE(review): kex-related fields are recorded under the
            # version event in this revision; newer revisions split them
            # into a separate cowrie.client.kex event.
            m['fields'].update({
                'maccs': ','.join(entry['macCS']),
                'kexalgs': ','.join(entry['kexAlgs']),
                'keyalgs': ','.join(entry['keyAlgs']),
                'version': ','.join(entry['version']),
                'compcs': ','.join(entry['compCS']),
                'enccs': ','.join(entry['encCS'])
            })

        elif eventid == 'cowrie.client.size':
            m['fields'].update({
                'height': entry['height'],
                'width': entry['width'],
            })

        elif eventid == 'cowrie.client.var':
            m['fields'].update({
                'name': entry['name'],
                'value': entry['value'],
            })

        elif eventid == 'cowrie.client.fingerprint':
            m['fields'].update({'fingerprint': entry['fingerprint']})

            # cowrie.direct-tcpip.data, cowrie.direct-tcpip.request
            # cowrie.log.closed cowrie.log.open
            # are not implemented
        else:
            # other events should be handled
            log.err("output_influx: event '{}' not handled. Skipping..".format(
                eventid))
            return

        result = self.client.write_points([m])

        if not result:
            log.err("output_influx: error when writing '{}' measurement"
                    "in the db.".format(eventid))
Esempio n. 25
0
class TestInfluxDBClient(unittest.TestCase):

    def setUp(self):
        """Initialize the shared client and fixture data for each test."""
        # By default, raise exceptions on warnings
        warnings.simplefilter('error', FutureWarning)

        self.cli = InfluxDBClient('localhost', 8086, 'username', 'password')
        # A single well-formed point reused by the write tests below.
        self.dummy_points = [
            {
                "measurement": "cpu_load_short",
                "tags": {
                    "host": "server01",
                    "region": "us-west"
                },
                "time": "2009-11-10T23:00:00.123456Z",
                "fields": {
                    "value": 0.64
                }
            }
        ]

        # BUG FIX: the credentials had been redacted to '*****:*****',
        # which breaks test_dsn -- it asserts username 'uSr' and password
        # 'pWd' (dummy values, not real secrets).
        self.dsn_string = 'influxdb://uSr:pWd@host:1886/db'

    def test_scheme(self):
        """The ssl flag selects the expected base-URL scheme."""
        plain = InfluxDBClient('host', 8086, 'username', 'password',
                               'database')
        self.assertEqual('http://host:8086', plain._baseurl)

        secure = InfluxDBClient('host', 8086, 'username', 'password',
                                'database', ssl=True)
        self.assertEqual('https://host:8086', secure._baseurl)

    def test_dsn(self):
        """Clients built from DSN strings pick up scheme, creds and db."""
        cli = InfluxDBClient.from_DSN(self.dsn_string)
        self.assertEqual('http://host:1886', cli._baseurl)
        self.assertEqual('uSr', cli._username)
        self.assertEqual('pWd', cli._password)
        self.assertEqual('db', cli._database)
        self.assertFalse(cli.use_udp)

        udp_cli = InfluxDBClient.from_DSN('udp+' + self.dsn_string)
        self.assertTrue(udp_cli.use_udp)

        https_cli = InfluxDBClient.from_DSN('https+' + self.dsn_string)
        self.assertEqual('https://host:1886', https_cli._baseurl)

        # Explicit keyword overrides the scheme prefix.
        plain_cli = InfluxDBClient.from_DSN('https+' + self.dsn_string,
                                            ssl=False)
        self.assertEqual('http://host:1886', plain_cli._baseurl)

    def test_switch_database(self):
        """switch_database updates the client's current database."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        client.switch_database('another_database')
        self.assertEqual('another_database', client._database)

    def test_switch_user(self):
        """switch_user updates the client's stored credentials."""
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'database')
        client.switch_user('another_username', 'another_password')
        self.assertEqual('another_username', client._username)
        self.assertEqual('another_password', client._password)

    def test_write(self):
        """The low-level write() serializes a point to line protocol."""
        body = {
            "database": "mydb",
            "retentionPolicy": "mypolicy",
            "points": [{
                "measurement": "cpu_load_short",
                "tags": {"host": "server01", "region": "us-west"},
                "time": "2009-11-10T23:00:00Z",
                "fields": {"value": 0.64},
            }],
        }

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)
            client = InfluxDBClient(database='db')
            client.write(body)

            self.assertEqual(
                mocker.last_request.body,
                b"cpu_load_short,host=server01,region=us-west "
                b"value=0.64 1257894000000000000\n",
            )

    def test_write_points(self):
        """write_points serializes the fixture point to line protocol."""
        with requests_mock.Mocker() as mocker:
            mocker.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)

            client = InfluxDBClient(database='db')
            client.write_points(self.dummy_points)

            sent = mocker.last_request.body.decode('utf-8')
            self.assertEqual(
                'cpu_load_short,host=server01,region=us-west '
                'value=0.64 1257894000123456000\n',
                sent,
            )

    def test_write_points_toplevel_attributes(self):
        """Top-level database/tags/retention_policy are merged into points."""
        with requests_mock.Mocker() as mocker:
            mocker.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)

            client = InfluxDBClient(database='db')
            client.write_points(self.dummy_points,
                                database='testdb',
                                tags={"tag": "hello"},
                                retention_policy="somepolicy")

            sent = mocker.last_request.body.decode('utf-8')
            self.assertEqual(
                'cpu_load_short,host=server01,region=us-west,tag=hello '
                'value=0.64 1257894000123456000\n',
                sent,
            )

    def test_write_points_batch(self):
        """batch_size splits the points across multiple write requests."""
        points = [
            {"measurement": "cpu_usage", "tags": {"unit": "percent"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
            {"measurement": "network", "tags": {"direction": "in"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
            {"measurement": "network", "tags": {"direction": "out"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}},
        ]

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(requests_mock.POST,
                                "http://localhost:8086/write",
                                status_code=204)
            client = InfluxDBClient(database='db')
            client.write_points(points=points,
                                database='db',
                                tags={"host": "server01",
                                      "region": "us-west"},
                                batch_size=2)

        # Three points with batch_size=2 means exactly two HTTP requests,
        # the second carrying only the final point.
        self.assertEqual(mocker.call_count, 2)
        self.assertEqual(
            "network,direction=out,host=server01,region=us-west "
            "value=12.0 1257894000000000000\n",
            mocker.last_request.body.decode('utf-8'))

    def test_write_points_udp(self):
        """Write points via UDP and verify the datagram payload."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            # BUG FIX: bind to port 0 so the OS picks a free ephemeral
            # port. The original random.randint(4000, 8000) could collide
            # with a port already in use and make the test flaky.
            s.bind(('0.0.0.0', 0))
            port = s.getsockname()[1]

            cli = InfluxDBClient(
                'localhost', 8086, 'root', 'root',
                'test', use_udp=True, udp_port=port
            )
            cli.write_points(self.dummy_points)

            received_data, addr = s.recvfrom(1024)
        finally:
            # BUG FIX: the socket was previously leaked.
            s.close()

        self.assertEqual(
            'cpu_load_short,host=server01,region=us-west '
            'value=0.64 1257894000123456000\n',
            received_data.decode()
        )

    def test_write_bad_precision_udp(self):
        """Sub-second precision over UDP must be rejected."""
        cli = InfluxDBClient(
            'localhost', 8086, 'root', 'root',
            'test', use_udp=True, udp_port=4444
        )

        # BUG FIX: assertRaisesRegexp is a deprecated alias (removed in
        # Python 3.12); use assertRaisesRegex.
        with self.assertRaisesRegex(
                Exception,
                "InfluxDB only supports seconds precision for udp writes"
        ):
            cli.write_points(
                self.dummy_points,
                time_precision='ms'
            )

    @raises(Exception)
    def test_write_points_fails(self):
        """A 500 response from the server must surface as an exception."""
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(client, 'post', 500):
            client.write_points([])

    def test_write_points_with_precision(self):
        """Each supported precision serializes the timestamp accordingly."""
        # (precision, serialized timestamp) pairs, in the order the
        # original test exercised them.
        cases = [
            ('n', b'1257894000123456000'),
            ('u', b'1257894000123456'),
            ('ms', b'1257894000123'),
            ('s', b'1257894000'),
            ('m', b'20964900'),
            ('h', b'349415'),
        ]

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/write",
                status_code=204
            )

            cli = InfluxDBClient(database='db')

            for precision, timestamp in cases:
                cli.write_points(self.dummy_points, time_precision=precision)
                self.assertEqual(
                    b'cpu_load_short,host=server01,region=us-west '
                    b'value=0.64 ' + timestamp + b'\n',
                    m.last_request.body,
                )

    def test_write_points_bad_precision(self):
        """An unknown precision must raise with a helpful message.

        Fixes: ``assertRaisesRegexp`` was removed in Python 3.12 (use
        ``assertRaisesRegex``), and the pattern is now a raw string so
        the escaped parentheses are regex escapes rather than invalid
        string escapes (a SyntaxWarning on modern Python).
        """
        cli = InfluxDBClient()
        with self.assertRaisesRegex(
            Exception,
            r"Invalid time precision is given. "
            r"\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"
        ):
            cli.write_points(
                self.dummy_points,
                time_precision='g'
            )

    @raises(Exception)
    def test_write_points_with_precision_fails(self):
        """A 500 response must propagate as an exception."""
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(client, 'post', 500):
            client.write_points_with_precision([])

    def test_query(self):
        """Query should parse the mocked JSON into result sets.

        NOTE(review): the mock URL and the tail of this test were
        destroyed by a credential scrubber (the line was fused with the
        decorator of the next test). Reconstructed from the sibling
        tests' "http://localhost:8086/query" pattern and the payload
        above — confirm against upstream history.
        """
        example_response = (
            '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
            '"columns": ["time", "value"], "values": '
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
            '[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )
            rs = self.cli.query('select * from foo')

            self.assertListEqual(
                list(rs[0].get_points()),
                [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]
            )

    @unittest.skip('Not implemented for 0.9')
    def test_query_chunked(self):
        """Chunked queries should return one decoded object per chunk."""
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [
                [1415206250119, 40001, 667],
                [1415206244555, 30001, 7],
                [1415206228241, 20001, 788],
                [1415206212980, 10001, 555],
                [1415197271586, 10001, 23]
            ],
            'measurement': 'foo',
            'columns': [
                'time',
                'sequence_number',
                'val'
            ]
        }
        # Two concatenated JSON documents simulate two response chunks.
        example_response = \
            json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/db/db/series",
                text=example_response
            )

            self.assertListEqual(
                cli.query('select * from foo', chunked=True),
                [example_object, example_object]
            )

    @raises(Exception)
    def test_query_fail(self):
        """A 401 response while querying must raise."""
        with _mocked_session(self.cli, 'get', 401):
            self.cli.query('select column_one from foo;')

    def test_create_database(self):
        """create_database should send CREATE DATABASE as the 'q' param."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.create_database('new_db')

            issued_query = m.last_request.qs['q'][0]
            self.assertEqual('create database new_db', issued_query)

    @raises(Exception)
    def test_create_database_fails(self):
        """A 401 response during CREATE DATABASE must raise.

        NOTE(review): create_database issues a GET to /query (see the
        test above), yet 'post' is mocked here — confirm that
        _mocked_session patches regardless of method, otherwise this
        test passes via a real connection error rather than the 401.
        """
        with _mocked_session(self.cli, 'post', 401):
            self.cli.create_database('new_db')

    def test_drop_database(self):
        """drop_database should send DROP DATABASE as the 'q' param."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.drop_database('new_db')

            issued_query = m.last_request.qs['q'][0]
            self.assertEqual('drop database new_db', issued_query)

    @raises(Exception)
    def test_drop_database_fails(self):
        """A 401 response during DROP DATABASE must raise.

        NOTE(review): 'delete' is the mocked method while drop_database
        issues a GET (see test_drop_database) — confirm _mocked_session
        semantics; the exception may come from a real connection error.
        """
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(client, 'delete', 401):
            client.drop_database('old_db')

    def test_get_list_database(self):
        """get_list_database maps each row onto a {'name': ...} dict."""
        data = {'results': [{'series': [{
            'name': 'databases',
            'columns': ['name'],
            'values': [['new_db_1'], ['new_db_2']],
        }]}]}

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            expected = [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
            self.assertListEqual(expected, self.cli.get_list_database())

    @raises(Exception)
    def test_get_list_database_fails(self):
        """A 401 response while listing databases must raise."""
        client = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(client, 'get', 401):
            client.get_list_database()

    def test_get_list_series(self):
        """get_list_series maps series values onto tag dictionaries."""
        example_response = (
            '{"results": [{"series": [{"name": "cpu_load_short", "columns": '
            '["_id", "host", "region"], "values": '
            '[[1, "server01", "us-west"]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )

            expected = [{
                'name': 'cpu_load_short',
                'tags': [{'host': 'server01', '_id': 1, 'region': 'us-west'}],
            }]
            self.assertListEqual(expected, self.cli.get_list_series())

    def test_create_retention_policy_default(self):
        """default=True must append the DEFAULT clause to the statement."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.create_retention_policy(
                'somename', '1d', 4, default=True, database='db'
            )

            expected = (
                'create retention policy somename on '
                'db duration 1d replication 4 default'
            )
            self.assertEqual(expected, m.last_request.qs['q'][0])

    def test_create_retention_policy(self):
        """Without default=True no DEFAULT clause is appended."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.create_retention_policy(
                'somename', '1d', 4, database='db'
            )

            expected = (
                'create retention policy somename on '
                'db duration 1d replication 4'
            )
            self.assertEqual(expected, m.last_request.qs['q'][0])

    def test_alter_retention_policy(self):
        """Each keyword maps to the matching ALTER RETENTION POLICY clause."""
        # (kwargs, expected statement) pairs in the original order:
        # duration, then replication, then default.
        cases = [
            (dict(duration='4d'),
             'alter retention policy somename on db duration 4d'),
            (dict(replication=4),
             'alter retention policy somename on db replication 4'),
            (dict(default=True),
             'alter retention policy somename on db default'),
        ]

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            for kwargs, expected in cases:
                self.cli.alter_retention_policy('somename', 'db', **kwargs)
                self.assertEqual(expected, m.last_request.qs['q'][0])

    @raises(Exception)
    def test_alter_retention_policy_invalid(self):
        """A 400 response while altering a policy must raise.

        Fix: the original attached the mocked session to the local
        ``cli`` but invoked the method on ``self.cli``, so the mocked
        400 was never exercised and the test could only pass via an
        unrelated connection error.
        """
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            cli.alter_retention_policy('somename', 'db')

    def test_get_list_retention_policies(self):
        """get_list_retention_policies should decode the policy rows.

        NOTE(review): the mock URL and the assertion tail were destroyed
        by a credential scrubber (the line was fused with the decorator
        of the next test). Reconstructed from the sibling tests'
        "http://localhost:8086/query" pattern and the columns of the
        mocked response — confirm against upstream history.
        """
        example_response = (
            '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'
            ' "columns": ["name", "duration", "replicaN"]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )
            self.assertListEqual(
                self.cli.get_list_retention_policies(),
                [{'duration': '24h0m0s', 'name': 'fsfdsdf', 'replicaN': 2}]
            )

    @mock.patch('requests.Session.request')
    def test_request_retry(self, mock_request):
        """Tests that two connection errors will be handled."""

        class CustomMock(object):
            # Number of times the mocked request has been invoked.
            i = 0

            def connection_error(self, *args, **kwargs):
                self.i += 1

                # Fail the first two calls, then return a successful
                # 204 (write accepted) response.
                if self.i < 3:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 204
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')
        cli.write_points(
            self.dummy_points
        )

    @mock.patch('requests.Session.request')
    def test_request_retry_raises(self, mock_request):
        """Tests that three connection errors will not be handled."""
        attempts = [0]

        def flaky_request(*args, **kwargs):
            # Raise on the first three calls; the client gives up before
            # the fourth (successful) call would be reached.
            attempts[0] += 1
            if attempts[0] < 4:
                raise requests.exceptions.ConnectionError
            response = requests.Response()
            response.status_code = 200
            return response

        mock_request.side_effect = flaky_request

        cli = InfluxDBClient(database='db')

        with self.assertRaises(requests.exceptions.ConnectionError):
            cli.write_points(self.dummy_points)

    def test_get_list_users(self):
        """get_list_users should decode the user rows from the response.

        Fix: the expected username had been replaced with asterisks by a
        credential scrubber; the mocked response clearly carries the
        value "test", so the assertion could never pass as written.
        """
        example_response = (
            '{"results":[{"series":[{"columns":["user","admin"],'
            '"values":[["test",false]]}]}]}'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text=example_response
            )

            self.assertListEqual(
                self.cli.get_list_users(),
                [{'user': 'test', 'admin': False}]
            )

    def test_get_list_users_empty(self):
        """A series with columns but no values yields an empty user list."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{"series":[{"columns":["user","admin"]}]}]}'
            )

            self.assertListEqual([], self.cli.get_list_users())

    def test_revoke_admin_privileges(self):
        """revoke_admin_privileges issues REVOKE ALL PRIVILEGES FROM."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.revoke_admin_privileges('test')

            issued_query = m.last_request.qs['q'][0]
            self.assertEqual('revoke all privileges from test', issued_query)

    @raises(Exception)
    def test_revoke_admin_privileges_invalid(self):
        """A 400 response while revoking admin privileges must raise.

        Fix: the original mocked the local ``cli`` but invoked the
        method on ``self.cli``, so the mocked 400 was never exercised.
        """
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            cli.revoke_admin_privileges('')

    def test_grant_privilege(self):
        """grant_privilege issues GRANT <privilege> ON <db> TO <user>."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.grant_privilege('read', 'testdb', 'test')

            issued_query = m.last_request.qs['q'][0]
            self.assertEqual('grant read on testdb to test', issued_query)

    @raises(Exception)
    def test_grant_privilege_invalid(self):
        """A 400 response while granting a privilege must raise.

        Fix: the original mocked the local ``cli`` but invoked the
        method on ``self.cli``, so the mocked 400 was never exercised.
        """
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            cli.grant_privilege('', 'testdb', 'test')

    def test_revoke_privilege(self):
        """revoke_privilege issues REVOKE <privilege> ON <db> FROM <user>."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            self.cli.revoke_privilege('read', 'testdb', 'test')

            issued_query = m.last_request.qs['q'][0]
            self.assertEqual('revoke read on testdb from test', issued_query)

    @raises(Exception)
    def test_revoke_privilege_invalid(self):
        """A 400 response while revoking a privilege must raise.

        Fix: the original mocked the local ``cli`` but invoked the
        method on ``self.cli``, so the mocked 400 was never exercised.
        """
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            cli.revoke_privilege('', 'testdb', 'test')
# Esempio n. 26 (0) — listing artifact from the source scrape, kept as a comment
class Influx_Handler(object):
    """Thin convenience wrapper around InfluxDBClient.

    Remembers a default database and retention policy for writes;
    everything else delegates directly to the underlying client.
    """

    def __init__(self, host, username, password, database, retention):
        """Connect over HTTPS and remember write defaults.

        NOTE(review): verify_ssl=False disables certificate checking;
        the InsecureRequestWarning is silenced on purpose below.
        """
        self.client = InfluxDBClient(host=host,
                                     username=username,
                                     password=password,
                                     ssl=True,
                                     verify_ssl=False)
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        self.database = database
        self.retention = retention

    # database stuff
    def create_database(self, data_base):
        """Create the named database; returns the client's result."""
        return self.client.create_database(data_base)

    def drop_database(self, data_base):
        """Drop the named database; returns the client's result."""
        return self.client.drop_database(data_base)

    def switch_database(self, data_base):
        """Switch the client's active database."""
        self.client.switch_database(data_base)

    def list_databases(self):
        """Return the list of databases known to the server."""
        return self.client.get_list_database()

    def write_point(self, data, tags=None):
        """Timestamp *data*, attach *tags*, and write it using the
        default database and retention policy.

        Fix: *tags* used a mutable default argument ({}); the shared
        dict could leak state between calls.
        """
        if tags is None:
            tags = {}

        # NOTE(review): utcnow() produces a naive timestamp and is
        # deprecated in Python 3.12; kept to preserve the exact
        # timestamp string written so far.
        data["time"] = datetime.datetime.utcnow().isoformat(sep='T')
        data["tags"] = tags

        return self.client.write_points(points=[data],
                                        database=self.database,
                                        retention_policy=self.retention)

    def insert_point(self,
                     measurement="",
                     tags=None,
                     fields=None,
                     db=None,
                     retention=None):
        """Build a point from the given parts and write it to *db*.

        Fix: *tags* and *fields* used mutable default arguments.
        """
        point_tags = {} if tags is None else tags
        data = {
            "measurement": measurement,
            "tags": point_tags,
            "fields": {} if fields is None else fields,
            # See the NOTE in write_point about utcnow().
            "time": datetime.datetime.utcnow().isoformat(sep='T'),
        }

        return self.client.write_points(points=[data],
                                        database=db,
                                        tags=point_tags,
                                        retention_policy=retention)

    # point maintenance
    def delete_series(self, measurement, tags=None, db=None):
        """Delete a series from *db*.

        Fix: the original signature omitted ``self``, so the reference
        to ``self.client`` raised NameError on every call.
        """
        return self.client.delete_point_series(measurement, tags, db)

    def delete_measurement(self, measurement):
        """Drop an entire measurement."""
        return self.client.drop_measurement(measurement)

    def list_measurements(self):
        """Return the list of measurements."""
        return self.client.get_list_measurements()

    # query stuff -- returns results set see notes at top
    def query(self, query_string):
        """Run *query_string*; the last result set is kept on self.results."""
        self.results = self.client.query(query_string)
        return self.results

    # retention policy must be >= 1hr
    def setup_retention(self, name, duration, database=None, default=False):
        """Create a retention policy with replication factor 1.

        Fix: the ``default`` argument was ignored — the call always
        passed default=False regardless of what the caller asked for.
        """
        self.client.create_retention_policy(name,
                                            duration,
                                            1,
                                            database,
                                            default=default,
                                            shard_duration=duration)

    def alter_retention(self, name, duration, database=None, default=False):
        """Alter an existing retention policy's duration/default flag."""
        self.client.alter_retention_policy(name,
                                           database,
                                           duration,
                                           replication=None,
                                           default=default,
                                           shard_duration=duration)

    def drop_retention(self, name, database=None):
        """Drop the named retention policy."""
        self.client.drop_retention_policy(name, database)

    def list_retention(self, database=None):
        """List retention policies for *database*."""
        return self.client.get_list_retention_policies(database)