# Imports are a sketch: the stdlib/third-party ones below are what the code uses;
# the integration's local test helpers are assumed to come from the test package.
import json

import mock
import pytest
from clickhouse_driver.errors import Error, NetworkError

from datadog_checks.clickhouse import ClickhouseCheck

# Assumed locations of the local test helpers:
# from .common import CLICKHOUSE_VERSION, raise_error
# from .metrics import ALL_METRICS, get_metrics


def test_custom_queries(aggregator, instance):
    instance['custom_queries'] = [
        {
            'tags': ['test:clickhouse'],
            'query': 'SELECT COUNT(*) FROM system.settings WHERE changed',
            'columns': [{'name': 'settings.changed', 'type': 'gauge'}],
        }
    ]

    check = ClickhouseCheck('clickhouse', {}, [instance])
    check.run()

    aggregator.assert_metric(
        'clickhouse.settings.changed',
        metric_type=0,  # 0 == gauge in the aggregator stub, matching the 'gauge' column type above
        tags=[
            'server:{}'.format(instance['server']),
            'port:{}'.format(instance['port']),
            'db:default',
            'foo:bar',
            'test:clickhouse',
        ],
    )
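
# Version metadata: the check should report a calver scheme and the year (major) part of the server version.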
def test_version_metadata(instance, datadog_agent, dd_run_check):
    check = ClickhouseCheck('clickhouse', {}, [instance])
    check.check_id = 'test:123'
    dd_run_check(check)

    datadog_agent.assert_metadata(
        'test:123', {'version.scheme': 'calver', 'version.year': CLICKHOUSE_VERSION.split(".")[0]}
    )
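
# Alternate version of the metadata test that drives the check with check.run() instead of the dd_run_check fixture.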
def test_version_metadata(instance, datadog_agent):
    check = ClickhouseCheck('clickhouse', {}, [instance])
    check.check_id = 'test:123'
    check.run()

    datadog_agent.assert_metadata('test:123', {
        'version.scheme': 'calver',
        'version.year': CLICKHOUSE_VERSION
    })

def test_can_connect(aggregator, instance):
    """
    Regression test: a copy of the `can_connect` service check must be submitted for each check run.
    (It used to be submitted only once on check init, which led to customers seeing "no data" in the UI.)
    """
    check = ClickhouseCheck('clickhouse', {}, [instance])
    num_runs = 3
    for _ in range(num_runs):
        check.run()
    aggregator.assert_service_check("clickhouse.can_connect", count=num_runs)
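
# Remove one of the registered initialization hooks and swap in a mock client whose queries raise,
# so the run is expected to fail with an exception.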
def test_error_query(instance, dd_run_check):
    check = ClickhouseCheck('clickhouse', {}, [instance])
    check.log = mock.MagicMock()
    del check.check_initializations[-2]

    client = mock.MagicMock()
    client.execute_iter = raise_error
    check._client = client
    with pytest.raises(Exception):
        dd_run_check(check)

def test_can_connect(aggregator, instance, dd_run_check):
    """
    Regression test: a copy of the `can_connect` service check must be submitted for each check run.
    (It used to be submitted only once on check init, which led to customers seeing "no data" in the UI.)
    """
    check = ClickhouseCheck('clickhouse', {}, [instance])

    # Test for consecutive healthy clickhouse.can_connect statuses
    num_runs = 3
    for _ in range(num_runs):
        dd_run_check(check)
    aggregator.assert_service_check("clickhouse.can_connect", count=num_runs, status=check.OK)
    aggregator.reset()

    # Test 1 healthy connection --> 2 Unhealthy service checks --> 1 healthy connection. Recovered
    dd_run_check(check)
    with mock.patch('clickhouse_driver.Client', side_effect=NetworkError('Connection refused')):
        with mock.patch('datadog_checks.clickhouse.ClickhouseCheck.ping_clickhouse', return_value=False):
            with pytest.raises(Exception):
                dd_run_check(check)
            with pytest.raises(Exception):
                dd_run_check(check)
    dd_run_check(check)
    aggregator.assert_service_check("clickhouse.can_connect", count=2, status=check.CRITICAL)
    aggregator.assert_service_check("clickhouse.can_connect", count=2, status=check.OK)
    aggregator.reset()

    # Test Exception in ping_clickhouse(), but reestablishes connection.
    dd_run_check(check)
    with mock.patch('datadog_checks.clickhouse.ClickhouseCheck.ping_clickhouse', side_effect=Error()):
        # connect() should be able to handle an exception in ping_clickhouse() and attempt reconnection
        dd_run_check(check)
    dd_run_check(check)
    aggregator.assert_service_check("clickhouse.can_connect", count=3, status=check.OK)

def test_check(aggregator, instance):
    # We don't call aggregator.assert_all_metrics_covered() because, depending on timing, other metrics may appear.
    check = ClickhouseCheck('clickhouse', {}, [instance])
    check.run()

    server_tag = 'server:{}'.format(instance['server'])
    port_tag = 'port:{}'.format(instance['port'])
    for metric in ALL_METRICS:
        aggregator.assert_metric_has_tag(metric, server_tag)
        aggregator.assert_metric_has_tag(metric, port_tag)
        aggregator.assert_metric_has_tag(metric, 'db:default')
        aggregator.assert_metric_has_tag(metric, 'foo:bar')

    aggregator.assert_metric('clickhouse.table.replicated.total', 2)
    aggregator.assert_metric(
        'clickhouse.dictionary.item.current',
        tags=[server_tag, port_tag, 'db:default', 'foo:bar', 'dictionary:test'],
    )
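
# The client should be constructed with the connection options derived from the instance config.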
def test_config(instance):
    check = ClickhouseCheck('clickhouse', {}, [instance])
    check.check_id = 'test-clickhouse'

    with mock.patch('clickhouse_driver.Client') as m:
        check.connect()
        m.assert_called_once_with(
            host=instance['server'],
            port=instance['port'],
            user=instance['user'],
            password=instance['password'],
            database='default',
            connect_timeout=10,
            send_receive_timeout=10,
            sync_request_timeout=10,
            compression=False,
            secure=False,
            settings={},
            client_name='datadog-test-clickhouse',
        )
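
# Variant of the error-query test where check.run() swallows the failure and the error is asserted via the logger.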
def test_error_query(instance):
    check = ClickhouseCheck('clickhouse', {}, [instance])
    check.log = mock.MagicMock()
    del check.check_initializations[-2]

    client = mock.MagicMock()
    client.execute_iter = raise_error
    check._client = client

    check.run()
    check.log.error.assert_any_call('Error querying %s: %s', 'system.metrics', mock.ANY)

def test_check(aggregator, instance, dd_run_check):
    # We don't call aggregator.assert_all_metrics_covered() because, depending on timing, other metrics may appear.
    check = ClickhouseCheck('clickhouse', {}, [instance])
    dd_run_check(check)
    server_tag = 'server:{}'.format(instance['server'])
    port_tag = 'port:{}'.format(instance['port'])
    metrics = get_metrics(CLICKHOUSE_VERSION)

    for metric in metrics:
        aggregator.assert_metric_has_tag(metric, port_tag, at_least=1)
        aggregator.assert_metric_has_tag(metric, server_tag, at_least=1)
        aggregator.assert_metric_has_tag(metric, 'db:default', at_least=1)
        aggregator.assert_metric_has_tag(metric, 'foo:bar', at_least=1)

    aggregator.assert_metric(
        'clickhouse.dictionary.item.current',
        tags=[server_tag, port_tag, 'db:default', 'foo:bar', 'dictionary:test'],
        at_least=1,
    )

    aggregator.assert_metric('clickhouse.table.replicated.total', 2)
    aggregator.assert_service_check("clickhouse.can_connect", count=1)
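
# An empty instance is missing the required `server` setting; the resulting config error is returned by check.run() as JSON.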
def test_config_error():
    check = ClickhouseCheck('clickhouse', {}, [{}])

    error = check.run()
    assert error
    assert json.loads(error)[0]['message'] == 'the `server` setting is required'