Example #1
def test_object_name(aggregator, instance_docker, sqlserver):
    init_config_object_name = {
        'custom_metrics': [{
            'name': 'sqlserver.cache.hit_ratio',
            'counter_name': 'Cache Hit Ratio',
            'instance_name': 'SQL Plans',
            'object_name': 'SQLServer:Plan Cache',
            'tags': ['optional_tag:tag1']
        }, {
            'name': 'sqlserver.active_requests',
            'counter_name': 'Active requests',
            'instance_name': 'default',
            'object_name': 'SQLServer:Workload Group Stats',
            'tags': ['optional_tag:tag1']
        }]
    }

    sqlserver_check = SQLServer(CHECK_NAME, init_config_object_name, {},
                                [instance_docker])
    sqlserver_check.check(instance_docker)

    aggregator.assert_metric('sqlserver.cache.hit_ratio',
                             tags=['optional:tag1', 'optional_tag:tag1'],
                             count=1)
    aggregator.assert_metric('sqlserver.active_requests',
                             tags=['optional:tag1', 'optional_tag:tag1'],
                             count=1)
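The examples on this page rely on an instance_docker fixture and a CHECK_NAME constant defined elsewhere in the test suite. A minimal sketch of what they could look like, consistent with the host and tags asserted in these tests; the credentials and exact keys are assumptions, not copied from the real fixtures:

CHECK_NAME = 'sqlserver'  # presumed check name, defined in the suite's common module

# Hypothetical instance config; the real fixture likely carries more keys.
instance_docker = {
    'host': 'localhost,1433',    # matches the host tag asserted in the invalid-password test below
    'username': 'datadog',       # hypothetical credentials
    'password': 'hunter2',       # hypothetical credentials
    'database': 'master',
    'tags': ['optional:tag1'],   # matches the 'optional:tag1' tag asserted above
}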
Example #2
def test_autodiscovery_matches_all_by_default(instance_autodiscovery):
    fetchall_results, mock_cursor = _mock_database_list()
    all_dbs = set([r.name for r in fetchall_results])
    # check base case of default filters
    check = SQLServer(CHECK_NAME, {}, [instance_autodiscovery])
    check.autodiscover_databases(mock_cursor)
    assert check.databases == all_dbs
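The autodiscovery tests use a _mock_database_list() helper that is not reproduced on this page. A minimal sketch consistent with the assertions in the autodiscovery examples; the row shape and the exact set of database names are inferred, not copied from the suite:

from collections import namedtuple
from unittest import mock

Row = namedtuple('Row', ['name'])

def _mock_database_list():
    # Database names inferred from the expected sets asserted in the
    # autodiscovery examples; the real helper may return richer rows.
    fetchall_results = [
        Row('master'), Row('tempdb'), Row('msdb'),
        Row('AdventureWorks2017'), Row('CaseSensitive2018'), Row('Fancy2020db'),
    ]
    mock_cursor = mock.MagicMock()
    mock_cursor.fetchall.return_value = iter(fetchall_results)
    return fetchall_results, mock_cursor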
Example #3
def test_check_local(aggregator, init_config, instance_sql2017):
    sqlserver_check = SQLServer(CHECK_NAME, init_config, {},
                                [instance_sql2017])
    sqlserver_check.check(instance_sql2017)
    expected_tags = instance_sql2017.get('tags', []) + ['host:{}'.format(LOCAL_SERVER), 'db:master']
    _assert_metrics(aggregator, expected_tags)
Example #4
def test_check_local(aggregator, init_config, instance_sql2008):
    sqlserver_check = SQLServer(CHECK_NAME, init_config, {},
                                [instance_sql2008])
    sqlserver_check.check(instance_sql2008)
    expected_tags = instance_sql2008.get('tags', []) + [r'host:(local)\SQL2008R2SP2', 'db:master']
    _assert_metrics(aggregator, expected_tags)
Example #5
def test_check_docker(aggregator, init_config, instance_docker):
    sqlserver_check = SQLServer(CHECK_NAME, init_config, [instance_docker])
    sqlserver_check.check(instance_docker)
    expected_tags = instance_docker.get('tags', []) + [
        'host:{}'.format(instance_docker.get('host')), 'db:master'
    ]
    _assert_metrics(aggregator, expected_tags)
Example #6
def test_autodiscovery_exclude_override(instance_autodiscovery):
    fetchall_results, mock_cursor = _mock_database_list()
    instance_autodiscovery['autodiscovery_include'] = ['t.*', 'master']  # remove default `.*`
    instance_autodiscovery['autodiscovery_exclude'] = ['.*2020db$', 'm.*']
    check = SQLServer(CHECK_NAME, {}, [instance_autodiscovery])
    check.autodiscover_databases(mock_cursor)
    assert check.databases == set(['tempdb'])
Example #7
def test_autodiscovery_exclude_some(instance_autodiscovery):
    fetchall_results, mock_cursor = _mock_database_list()
    instance_autodiscovery['autodiscovery_include'] = ['.*']  # replace default `.*`
    instance_autodiscovery['autodiscovery_exclude'] = ['.*2020db$', 'm.*']
    check = SQLServer(CHECK_NAME, {}, [instance_autodiscovery])
    check.autodiscover_databases(mock_cursor)
    assert check.databases == set(['tempdb', 'AdventureWorks2017', 'CaseSensitive2018'])
Example #8
def test_connection_cleanup(instance_docker):
    check = SQLServer(CHECK_NAME, {}, [instance_docker])
    check.initialize_connection()

    # regular operation
    with check.connection.open_managed_default_connection():
        assert len(check.connection._conns) == 1
        with check.connection.get_managed_cursor() as cursor:
            cursor.execute("select 1")
            assert len(check.connection._conns) == 1
    assert len(check.connection._conns) == 0, "connection should have been closed"

    # db exception
    with pytest.raises(Exception) as e:
        with check.connection.open_managed_default_connection():
            assert len(check.connection._conns) == 1
            with check.connection.get_managed_cursor() as cursor:
                assert len(check.connection._conns) == 1
                cursor.execute("gimme some data")
    assert "incorrect syntax" in str(e).lower()
    assert len(check.connection._conns) == 0, "connection should have been closed"

    # application exception
    with pytest.raises(Exception) as e:
        with check.connection.open_managed_default_connection():
            assert len(check.connection._conns) == 1
            with check.connection.get_managed_cursor():
                assert len(check.connection._conns) == 1
                raise Exception("oops")
    assert "oops" in str(e)
    assert len(check.connection._conns) == 0, "connection should have been closed"
Example #9
def test_object_name(aggregator, init_config_object_name, instance_docker):

    sqlserver_check = SQLServer(CHECK_NAME, init_config_object_name, {}, [instance_docker])
    sqlserver_check.check(instance_docker)

    aggregator.assert_metric('sqlserver.cache.hit_ratio', tags=['optional:tag1', 'optional_tag:tag1'], count=1)
    aggregator.assert_metric('sqlserver.active_requests', tags=['optional:tag1', 'optional_tag:tag1'], count=1)
Example #10
def test_autodiscovery_matches_some(instance_autodiscovery):
    fetchall_results, mock_cursor = _mock_database_list()
    instance_autodiscovery['autodiscovery_include'] = [
        'master', 'fancy2020db', 'missingdb', 'fakedb'
    ]
    check = SQLServer(CHECK_NAME, {}, [instance_autodiscovery])
    check.autodiscover_databases(mock_cursor)
    assert check.databases == set(['master', 'Fancy2020db'])
Example #11
def test_check_adoprovider(aggregator, init_config, instance_sql2017, adoprovider):
    instance = deepcopy(instance_sql2017)
    instance['adoprovider'] = adoprovider

    sqlserver_check = SQLServer(CHECK_NAME, init_config, {}, [instance])
    sqlserver_check.check(instance)
    expected_tags = instance.get('tags', []) + [r'host:(local)\SQL2017', 'db:master']
    _assert_metrics(aggregator, expected_tags)
Example #12
def test_autodiscovery_matches_none(instance_autodiscovery):
    fetchall_results, mock_cursor = _mock_database_list()
    # check missing additions, but no exclusions
    mock_cursor.fetchall.return_value = iter(fetchall_results)  # reset the mock results
    instance_autodiscovery['autodiscovery_include'] = ['missingdb', 'fakedb']
    check = SQLServer(CHECK_NAME, {}, [instance_autodiscovery])
    check.autodiscover_databases(mock_cursor)
    assert check.databases == set()
Example #13
def test_check_adoprovider(aggregator, init_config, instance_sql2017, adoprovider):
    instance = deepcopy(instance_sql2017)
    instance['adoprovider'] = adoprovider

    sqlserver_check = SQLServer(CHECK_NAME, init_config, [instance])
    sqlserver_check.check(instance)
    expected_tags = instance.get('tags', []) + ['host:{}'.format(LOCAL_SERVER), 'db:master']
    _assert_metrics(aggregator, expected_tags)
Example #14
def test_get_cursor(instance_sql2017):
    """
    Ensure we don't leak connection info in case of a KeyError when the
    connection pool is empty or the params for `get_cursor` are invalid.
    """
    check = SQLServer(CHECK_NAME, {}, [])
    with pytest.raises(SQLConnectionError):
        check.get_cursor(instance_sql2017, 'foo')
Example #15
def test_get_cursor(instance_docker):
    """
    Ensure we don't leak connection info in case of a KeyError when the
    connection pool is empty or the params for `get_cursor` are invalid.
    """
    check = SQLServer(CHECK_NAME, {}, [instance_docker])
    check.initialize_connection()
    with pytest.raises(SQLConnectionError):
        check.connection.get_cursor('foo')
Example #16
def test_check_invalid_password(aggregator, init_config, instance_docker, sqlserver):
    instance_docker['password'] = '******'

    sqlserver_check = SQLServer(CHECK_NAME, init_config, {}, [instance_docker])

    with pytest.raises(SQLConnectionError) as excinfo:
        sqlserver_check.check(instance_docker)
    assert excinfo.value.args[0] == 'Unable to connect to SQL Server'
    aggregator.assert_service_check('sqlserver.can_connect', status=sqlserver_check.CRITICAL,
                                    tags=['host:localhost,1433', 'db:master', 'optional:tag1'])
Example #17
def test_async_job_enabled(dd_run_check, dbm_instance, activity_enabled):
    dbm_instance['query_activity'] = {'enabled': activity_enabled, 'run_sync': False}
    check = SQLServer(CHECK_NAME, {}, [dbm_instance])
    dd_run_check(check)
    check.cancel()
    if activity_enabled:
        assert check.activity._job_loop_future is not None
        check.activity._job_loop_future.result()
    else:
        assert check.activity._job_loop_future is None
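The DBM examples take a dbm_instance fixture that is not shown on this page. A sketch of the shape these tests assume; only the keys actually read by the tests (dbm, query_activity, query_metrics, run_sync, collection_interval) are grounded, everything else is a guess:

dbm_instance = {
    'host': 'localhost,1433',   # hypothetical connection details
    'username': 'datadog',      # hypothetical credentials
    'password': 'hunter2',      # hypothetical credentials
    'dbm': True,
    'query_activity': {'enabled': True, 'run_sync': True, 'collection_interval': 1},
    'query_metrics': {'enabled': True, 'run_sync': True, 'collection_interval': 1},
}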
Example #18
def test_async_job_enabled(dd_run_check, dbm_instance, statement_metrics_enabled):
    dbm_instance['query_metrics'] = {'enabled': statement_metrics_enabled, 'run_sync': False}
    check = SQLServer(CHECK_NAME, {}, [dbm_instance])
    dd_run_check(check)
    check.cancel()
    if statement_metrics_enabled:
        assert check.statement_metrics._job_loop_future is not None
        check.statement_metrics._job_loop_future.result()
    else:
        assert check.statement_metrics._job_loop_future is None
Example #19
def test_get_available_query_metrics_columns(dbm_instance, expected_columns, available_columns):
    check = SQLServer(CHECK_NAME, {}, [dbm_instance])
    check.initialize_connection()
    _conn_key_prefix = "dbm-"
    with check.connection.open_managed_default_connection(key_prefix=_conn_key_prefix):
        with check.connection.get_managed_cursor(key_prefix=_conn_key_prefix) as cursor:
            result_available_columns = check.statement_metrics._get_available_query_metrics_columns(
                cursor, expected_columns
            )
            assert result_available_columns == available_columns
Example #20
def test_missing_db(instance_sql2017):
    instance = copy.copy(instance_sql2017)
    instance['ignore_missing_database'] = False
    with mock.patch('datadog_checks.sqlserver.connection.Connection.check_database', return_value=(False, 'db')):
        with pytest.raises(ConfigurationError):
            check = SQLServer(CHECK_NAME, {}, [instance])

    instance['ignore_missing_database'] = True
    with mock.patch('datadog_checks.sqlserver.connection.Connection.check_database', return_value=(False, 'db')):
        check = SQLServer(CHECK_NAME, {}, [instance])
        assert check.do_check is False
Example #21
def test_activity_collection_rate_limit(aggregator, dd_run_check, dbm_instance):
    # test the activity collection loop rate limit
    collection_interval = 0.1
    dbm_instance['query_activity']['collection_interval'] = collection_interval
    dbm_instance['query_activity']['run_sync'] = False
    check = SQLServer(CHECK_NAME, {}, [dbm_instance])
    dd_run_check(check)
    sleep_time = 1
    time.sleep(sleep_time)
    max_collections = int(1 / collection_interval * sleep_time) + 1
    check.cancel()
    metrics = aggregator.metrics("dd.sqlserver.activity.collect_activity.payload_size")
    assert max_collections / 2.0 <= len(metrics) <= max_collections
Example #22
def test_async_job_cancel_cancel(aggregator, dd_run_check, dbm_instance):
    dbm_instance['query_activity']['run_sync'] = False
    check = SQLServer(CHECK_NAME, {}, [dbm_instance])
    dd_run_check(check)
    check.cancel()
    # wait for it to stop and make sure it doesn't throw any exceptions
    check.activity._job_loop_future.result()
    assert not check.activity._job_loop_future.running(), "activity thread should be stopped"
    # if the thread doesn't start until after the cancel signal is set then the db connection will never
    # be created in the first place
    aggregator.assert_metric(
        "dd.sqlserver.async_job.cancel",
        tags=_expected_dbm_instance_tags(dbm_instance) + ['job:query-activity'],
    )
Example #23
def test_get_statement_metrics_query_cached(aggregator, dbm_instance, caplog):
    caplog.set_level(logging.DEBUG)
    check = SQLServer(CHECK_NAME, {}, [dbm_instance])
    check.initialize_connection()
    _conn_key_prefix = "dbm-"
    with check.connection.open_managed_default_connection(key_prefix=_conn_key_prefix):
        with check.connection.get_managed_cursor(key_prefix=_conn_key_prefix) as cursor:
            for _ in range(3):
                query = check.statement_metrics._get_statement_metrics_query_cached(cursor)
                assert query, "query should be non-empty"
    times_columns_loaded = 0
    for r in caplog.records:
        if r.message.startswith("found available sys.dm_exec_query_stats columns"):
            times_columns_loaded += 1
    assert times_columns_loaded == 1, "columns should have been loaded only once"
Example #24
def test_check_stored_procedure(aggregator, init_config, instance_docker):
    instance_pass = deepcopy(instance_docker)

    proc = 'pyStoredProc'
    sp_tags = "foo:bar,baz:qux"
    instance_pass['stored_procedure'] = proc

    load_stored_procedure(instance_pass, proc, sp_tags)

    sqlserver_check = SQLServer(CHECK_NAME, init_config, [instance_pass])
    sqlserver_check.check(instance_pass)

    expected_tags = instance_pass.get('tags', []) + sp_tags.split(',')
    aggregator.assert_metric('sql.sp.testa', value=100, tags=expected_tags, count=1)
    aggregator.assert_metric('sql.sp.testb', tags=expected_tags, count=2)
Example #25
def test_resolved_hostname(instance_docker, dbm_enabled, instance_host,
                           reported_hostname, expected_hostname):
    instance_docker['dbm'] = dbm_enabled
    instance_docker['host'] = instance_host
    instance_docker['reported_hostname'] = reported_hostname
    sqlserver_check = SQLServer(CHECK_NAME, {}, [instance_docker])
    assert sqlserver_check.resolved_hostname == expected_hostname
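The parametrize decorator that supplies dbm_enabled, instance_host, reported_hostname and expected_hostname is stripped from this listing. Something along these lines is assumed; the concrete cases are illustrative only, not the suite's real data:

@pytest.mark.parametrize(
    'dbm_enabled, instance_host, reported_hostname, expected_hostname',
    [
        # hypothetical cases: without a reported_hostname the check falls back
        # to its own hostname resolution, otherwise the override wins
        (False, 'localhost,1433', None, 'agent-resolved-hostname'),
        (True, 'localhost,1433', 'forced-hostname', 'forced-hostname'),
    ],
)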
Example #26
def test_custom_metrics_alt_tables(aggregator, dd_run_check,
                                   init_config_alt_tables, instance_docker):
    instance = deepcopy(instance_docker)
    instance['include_task_scheduler_metrics'] = False

    sqlserver_check = SQLServer(CHECK_NAME, init_config_alt_tables, [instance])
    dd_run_check(sqlserver_check)

    aggregator.assert_metric('sqlserver.LCK_M_S.max_wait_time_ms',
                             tags=['optional:tag1'],
                             count=1)
    aggregator.assert_metric('sqlserver.LCK_M_S.signal_wait_time_ms',
                             tags=['optional:tag1'],
                             count=1)
    aggregator.assert_metric(
        'sqlserver.MEMORYCLERK_BITMAP.virtual_memory_committed_kb',
        tags=['memory_node_id:0', 'optional:tag1'],
        count=1)
    aggregator.assert_metric(
        'sqlserver.MEMORYCLERK_BITMAP.virtual_memory_reserved_kb',
        tags=['memory_node_id:0', 'optional:tag1'],
        count=1)

    # check a second time for io metrics to be processed
    dd_run_check(sqlserver_check)

    aggregator.assert_metric('sqlserver.io_file_stats.num_of_reads')
    aggregator.assert_metric('sqlserver.io_file_stats.num_of_writes')
Example #27
def test_check_stored_procedure_proc_if(aggregator, init_config, instance_docker):
    instance_fail = deepcopy(instance_docker)
    proc = 'pyStoredProc'
    proc_only_fail = "select cntr_type from sys.dm_os_performance_counters where counter_name in ('FOO');"
    sp_tags = "foo:bar,baz:qux"

    instance_fail['proc_only_if'] = proc_only_fail
    instance_fail['stored_procedure'] = proc

    load_stored_procedure(instance_fail, proc, sp_tags)

    sqlserver_check = SQLServer(CHECK_NAME, init_config, [instance_fail])
    sqlserver_check.check(instance_fail)

    # the proc_only_if query matches no rows, so the stored procedure never runs and no metrics are collected
    assert len(aggregator._metrics) == 0
Example #28
def test_load_static_information(aggregator, dd_run_check, instance_docker):
    instance = copy(instance_docker)
    check = SQLServer(CHECK_NAME, {}, [instance])
    dd_run_check(check)
    assert 'version' in check.static_info_cache, "missing version static information"
    assert check.static_info_cache['version'], "empty version in static information"
Example #29
def test_autodiscovery_perf_counters(aggregator, dd_run_check,
                                     instance_autodiscovery):
    instance_autodiscovery['autodiscovery_include'] = ['master', 'msdb']
    check = SQLServer(CHECK_NAME, {}, [instance_autodiscovery])
    dd_run_check(check)

    expected_metrics = [
        'sqlserver.database.backup_restore_throughput',
        'sqlserver.database.log_bytes_flushed',
        'sqlserver.database.log_flushes',
        'sqlserver.database.log_flush_wait',
        'sqlserver.database.transactions',
        'sqlserver.database.write_transactions',
        'sqlserver.database.active_transactions',
    ]
    master_tags = [
        'database:master',
        'optional:tag1',
    ]
    msdb_tags = [
        'database:msdb',
        'optional:tag1',
    ]
    base_tags = ['optional:tag1']
    for metric in expected_metrics:
        aggregator.assert_metric(metric, tags=master_tags)
        aggregator.assert_metric(metric, tags=msdb_tags)
        aggregator.assert_metric(metric, tags=base_tags)
Example #30
def test_no_autodiscovery_service_checks(aggregator, dd_run_check, init_config,
                                         instance_docker):
    sqlserver_check = SQLServer(CHECK_NAME, init_config, [instance_docker])
    dd_run_check(sqlserver_check)

    # assert no database service checks
    aggregator.assert_service_check('sqlserver.database.can_connect', count=0)