def test_login_metrics(dd_run_check, aggregator, instance):
    # type: (Callable[[SnowflakeCheck], None], AggregatorStub, Dict[str, Any]) -> None
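    # Each (client_type, failed, successful, total) row should produce login
    # fail/success/total metrics tagged with the client type.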

    expected_login_metrics = [('SNOWFLAKE_UI', 2, 6, 8),
                              ('PYTHON_DRIVER', 0, 148, 148)]
    with mock.patch(
            'datadog_checks.snowflake.SnowflakeCheck.execute_query_raw',
            return_value=expected_login_metrics):
        check = SnowflakeCheck(CHECK_NAME, {}, [instance])
        check._conn = mock.MagicMock()
        check._query_manager.queries = [Query(queries.LoginMetrics)]
        dd_run_check(check)
    snowflake_tags = EXPECTED_TAGS + ['client_type:SNOWFLAKE_UI']
    aggregator.assert_metric('snowflake.logins.fail.count',
                             value=2,
                             tags=snowflake_tags)
    aggregator.assert_metric('snowflake.logins.success.count',
                             value=6,
                             tags=snowflake_tags)
    aggregator.assert_metric('snowflake.logins.total',
                             value=8,
                             tags=snowflake_tags)

    python_tags = EXPECTED_TAGS + ['client_type:PYTHON_DRIVER']
    aggregator.assert_metric('snowflake.logins.fail.count',
                             value=0,
                             tags=python_tags)
    aggregator.assert_metric('snowflake.logins.success.count',
                             value=148,
                             tags=python_tags)
    aggregator.assert_metric('snowflake.logins.total',
                             value=148,
                             tags=python_tags)
def test_warehouse_load(dd_run_check, aggregator, instance):
    # type: (Callable[[SnowflakeCheck], None], AggregatorStub, Dict[str, Any]) -> None
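    # Warehouse load averages are tagged with the warehouse name; zero values
    # should still be submitted exactly once.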

    expected_wl_metrics = [
        ('COMPUTE_WH', Decimal('0.000446667'), Decimal('0E-9'),
         Decimal('0E-9'), Decimal('0E-9')),
    ]
    expected_tags = EXPECTED_TAGS + ['warehouse:COMPUTE_WH']
    with mock.patch(
            'datadog_checks.snowflake.SnowflakeCheck.execute_query_raw',
            return_value=expected_wl_metrics):
        check = SnowflakeCheck(CHECK_NAME, {}, [instance])
        check._conn = mock.MagicMock()
        check._query_manager.queries = [Query(queries.WarehouseLoad)]
        dd_run_check(check)
    aggregator.assert_metric('snowflake.query.executed',
                             value=0.000446667,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.query.queued_overload',
                             value=0,
                             count=1,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.query.queued_provision',
                             value=0,
                             count=1,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.query.blocked',
                             value=0,
                             count=1,
                             tags=expected_tags)
def test_query_metrics(dd_run_check, aggregator, instance):
    # type: (Callable[[SnowflakeCheck], None], AggregatorStub, Dict[str, Any]) -> None
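    # Query history averages are tagged with query_type, warehouse, database
    # and schema (a NULL schema is rendered as 'schema:None').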

    expected_query_metrics = [
        (
            'USE',
            'COMPUTE_WH',
            'SNOWFLAKE',
            None,
            Decimal('4.333333'),
            Decimal('24.555556'),
            Decimal('0.000000'),
            Decimal('0.000000'),
            Decimal('0.000000'),
            Decimal('0.000000'),
            Decimal('0.000000'),
        ),
    ]

    expected_tags = EXPECTED_TAGS + [
        'warehouse:COMPUTE_WH', 'database:SNOWFLAKE', 'schema:None',
        'query_type:USE'
    ]
    with mock.patch(
            'datadog_checks.snowflake.SnowflakeCheck.execute_query_raw',
            return_value=expected_query_metrics):
        check = SnowflakeCheck(CHECK_NAME, {}, [instance])
        check._conn = mock.MagicMock()
        check._query_manager.queries = [Query(queries.QueryHistory)]
        dd_run_check(check)

    aggregator.assert_metric('snowflake.query.execution_time',
                             value=4.333333,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.query.compilation_time',
                             value=24.555556,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.query.bytes_scanned',
                             value=0,
                             count=1,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.query.bytes_written',
                             value=0,
                             count=1,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.query.bytes_deleted',
                             value=0,
                             count=1,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.query.bytes_spilled.local',
                             value=0,
                             count=1,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.query.bytes_spilled.remote',
                             value=0,
                             count=1,
                             tags=expected_tags)
def test_emit_generic_and_non_generic_tags_by_default(instance):
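    # With disable_generic_tags set to False, normalization should keep the
    # generic 'service' tag and also add the prefixed 'snowflake_service' tag.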
    instance = copy.deepcopy(instance)
    instance['disable_generic_tags'] = False
    check = SnowflakeCheck(CHECK_NAME, {}, [instance])
    tags = EXPECTED_TAGS + [
        'service_type:WAREHOUSE_METERING', 'service:COMPUTE_WH'
    ]
    normalised_tags = tags + ['snowflake_service:COMPUTE_WH']
    assert set(normalised_tags) == set(check._normalize_tags_type(tags))
def test_metric_group_exceptions(instance):
    instance = copy.deepcopy(instance)
    instance['metric_groups'] = ['fake.metric.group']
    with pytest.raises(
            Exception,
            match='No valid metric_groups configured, please list at least one.'
    ):
        check = SnowflakeCheck(CHECK_NAME, {}, [instance])
        check.log = mock.MagicMock()
        check.log.warning.assert_called_once_with(
            "Invalid metric_groups found in snowflake conf.yaml: fake.metric.group"
        )
def test_config():
    # Test missing account
    user_config = {
        'username': '******',
        'password': '******',
    }
    with pytest.raises(Exception, match='Must specify an account'):
        SnowflakeCheck(CHECK_NAME, {}, [user_config])

    # Test missing user and pass
    account_config = {'account': 'TEST123'}
    with pytest.raises(Exception, match='Must specify a user and password'):
        SnowflakeCheck(CHECK_NAME, {}, [account_config])
def test_authenticator_option_pass(options):
    instance = {
        # Common configuration
        'account': 'test_acct.us-central1.gcp',
        'database': 'SNOWFLAKE',
        'schema': 'ACCOUNT_USAGE',
        'role': 'ACCOUNTADMIN',
        'user': '******',
    }
    instance.update(options)

    check = SnowflakeCheck(CHECK_NAME, {}, [instance])
    check.load_configuration_models()
def test_invalid_auth(instance):
    # Test oauth
    oauth_inst = copy.deepcopy(instance)
    oauth_inst['authenticator'] = 'oauth'
    with pytest.raises(Exception,
                       match='If using OAuth, you must specify a token'):
        SnowflakeCheck(CHECK_NAME, {}, [oauth_inst])

    oauth_inst['authenticator'] = 'testauth'
    with pytest.raises(
            Exception,
            match='The Authenticator method set is invalid: testauth'):
        SnowflakeCheck(CHECK_NAME, {}, [oauth_inst])
def test_no_schema(dd_run_check, aggregator, instance):
    config = copy.deepcopy(instance)
    del config['schema']
    config['login_timeout'] = 5
    check = SnowflakeCheck(CHECK_NAME, {}, [config])
    dd_run_check(check)
    aggregator.assert_service_check('snowflake.can_connect',
                                    SnowflakeCheck.CRITICAL)
def test_emits_critical_service_check_when_service_is_down(
        dd_run_check, aggregator, instance):
    config = copy.deepcopy(instance)
    config['login_timeout'] = 5
    check = SnowflakeCheck(CHECK_NAME, {}, [config])
    dd_run_check(check)
    aggregator.assert_service_check('snowflake.can_connect',
                                    SnowflakeCheck.CRITICAL)
def test_invalid_oauth(oauth_instance):
    # Test oauth without user
    with pytest.raises(Exception, match='Must specify a user'):
        SnowflakeCheck(CHECK_NAME, {}, [INVALID_CONFIG])

    # Test oauth without token
    no_token_config = copy.deepcopy(INVALID_CONFIG)
    no_token_config['user'] = "******"
    with pytest.raises(Exception,
                       match='If using OAuth, you must specify a token'):
        SnowflakeCheck(CHECK_NAME, {}, [no_token_config])

    oauth_inst = copy.deepcopy(oauth_instance)
    oauth_inst['authenticator'] = 'testauth'
    with pytest.raises(
            Exception,
            match='The Authenticator method set is invalid: testauth'):
        SnowflakeCheck(CHECK_NAME, {}, [oauth_inst])
def test_version_metadata(dd_run_check, instance, datadog_agent):
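    # The version string returned by the query should be parsed into semver
    # parts and submitted as check metadata.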
    expected_version = [('4.30.2', )]
    version_metadata = {
        'version.major': '4',
        'version.minor': '30',
        'version.patch': '2',
        'version.raw': '4.30.2',
        'version.scheme': 'semver',
    }
    with mock.patch(
            'datadog_checks.snowflake.SnowflakeCheck.execute_query_raw',
            return_value=expected_version):
        check = SnowflakeCheck(CHECK_NAME, {}, [instance])
        check.check_id = 'test:123'
        check._conn = mock.MagicMock()
        check._query_manager.queries = []
        dd_run_check(check)

    datadog_agent.assert_metadata('test:123', version_metadata)
def test_db_storage_metrics(dd_run_check, aggregator, instance):
    # type: (Callable[[SnowflakeCheck], None], AggregatorStub, Dict[str, Any]) -> None
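    # Per-database storage and failsafe byte counts, tagged with the
    # database name.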

    expected_db_storage_usage = [('SNOWFLAKE_DB', Decimal('133.000000'),
                                  Decimal('9.100000'))]
    expected_tags = EXPECTED_TAGS + ['database:SNOWFLAKE_DB']
    with mock.patch(
            'datadog_checks.snowflake.SnowflakeCheck.execute_query_raw',
            return_value=expected_db_storage_usage):
        check = SnowflakeCheck(CHECK_NAME, {}, [instance])
        check._conn = mock.MagicMock()
        check._query_manager.queries = [Query(queries.DatabaseStorageMetrics)]
        dd_run_check(check)
    aggregator.assert_metric('snowflake.storage.database.storage_bytes',
                             value=133.0,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.storage.database.failsafe_bytes',
                             value=9.1,
                             tags=expected_tags)
def test_proxy_init_config_override(instance):
    with mock.patch('datadog_checks.base.stubs.datadog_agent.get_config',
                    return_value=INVALID_PROXY):
        init_config = {'proxy': PROXY_CONFIG}
        check = SnowflakeCheck(CHECK_NAME, init_config, [instance])
        assert check.http.options['proxies'] == {
            'http': 'http_host',
            'https': 'https_host'
        }
        assert check._proxies == {'http': 'http_host', 'https': 'https_host'}
        assert check.http.no_proxy_uris == ['uri1', 'uri2', 'uri3', 'uri4']
def test_proxy_agent_config(instance):
    with mock.patch('datadog_checks.base.stubs.datadog_agent.get_config',
                    return_value=PROXY_CONFIG):
        check = SnowflakeCheck(CHECK_NAME, {}, [instance])

        assert check.http.options['proxies'] == {
            'http': 'http_host',
            'https': 'https_host'
        }
        assert check._proxies == {'http': 'http_host', 'https': 'https_host'}
        assert check.http.no_proxy_uris == ['uri1', 'uri2', 'uri3', 'uri4']
def test_no_metric_group(instance):
    inst = copy.deepcopy(instance)
    inst['metric_groups'] = []
    with pytest.raises(
            Exception,
            match=
            'No valid metric_groups or custom query configured, please list at least one.'
    ):
        SnowflakeCheck(CHECK_NAME, {}, [inst])

    inst['custom_queries'] = [
        {
            'query': "SELECT a,b from mytable where a='stuff' limit 1;",
            'columns': [{}, {
                'name': 'metric.b',
                'type': 'gauge'
            }],
        },
    ]
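    # A custom query alone is enough for the check to initialize.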
    SnowflakeCheck(CHECK_NAME, {}, [inst])
def test_warehouse_usage_metrics(dd_run_check, aggregator, instance):
    # type: (Callable[[SnowflakeCheck], None], AggregatorStub, Dict[str, Any]) -> None
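    # Warehouse credit usage: avg and sum metrics for cloud services, the
    # virtual warehouse and total credits, tagged with the warehouse name.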

    expected_wh_usage = [(
        'COMPUTE_WH',
        Decimal('13.000000000'),
        Decimal('1.000000000'),
        Decimal('0.870148056'),
        Decimal('0.066934465846'),
        Decimal('13.870148056'),
        Decimal('1.066934465846'),
    )]
    expected_tags = EXPECTED_TAGS + ['warehouse:COMPUTE_WH']
    with mock.patch(
            'datadog_checks.snowflake.SnowflakeCheck.execute_query_raw',
            return_value=expected_wh_usage):
        check = SnowflakeCheck(CHECK_NAME, {}, [instance])
        check._conn = mock.MagicMock()
        check._query_manager.queries = [Query(queries.WarehouseCreditUsage)]
        dd_run_check(check)

    aggregator.assert_metric('snowflake.billing.warehouse.cloud_service.avg',
                             count=1,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.billing.warehouse.total_credit.avg',
                             count=1,
                             tags=expected_tags)
    aggregator.assert_metric(
        'snowflake.billing.warehouse.virtual_warehouse.avg',
        count=1,
        tags=expected_tags)
    aggregator.assert_metric('snowflake.billing.warehouse.cloud_service.sum',
                             count=1,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.billing.warehouse.total_credit.sum',
                             count=1,
                             tags=expected_tags)
    aggregator.assert_metric(
        'snowflake.billing.warehouse.virtual_warehouse.sum',
        count=1,
        tags=expected_tags)
def test_additional_metric_groups(instance):
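    # Only the queries for the configured metric groups should be loaded.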
    instance = copy.deepcopy(instance)
    instance['metric_groups'] = ['snowflake.logins', 'snowflake.data_transfer']
    check = SnowflakeCheck(CHECK_NAME, {}, [instance])
    assert check.config.metric_groups == [
        'snowflake.logins', 'snowflake.data_transfer'
    ]

    assert check.metric_queries == [
        queries.LoginMetrics,
        queries.DataTransferHistory,
    ]
def test_default_auth(instance):
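    # With no authenticator configured, sf.connect should receive the default
    # 'snowflake' authenticator and no token.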
    check = SnowflakeCheck(CHECK_NAME, {}, [instance])
    check._conn = mock.MagicMock()
    check._query_manager = mock.MagicMock()

    with mock.patch('datadog_checks.snowflake.check.sf') as sf:
        check.check(instance)
        sf.connect.assert_called_with(
            user='******',
            password='******',
            account='test_acct.us-central1.gcp',
            database='SNOWFLAKE',
            schema='ACCOUNT_USAGE',
            warehouse=None,
            role='ACCOUNTADMIN',
            passcode_in_password=False,
            passcode=None,
            client_prefetch_threads=4,
            login_timeout=60,
            ocsp_response_cache_filename=None,
            authenticator='snowflake',
            token=None,
            client_session_keep_alive=False,
            proxy_host=None,
            proxy_port=None,
            proxy_user=None,
            proxy_password=None,
        )
def test_oauth_auth(instance):
    # Test oauth
    oauth_inst = copy.deepcopy(instance)
    oauth_inst['authenticator'] = 'oauth'
    oauth_inst['token'] = 'testtoken'

    with mock.patch('datadog_checks.snowflake.check.sf') as sf:
        check = SnowflakeCheck(CHECK_NAME, {}, [oauth_inst])
        check._conn = mock.MagicMock()
        check._query_manager = mock.MagicMock()
        check.check(oauth_inst)
        sf.connect.assert_called_with(
            user='******',
            password='******',
            account='test_acct.us-central1.gcp',
            database='SNOWFLAKE',
            schema='ACCOUNT_USAGE',
            warehouse=None,
            role='ACCOUNTADMIN',
            passcode_in_password=False,
            passcode=None,
            client_prefetch_threads=4,
            login_timeout=60,
            ocsp_response_cache_filename=None,
            authenticator='oauth',
            token='testtoken',
            client_session_keep_alive=False,
        )
def test_storage_metrics(dd_run_check, aggregator, instance):
    # type: (Callable[[SnowflakeCheck], None], AggregatorStub, Dict[str, Any]) -> None
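    # Account-level storage totals carry only the base tags.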

    expected_storage = [(Decimal('0.000000'), Decimal('1206.000000'),
                         Decimal('19.200000'))]
    with mock.patch(
            'datadog_checks.snowflake.SnowflakeCheck.execute_query_raw',
            return_value=expected_storage):
        check = SnowflakeCheck(CHECK_NAME, {}, [instance])
        check._conn = mock.MagicMock()
        check._query_manager.queries = [Query(queries.StorageUsageMetrics)]
        dd_run_check(check)

    aggregator.assert_metric('snowflake.storage.storage_bytes.total',
                             value=0.0,
                             tags=EXPECTED_TAGS)
    aggregator.assert_metric('snowflake.storage.stage_bytes.total',
                             value=1206.0,
                             tags=EXPECTED_TAGS)
    aggregator.assert_metric('snowflake.storage.failsafe_bytes.total',
                             value=19.2,
                             tags=EXPECTED_TAGS)
def test_aggregate_last_24_hours_queries(aggregate_last_24_hours,
                                         expected_query):
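    # Parametrized fixtures: the expected replication query depends on the
    # aggregate_last_24_hours option.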
    inst = {
        'metric_groups': ['snowflake.replication'],
        'username': '******',
        'password': '******',
        'account': 'account',
        'role': 'role',
    }
    inst['aggregate_last_24_hours'] = aggregate_last_24_hours
    check = SnowflakeCheck(CHECK_NAME, {}, [inst])

    # Only one query configured
    assert check.metric_queries[0]['query'] == expected_query
def test_credit_usage_metrics(dd_run_check, aggregator, instance):
    # type: (Callable[[SnowflakeCheck], None], AggregatorStub, Dict[str, Any]) -> None
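    # Credit usage avg and sum metrics, tagged with service_type and service.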

    expected_credit_usage = [(
        'WAREHOUSE_METERING',
        'COMPUTE_WH',
        Decimal('12.000000000'),
        Decimal('1.000000000'),
        Decimal('0.80397000'),
        Decimal('0.066997500000'),
        Decimal('12.803970000'),
        Decimal('1.066997500000'),
    )]
    expected_tags = EXPECTED_TAGS + [
        'service_type:WAREHOUSE_METERING', 'service:COMPUTE_WH'
    ]
    with mock.patch(
            'datadog_checks.snowflake.SnowflakeCheck.execute_query_raw',
            return_value=expected_credit_usage):
        check = SnowflakeCheck(CHECK_NAME, {}, [instance])
        check._conn = mock.MagicMock()
        check._query_manager.queries = [queries.CreditUsage]
        dd_run_check(check)

    aggregator.assert_metric('snowflake.billing.cloud_service.sum',
                             count=1,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.billing.cloud_service.avg',
                             count=1,
                             tags=expected_tags)
    aggregator.assert_metric('snowflake.billing.total_credit.sum', count=1)
    aggregator.assert_metric('snowflake.billing.total_credit.avg', count=1)
    aggregator.assert_metric('snowflake.billing.virtual_warehouse.sum',
                             count=1)
    aggregator.assert_metric('snowflake.billing.virtual_warehouse.avg',
                             count=1)
def test_token_path(dd_run_check, aggregator):
    instance = {
        'username': '******',
        'account': 'account',
        'role': 'ACCOUNTADMIN',
        'authenticator': 'oauth',
        'token_path': '/path/to/token',
    }

    default_args = {
        'user': '******',
        'password': None,
        'account': 'account',
        'database': 'SNOWFLAKE',
        'schema': 'ACCOUNT_USAGE',
        'warehouse': None,
        'role': 'ACCOUNTADMIN',
        'passcode_in_password': False,
        'passcode': None,
        'client_prefetch_threads': 4,
        'login_timeout': 60,
        'ocsp_response_cache_filename': None,
        'authenticator': 'oauth',
        'client_session_keep_alive': False,
        'private_key': None,
        'proxy_host': None,
        'proxy_port': None,
        'proxy_user': None,
        'proxy_password': None,
    }

    tokens = ['mytoken1', 'mytoken2', 'mytoken3']

    check = SnowflakeCheck(CHECK_NAME, {}, [instance])
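    # open() is patched to return a different token on each read: the token
    # file should be re-read on every check run.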
    with mock.patch(
            'datadog_checks.snowflake.check.open',
            side_effect=[
                mock.mock_open(read_data=tok).return_value for tok in tokens
            ],
    ), mock.patch('datadog_checks.snowflake.check.sf') as sf:
        dd_run_check(check)
        sf.connect.assert_called_once_with(token='mytoken1', **default_args)

        dd_run_check(check)
        sf.connect.assert_called_with(token='mytoken2', **default_args)

        dd_run_check(check)
        sf.connect.assert_called_with(token='mytoken3', **default_args)
def test_default_metric_groups(instance):
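    # Without explicit configuration, the default metric groups and their
    # corresponding queries are used.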
    check = SnowflakeCheck(CHECK_NAME, {}, [instance])
    assert check.config.metric_groups == [
        'snowflake.query',
        'snowflake.billing',
        'snowflake.storage',
        'snowflake.logins',
    ]

    assert check.metric_queries == [
        queries.WarehouseLoad,
        queries.QueryHistory,
        queries.CreditUsage,
        queries.WarehouseCreditUsage,
        queries.StorageUsageMetrics,
        queries.LoginMetrics,
    ]
def test_key_auth(dd_run_check, instance):
    # Key auth
    inst = copy.deepcopy(instance)
    inst['private_key_path'] = os.path.join(os.path.dirname(__file__), 'keys',
                                            'rsa_key_example.p8')

    check = SnowflakeCheck(CHECK_NAME, {}, [inst])
    # Check the key length rather than comparing the full key contents
    read_key = check.read_key()
    assert len(read_key) == 1216

    with mock.patch('datadog_checks.snowflake.check.sf') as sf:
        check = SnowflakeCheck(CHECK_NAME, {}, [inst])
        dd_run_check(check)
        sf.connect.assert_called_with(
            user='******',
            password='******',
            account='test_acct.us-central1.gcp',
            database='SNOWFLAKE',
            schema='ACCOUNT_USAGE',
            warehouse=None,
            role='ACCOUNTADMIN',
            passcode_in_password=False,
            passcode=None,
            client_prefetch_threads=4,
            login_timeout=3,
            ocsp_response_cache_filename=None,
            authenticator='snowflake',
            token=None,
            private_key=read_key,
            client_session_keep_alive=False,
            proxy_host=None,
            proxy_port=None,
            proxy_user=None,
            proxy_password=None,
        )

    inst['private_key_path'] = os.path.join(os.path.dirname(__file__), 'keys',
                                            'wrong_key.p8')
    check = SnowflakeCheck(CHECK_NAME, {}, [inst])
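    # A nonexistent key file should raise FileNotFoundError.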
    with pytest.raises(FileNotFoundError):
        check.read_key()

    # Read key protected by a passphrase
    inst['private_key_path'] = os.path.join(os.path.dirname(__file__), 'keys',
                                            'rsa_key_pass_example.p8')
    inst['private_key_password'] = '******'
    check = SnowflakeCheck(CHECK_NAME, {}, [inst])
    assert len(check.read_key()) == 1218
def test_default_authentication(instance):
    # Test default auth
    check = SnowflakeCheck(CHECK_NAME, {}, [instance])
    assert check.config.authenticator == 'snowflake'
def test_mixed_metric_group(instance):
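    # Unknown metric groups are dropped; only the valid group's query is kept.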
    instance = copy.deepcopy(instance)
    instance['metric_groups'] = ['fake.metric.group', 'snowflake.logins']
    check = SnowflakeCheck(CHECK_NAME, {}, [instance])
    assert check.metric_queries == [queries.LoginMetrics]