Example #1
def test_check_kafka(aggregator, kafka_instance):
    """
    Testing Kafka_consumer check.
    """
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, {})
    kafka_consumer_check.check(kafka_instance)

    for name, consumer_group in kafka_instance['consumer_groups'].items():
        for topic, partitions in consumer_group.items():
            for partition in partitions:
                tags = ["topic:{}".format(topic), "partition:{}".format(partition)] + ['optional:tag1']
                for mname in BROKER_METRICS:
                    aggregator.assert_metric(mname, tags=tags, at_least=1)
                for mname in CONSUMER_METRICS:
                    aggregator.assert_metric(
                        mname,
                        tags=tags + ["source:kafka", "consumer_group:{}".format(name)],
                        at_least=1,
                    )

    # Re-assert for __consumer_offsets, which spans multiple partitions
    aggregator.assert_metric('kafka.broker_offset', at_least=1)
    aggregator.assert_all_metrics_covered()
Example #2
def test_multiple_servers_zk(aggregator, zk_instance):
    """
    Testing Kafka_consumer check.
    """
    multiple_server_zk_instance = copy.deepcopy(zk_instance)
    multiple_server_zk_instance['kafka_connect_str'] = [
        multiple_server_zk_instance['kafka_connect_str'],
        '{}:9092'.format(HOST),
    ]

    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [{}])
    kafka_consumer_check.check(multiple_server_zk_instance)

    for name, consumer_group in multiple_server_zk_instance['consumer_groups'].items():
        for topic, partitions in consumer_group.items():
            for partition in partitions:
                tags = ["topic:{}".format(topic), "partition:{}".format(partition)]
                for mname in BROKER_METRICS:
                    aggregator.assert_metric(mname, tags=tags, at_least=1)
                for mname in CONSUMER_METRICS:
                    aggregator.assert_metric(
                        mname, tags=tags + ["source:zk", "consumer_group:{}".format(name)], at_least=1
                    )

    aggregator.assert_all_metrics_covered()
Example #3
def test_check_nogroups_zk(aggregator, zk_instance):
    """
    Testing Kafka_consumer check grabbing groups from ZK
    """
    nogroup_instance = copy.deepcopy(zk_instance)
    nogroup_instance.pop('consumer_groups')
    nogroup_instance['monitor_unlisted_consumer_groups'] = True

    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, {})
    kafka_consumer_check.check(nogroup_instance)

    for topic in TOPICS:
        if topic != '__consumer_offsets':
            for partition in PARTITIONS:
                tags = ["topic:{}".format(topic), "partition:{}".format(partition)]
                for mname in BROKER_METRICS:
                    aggregator.assert_metric(mname, tags=tags, at_least=1)
                for mname in CONSUMER_METRICS:
                    aggregator.assert_metric(
                        mname,
                        tags=tags + ['source:zk', 'consumer_group:my_consumer'],
                        at_least=1,
                    )
        else:
            for mname in BROKER_METRICS + CONSUMER_METRICS:
                aggregator.assert_metric(mname, at_least=1)

    aggregator.assert_all_metrics_covered()
Example #4
def test_uses_new_implementation_when_new_version_specified(kafka_instance):
    instance = copy.deepcopy(kafka_instance)
    instance['kafka_client_api_version'] = '0.10.2'
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [instance])
    kafka_consumer_check._init_check_based_on_kafka_version()

    assert isinstance(kafka_consumer_check.sub_check, NewKafkaConsumerCheck)
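The dispatch asserted here is simple version gating. A minimal sketch of the idea, assuming (as the test suggests) that 0.10.2 is the cutoff between the legacy and new implementations; pick_sub_check is a hypothetical stand-in for _init_check_based_on_kafka_version:

def pick_sub_check(api_version_str):
    # Parse "major.minor.patch" into a comparable tuple of ints.
    version = tuple(int(part) for part in api_version_str.split('.'))
    # Hypothetical cutoff, inferred from the assertion above.
    return 'new' if version >= (0, 10, 2) else 'legacy'

assert pick_sub_check('0.10.2') == 'new'
assert pick_sub_check('0.9.0') == 'legacy'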
Example #5
def test_can_send_event(aggregator, kafka_instance, dd_run_check):
    """
    Testing Kafka_consumer check.
    """
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [kafka_instance])
    kafka_consumer_check.send_event("test", "test", [], "test", "test")
    aggregator.assert_event("test", exact_match=False, count=1)
Example #6
def test_check_zk_basic_case_integration(aggregator, zk_instance):
    """
    Testing Kafka_consumer check.
    """
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [zk_instance])
    kafka_consumer_check.check(zk_instance)

    _assert_check_zk_basic_case(aggregator, zk_instance)
Example #7
def test_check_kafka(aggregator, kafka_instance):
    """
    Testing Kafka_consumer check.
    """
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [kafka_instance])
    kafka_consumer_check.check(kafka_instance)

    assert_check_kafka(aggregator, kafka_instance['consumer_groups'])
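assert_check_kafka is presumably the shared helper factored out of the inline loop shown in Example #14 below; a sketch under that assumption (BROKER_METRICS and CONSUMER_METRICS are the module-level metric lists used throughout these tests):

def assert_check_kafka(aggregator, consumer_groups):
    for name, consumer_group in consumer_groups.items():
        for topic, partitions in consumer_group.items():
            for partition in partitions:
                tags = ["topic:{}".format(topic), "partition:{}".format(partition)]
                for mname in BROKER_METRICS:
                    aggregator.assert_metric(mname, tags=tags, at_least=1)
                for mname in CONSUMER_METRICS:
                    aggregator.assert_metric(
                        mname, tags=tags + ["consumer_group:{}".format(name)], at_least=1
                    )
    aggregator.assert_all_metrics_covered()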
Example #8
def test_consumer_config_error(caplog):
    instance = {
        'kafka_connect_str': KAFKA_CONNECT_STR,
        'kafka_consumer_offsets': True,
        'tags': ['optional:tag1']
    }
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [instance])
    kafka_consumer_check.check(instance)
    assert 'monitor_unlisted_consumer_groups is False' in caplog.text
Example #9
def test_no_topics(aggregator, kafka_instance):
    kafka_instance['consumer_groups'] = {'my_consumer': {}}
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [kafka_instance])
    kafka_consumer_check.check(kafka_instance)

    if is_legacy_check(kafka_consumer_check):
        pytest.skip("This test does not apply to legacy check")

    assert_check_kafka(aggregator, {'my_consumer': {'marvel': [0]}})
Example #10
def test_check_kafka_metrics_limit(aggregator, kafka_instance):
    """
    Testing Kafka_consumer check.
    """
    kafka_consumer_check = KafkaCheck('kafka_consumer', {'max_partition_contexts': 1}, [kafka_instance])
    kafka_consumer_check.check(kafka_instance)

    assert len(aggregator._metrics) == 1
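What max_partition_contexts bounds is the number of distinct metric contexts (name plus tag set) the check may submit. A hypothetical sketch of such a budget; the names below are illustrative, not the check's internals:

def submit_limited(check, seen_contexts, max_contexts, name, value, tags):
    context = (name, tuple(sorted(tags)))
    if context not in seen_contexts:
        if len(seen_contexts) >= max_contexts:
            return  # budget spent: drop new contexts instead of submitting them
        seen_contexts.add(context)
    check.gauge(name, value, tags=tags)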
Example #11
def test_tls_config_legacy(extra_config, expected_http_kwargs, kafka_instance):
    instance = kafka_instance
    instance.update(extra_config)

    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [instance])

    kafka_consumer_check.get_tls_context()
    actual_options = {
        k: v
        for k, v in kafka_consumer_check._tls_context_wrapper.config.items()
        if k in expected_http_kwargs
    }
    assert expected_http_kwargs == actual_options
Example #12
def test_consumer_config_error(caplog):
    instance = {
        'kafka_connect_str': KAFKA_CONNECT_STR,
        'kafka_consumer_offsets': True,
        'tags': ['optional:tag1']
    }
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [instance])

    if is_legacy_check(kafka_consumer_check):
        pytest.skip("This test does not apply to legacy check")

    kafka_consumer_check.check(instance)
    assert 'monitor_unlisted_consumer_groups is False' in caplog.text
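The asserted log line comes from a configuration sanity check: collecting consumer offsets requires either an explicit consumer_groups mapping or monitor_unlisted_consumer_groups=True. A hedged sketch of that validation (validate_consumer_config is a hypothetical helper):

def validate_consumer_config(instance, log):
    if instance.get('kafka_consumer_offsets') and not instance.get('consumer_groups'):
        if not instance.get('monitor_unlisted_consumer_groups'):
            log.warning(
                "Cannot fetch consumer offsets: no consumer_groups are specified "
                "and monitor_unlisted_consumer_groups is False."
            )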
Example #13
def test_version_metadata(datadog_agent, kafka_instance, dd_run_check):
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [kafka_instance])
    kafka_consumer_check.check_id = 'test:123'

    kafka_client = kafka_consumer_check.create_kafka_client()
    version_data = [str(part) for part in kafka_client.check_version()]
    kafka_client.close()
    version_parts = {
        'version.{}'.format(name): part
        for name, part in zip(('major', 'minor', 'patch'), version_data)
    }
    version_parts['version.scheme'] = 'semver'
    version_parts['version.raw'] = '.'.join(version_data)

    dd_run_check(kafka_consumer_check)
    datadog_agent.assert_metadata('test:123', version_parts)
Example #14
def test_check_kafka(aggregator, kafka_instance):
    """
    Testing Kafka_consumer check.
    """
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [kafka_instance])
    kafka_consumer_check.check(kafka_instance)

    for name, consumer_group in kafka_instance['consumer_groups'].items():
        for topic, partitions in consumer_group.items():
            for partition in partitions:
                tags = ["topic:{}".format(topic), "partition:{}".format(partition)] + ['optional:tag1']
                for mname in BROKER_METRICS:
                    aggregator.assert_metric(mname, tags=tags, at_least=1)
                for mname in CONSUMER_METRICS:
                    aggregator.assert_metric(mname, tags=tags + ["consumer_group:{}".format(name)], at_least=1)

    aggregator.assert_all_metrics_covered()
Example #15
def test_tls_config_ok(kafka_instance_tls):
    with mock.patch('datadog_checks.base.utils.tls.ssl') as ssl:
        with mock.patch('kafka.KafkaClient') as kafka_client:

            # mock Kafka Client
            kafka_client.return_value = mock.MagicMock()

            # mock TLS context
            tls_context = mock.MagicMock()
            ssl.SSLContext.return_value = tls_context

            kafka_consumer_check = KafkaCheck('kafka_consumer', {},
                                              [kafka_instance_tls])
            kafka_consumer_check._create_kafka_client(clazz=kafka_client)

            assert tls_context.check_hostname is True
            assert tls_context.tls_cert is not None
            assert kafka_consumer_check.create_kafka_client is not None
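A minimal sketch of how such a TLS context could be assembled from instance options, assuming standard tls_* keys; this is illustrative, not the check's exact code path:

import ssl

def make_tls_context(instance):
    # PROTOCOL_TLS_CLIENT enables check_hostname and CERT_REQUIRED by default.
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    if not instance.get('tls_verify', True):
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
    if instance.get('tls_ca_cert'):
        context.load_verify_locations(cafile=instance['tls_ca_cert'])
    if instance.get('tls_cert'):
        context.load_cert_chain(instance['tls_cert'], keyfile=instance.get('tls_private_key'))
    return context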
Example #16
def test_gssapi(kafka_instance, dd_run_check):
    instance = copy.deepcopy(kafka_instance)
    instance['kafka_client_api_version'] = '0.10.2'
    instance['sasl_mechanism'] = 'GSSAPI'
    instance['security_protocol'] = 'SASL_PLAINTEXT'
    instance['sasl_kerberos_service_name'] = 'kafka'
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [instance])
    # assert the check doesn't fail with:
    # Exception: Could not find main GSSAPI shared library.
    with pytest.raises(Exception, match='check_version'):
        dd_run_check(kafka_consumer_check)
Example #17
def test_multiple_servers_zk(kafka_cluster, kafka_producer, zk_consumer,
                             zk_instance, aggregator):
    """
    Testing Kafka_consumer check.
    """
    if not is_supported(['zookeeper']):
        pytest.skip(
            "zookeeper consumer offsets not supported in current environment")

    if not kafka_producer.is_alive():
        kafka_producer.start()
        time.sleep(5)

    if not zk_consumer.is_alive():
        zk_consumer.start()
        time.sleep(5)

    multiple_server_zk_instance = copy.deepcopy(zk_instance)
    multiple_server_zk_instance['kafka_connect_str'] = [
        multiple_server_zk_instance['kafka_connect_str'],
        '{}:9092'.format(HOST)
    ]

    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, {})
    kafka_consumer_check.check(multiple_server_zk_instance)

    for name, consumer_group in multiple_server_zk_instance['consumer_groups'].items():
        for topic, partitions in consumer_group.items():
            for partition in partitions:
                tags = ["topic:{}".format(topic), "partition:{}".format(partition)]
                for mname in BROKER_METRICS:
                    aggregator.assert_metric(mname, tags=tags, at_least=1)
                for mname in CONSUMER_METRICS:
                    aggregator.assert_metric(
                        mname,
                        tags=tags + ["source:zk", "consumer_group:{}".format(name)],
                        at_least=1,
                    )
Example #18
def test_check_nogroups_zk(kafka_cluster, kafka_producer, zk_consumer,
                           zk_instance, aggregator):
    """
    Testing Kafka_consumer check grabbing groups from ZK
    """
    if not is_supported(['zookeeper']):
        pytest.skip(
            "zookeeper consumer offsets not supported in current environment")

    if not kafka_producer.is_alive():
        kafka_producer.start()
        time.sleep(5)

    if not zk_consumer.is_alive():
        zk_consumer.start()
        time.sleep(5)

    nogroup_instance = copy.deepcopy(zk_instance)
    nogroup_instance.pop('consumer_groups')
    nogroup_instance['monitor_unlisted_consumer_groups'] = True

    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, {})
    kafka_consumer_check.check(nogroup_instance)

    for topic in TOPICS:
        if topic != '__consumer_offsets':
            for partition in PARTITIONS:
                tags = ["topic:{}".format(topic), "partition:{}".format(partition)]
                for mname in BROKER_METRICS:
                    aggregator.assert_metric(mname, tags=tags, at_least=1)
                for mname in CONSUMER_METRICS:
                    aggregator.assert_metric(
                        mname,
                        tags=tags + ["source:zk", "consumer_group:my_consumer"],
                        at_least=1,
                    )
        else:
            for mname in BROKER_METRICS + CONSUMER_METRICS:
                aggregator.assert_metric(mname, at_least=1)
Example #19
def test_get_interpolated_timestamp(kafka_instance):
    instance = copy.deepcopy(kafka_instance)
    instance['kafka_client_api_version'] = '0.10.2'
    instance['sasl_kerberos_service_name'] = 'kafka'
    check = KafkaCheck('kafka_consumer', {}, [instance])
    check._init_check_based_on_kafka_version()
    # At offset 0 the time is 100 s; at offset 10 it is 200 s.
    # By linear interpolation, the time at offset 5 should be 150 s.
    assert check.sub_check._get_interpolated_timestamp({0: 100, 10: 200}, 5) == 150
    assert check.sub_check._get_interpolated_timestamp({10: 100, 20: 200}, 5) == 50
    assert check.sub_check._get_interpolated_timestamp({0: 100, 10: 200}, 15) == 250
    assert check.sub_check._get_interpolated_timestamp({10: 200}, 15) is None
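The behavior these assertions pin down is linear inter/extrapolation through the two known (offset, timestamp) points nearest the requested offset, returning None when fewer than two points are known. A standalone sketch; interpolate_timestamp is a hypothetical stand-in for the private helper:

def interpolate_timestamp(offsets_to_ts, offset):
    if len(offsets_to_ts) < 2:
        return None  # a single point cannot define a line
    # Take the two known offsets closest to the requested one.
    o1, o2 = sorted(offsets_to_ts, key=lambda o: abs(o - offset))[:2]
    t1, t2 = offsets_to_ts[o1], offsets_to_ts[o2]
    slope = (t2 - t1) / (o2 - o1)
    return t1 + slope * (offset - o1)

assert interpolate_timestamp({0: 100, 10: 200}, 5) == 150
assert interpolate_timestamp({10: 100, 20: 200}, 5) == 50
assert interpolate_timestamp({0: 100, 10: 200}, 15) == 250
assert interpolate_timestamp({10: 200}, 15) is None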
Example #20
def test_check_zk(aggregator, zk_instance):
    """
    Testing Kafka_consumer check.
    """
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, {})
    kafka_consumer_check.check(zk_instance)

    for name, consumer_group in zk_instance['consumer_groups'].items():
        for topic, partitions in consumer_group.items():
            for partition in partitions:
                tags = ["topic:{}".format(topic), "partition:{}".format(partition)]
                for mname in BROKER_METRICS:
                    aggregator.assert_metric(mname, tags=tags, at_least=1)
                for mname in CONSUMER_METRICS:
                    aggregator.assert_metric(
                        mname, tags=tags + ["source:zk", "consumer_group:{}".format(name)], at_least=1
                    )

    # Re-assert for __consumer_offsets, which spans multiple partitions
    aggregator.assert_metric('kafka.broker_offset', at_least=1)
    aggregator.assert_all_metrics_covered()

    all_partitions = {
        'kafka_connect_str': KAFKA_CONNECT_STR,
        'zk_connect_str': ZK_CONNECT_STR,
        'consumer_groups': {'my_consumer': {'marvel': []}},
    }
    kafka_consumer_check.check(all_partitions)
    aggregator.assert_metric(
        'kafka.consumer_offset',
        tags=['topic:marvel', 'partition:0', 'consumer_group:my_consumer', 'source:zk'],
        at_least=1,
    )
    aggregator.assert_metric(
        'kafka.consumer_offset',
        tags=['topic:marvel', 'partition:1', 'consumer_group:my_consumer', 'source:zk'],
        at_least=1,
    )
Example #21
def test_check_no_partitions_zk(aggregator, zk_instance):
    """
    Testing Kafka_consumer check grabbing partitions from ZK
    """
    no_partitions_instance = copy.deepcopy(zk_instance)
    topic = 'marvel'
    no_partitions_instance['consumer_groups'] = {'my_consumer': {topic: []}}

    kafka_consumer_check = KafkaCheck('kafka_consumer', {},
                                      [no_partitions_instance])
    kafka_consumer_check.check(no_partitions_instance)

    for partition in PARTITIONS:
        tags = ["topic:{}".format(topic), "partition:{}".format(partition)]
        for mname in BROKER_METRICS:
            aggregator.assert_metric(mname, tags=tags, at_least=1)
        for mname in CONSUMER_METRICS:
            aggregator.assert_metric(
                mname,
                tags=tags + ['source:zk', 'consumer_group:my_consumer'],
                at_least=1)

    aggregator.assert_all_metrics_covered()
Example #22
def test_check_zk(kafka_cluster, kafka_producer, zk_consumer, zk_instance,
                  aggregator):
    """
    Testing Kafka_consumer check.
    """
    if not is_supported(['zookeeper']):
        pytest.skip(
            "zookeeper consumer offsets not supported in current environment")

    if not kafka_producer.is_alive():
        kafka_producer.start()
        time.sleep(5)

    if not zk_consumer.is_alive():
        zk_consumer.start()
        time.sleep(5)

    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, {})
    kafka_consumer_check.check(zk_instance)

    for name, consumer_group in zk_instance['consumer_groups'].items():
        for topic, partitions in consumer_group.items():
            for partition in partitions:
                tags = ["topic:{}".format(topic), "partition:{}".format(partition)]
                for mname in BROKER_METRICS:
                    aggregator.assert_metric(mname, tags=tags, at_least=1)
                for mname in CONSUMER_METRICS:
                    aggregator.assert_metric(
                        mname,
                        tags=tags + ["source:zk", "consumer_group:{}".format(name)],
                        at_least=1,
                    )

    # Re-assert for __consumer_offsets, which spans multiple partitions
    aggregator.assert_metric('kafka.broker_offset', at_least=1)
Example #23
def test_should_zk():
    check = KafkaCheck('kafka_consumer', {}, {})
    # kafka_consumer_offsets is True and this zk_connect_str has not been crawled yet
    assert check._should_zk([ZK_CONNECT_STR, ZK_CONNECT_STR], 10, True) is True
    # kafka_consumer_offsets is False, so ZK should be crawled immediately
    assert check._should_zk(ZK_CONNECT_STR, 10, False) is True
    # ZK_CONNECT_STR was last checked less than an interval ago, so ZK should be skipped
    zk_connect_hash = hash_mutable(ZK_CONNECT_STR)
    check._zk_last_ts[zk_connect_hash] = time.time()
    assert check._should_zk(ZK_CONNECT_STR, 100, True) is False
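The throttling described by these assertions: crawl ZooKeeper immediately when Kafka-based offsets are disabled, otherwise only when the same connect string has not been crawled within the last interval seconds. A hedged sketch with illustrative names:

import time

class ZkThrottle:
    def __init__(self):
        self._zk_last_ts = {}

    def should_zk(self, zk_connect_str, interval, kafka_offsets_enabled):
        if not kafka_offsets_enabled:
            return True  # ZK is the only offset source, so always crawl
        key = hash(str(zk_connect_str))  # stand-in for hash_mutable()
        last = self._zk_last_ts.get(key, 0)
        return time.time() - last > interval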
Example #24
def test_no_partitions(aggregator, kafka_instance):
    kafka_instance['consumer_groups'] = {'my_consumer': {'marvel': []}}
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [kafka_instance])
    kafka_consumer_check.check(kafka_instance)

    assert_check_kafka(aggregator, {'my_consumer': {'marvel': [0]}})
Example #25
def test_no_topics(aggregator, kafka_instance, dd_run_check):
    kafka_instance['consumer_groups'] = {'my_consumer': {}}
    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [kafka_instance])
    dd_run_check(kafka_consumer_check)

    assert_check_kafka(aggregator, {'my_consumer': {'marvel': [0]}})