Example #1
def test_happy_path_with_an_only_metrics_list(aggregator):
    config = _build_instance("empty",
                             stats_endpoint="http://localhost:9999",
                             only_metrics=[r"^libbeat.kafka", r"truncated$"])
    check = FilebeatCheck("filebeat", {}, [config])
    tags = ["stats_endpoint:http://localhost:9999"]

    with mock_request():
        check.check(config)

    with mock_request({
            "libbeat.logstash.published_and_acked_events": 1138956,
            "libbeat.kafka.published_and_acked_events": 12
    }):
        check.check(config)

    # these metrics shouldn't have been reported, because they don't match
    # any regex in the "only_metrics" list
    aggregator.assert_metric("libbeat.logstash.published_and_acked_events",
                             count=0)
    aggregator.assert_metric("filebeat.harvester.running", count=0)

    # but these 4 should have
    aggregator.assert_metric("libbeat.kafka.published_and_acked_events",
                             metric_type=aggregator.COUNTER,
                             value=12,
                             tags=tags)
    aggregator.assert_metric("libbeat.kafka.published_but_not_acked_events",
                             metric_type=aggregator.COUNTER,
                             value=0,
                             tags=tags)
    aggregator.assert_metric("libbeat.kafka.call_count.PublishEvents",
                             metric_type=aggregator.COUNTER,
                             value=0,
                             tags=tags)
    aggregator.assert_metric("filebeat.harvester.files.truncated",
                             metric_type=aggregator.COUNTER,
                             value=0,
                             tags=tags)
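
These examples lean on a couple of helpers that are not shown on this page: _build_instance, which builds a check instance pointing at a named registry fixture, and mock_request, a context manager that stubs out the HTTP stats endpoint. Below is only a minimal sketch of what such helpers could look like; the fixture layout, the patch target, and the default payload values (inferred from the assertions above) are assumptions, not the integration's actual test code.

# Hypothetical sketch of the helpers assumed by these examples -- the real
# test suite ships its own versions; paths, patch target and default payload
# values are assumptions for illustration only.
import os
from contextlib import contextmanager
from unittest import mock

HERE = os.path.dirname(os.path.abspath(__file__))


def _build_instance(registry_file_name, **kwargs):
    # Point the check at a registry fixture and merge any extra options
    # (stats_endpoint, only_metrics, normalize_metrics, ...).
    instance = {"registry_file_path": os.path.join(HERE, "fixtures", registry_file_name)}
    instance.update(kwargs)
    return instance


@contextmanager
def mock_request(metrics=None):
    # Serve a canned stats payload, optionally overridden per test.
    # Defaults are inferred from the assertions in these examples
    # (a harvester gauge of 10, and a logstash counter whose second-run
    # delta comes out to 28).
    payload = {
        "filebeat.harvester.running": 10,
        "libbeat.logstash.published_and_acked_events": 1138928,
    }
    payload.update(metrics or {})
    response = mock.Mock(status_code=200)
    response.json.return_value = payload
    with mock.patch("requests.get", return_value=response):
        yield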
Example #2
def test_normalize_metrics(aggregator):
    config = _build_instance("empty", stats_endpoint="http://localhost:9999", normalize_metrics=True)
    check = FilebeatCheck("filebeat", {}, {})
    tags = ["stats_endpoint:http://localhost:9999"]

    with mock_request():
        check.check(config)

    aggregator.assert_metric("filebeat.harvester.running", metric_type=aggregator.GAUGE, tags=tags)

    with mock_request(
        {"filebeat.libbeat.logstash.published_and_acked_events": 1138956, "filebeat.harvester.running": 9}
    ):
        check.check(config)

    aggregator.assert_metric(
        "filebeat.libbeat.logstash.published_and_acked_events", metric_type=aggregator.COUNTER, tags=tags
    )
    aggregator.assert_metric(
        "filebeat.libbeat.kafka.published_and_acked_events", metric_type=aggregator.COUNTER, tags=tags
    )
    aggregator.assert_metric("filebeat.harvester.running", metric_type=aggregator.GAUGE, tags=tags)
def test_normalize_metrics_with_an_only_metrics_list(aggregator):
    config = _build_instance(
        "empty",
        stats_endpoint="http://localhost:9999",
        only_metrics=[r"^libbeat.kafka", r"truncated$"],
        normalize_metrics=True,
    )
    check = FilebeatCheck("filebeat", {}, {})
    tags = ["stats_endpoint:http://localhost:9999"]

    with mock_request():
        check.check(config)

    with mock_request({
            "filebeat.libbeat.logstash.published_and_acked_events": 1138956,
            "libbeat.kafka.published_and_acked_events": 12,
    }):
        check.check(config)

    aggregator.assert_metric(
        "filebeat.libbeat.kafka.published_and_acked_events",
        metric_type=aggregator.COUNTER,
        value=12,
        tags=tags)
    aggregator.assert_metric(
        "filebeat.libbeat.kafka.published_but_not_acked_events",
        metric_type=aggregator.COUNTER,
        value=0,
        tags=tags)
    aggregator.assert_metric("filebeat.libbeat.kafka.call_count.PublishEvents",
                             metric_type=aggregator.COUNTER,
                             value=0,
                             tags=tags)
    aggregator.assert_metric("filebeat.harvester.files.truncated",
                             metric_type=aggregator.COUNTER,
                             value=0,
                             tags=tags)
def test_happy_path(aggregator):
    config = _build_instance("empty", stats_endpoint="http://localhost:9999")
    check = FilebeatCheck("filebeat", {}, {})
    tags = ["stats_endpoint:http://localhost:9999"]

    # the first run shouldn't yield any increment metric, but it should
    # still report the gauge metrics
    with mock_request():
        check.check(config)

    aggregator.assert_metric("libbeat.logstash.published_and_acked_events",
                             count=0)
    aggregator.assert_metric("filebeat.harvester.running",
                             metric_type=aggregator.GAUGE,
                             value=10,
                             tags=tags)

    # now the second run should have all the increment metrics as well
    with mock_request({
            "libbeat.logstash.published_and_acked_events": 1138956,
            "filebeat.harvester.running": 9
    }):
        check.check(config)

    aggregator.assert_metric("libbeat.logstash.published_and_acked_events",
                             metric_type=aggregator.COUNTER,
                             value=28,
                             tags=tags)
    aggregator.assert_metric("libbeat.kafka.published_and_acked_events",
                             metric_type=aggregator.COUNTER,
                             value=0,
                             tags=tags)
    aggregator.assert_metric("filebeat.harvester.running",
                             metric_type=aggregator.GAUGE,
                             value=9,
                             tags=tags)
def _assert_config_raises(profiler_config, expected_substring):
    bad_config = _build_instance(profiler_config)
    check = FilebeatCheck("filebeat", {}, {})
    with pytest.raises(Exception) as excinfo:
        check.check(bad_config)
    assert expected_substring in str(excinfo.value)
def test_when_the_http_connection_is_refused(aggregator):
    config = _build_instance("empty", stats_endpoint="http://0.28.28.0:9999")
    check = FilebeatCheck("filebeat", {}, {})
    check.check(config)
    aggregator.assert_metric("filebeat.harvester.running", count=0)
Example #7
def test_bad_config():
    check = FilebeatCheck("filebeat", {}, {})
    with pytest.raises(Exception) as excinfo:
        check.check({})
    assert "an absolute path to a filebeat registry path must be specified" in str(excinfo.value)
Example #8
def test_default_timeout(init_config, instance, expected_timeout):
    check = FilebeatCheck("filebeat", init_config, [instance])
    assert check.http.options['timeout'] == expected_timeout
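
test_default_timeout takes init_config, instance and expected_timeout as arguments, so it is presumably driven by a pytest parametrization. A sketch of what that decorator could look like follows; the instances and timeout values are illustrative placeholders, and the tuple form assumes the base check's RequestsWrapper keeps timeouts as (connect, read) pairs.

# Assumed parametrization for test_default_timeout; the instances and
# expected values are illustrative placeholders only.
import pytest

REGISTRY = {"registry_file_path": "/var/lib/filebeat/registry"}  # assumed instance


@pytest.mark.parametrize(
    "init_config, instance, expected_timeout",
    [
        pytest.param({}, REGISTRY, (10, 10), id="default"),
        pytest.param({"timeout": 5}, REGISTRY, (5, 5), id="init_config_override"),
        pytest.param({}, dict(REGISTRY, timeout=2), (2, 2), id="instance_override"),
    ],
)
def test_default_timeout(init_config, instance, expected_timeout):
    check = FilebeatCheck("filebeat", init_config, [instance])
    assert check.http.options["timeout"] == expected_timeout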
Example #9
def test_missing_source_file(aggregator):
    config = _build_instance("missing_source_file")
    check = FilebeatCheck("filebeat", {}, [config])
    check.check(config)
    aggregator.assert_metric("filebeat.registry.unprocessed_bytes", count=0)
Example #10
def test_missing_registry_file(aggregator):
    config = _build_instance("i_dont_exist")
    check = FilebeatCheck("filebeat", {}, [config])
    # tests that it simply silently ignores it
    check.check(config)
    aggregator.assert_metric("filebeat.registry.unprocessed_bytes", count=0)
Example #11
def test_source_file_device_has_changed(aggregator):
    check = FilebeatCheck("filebeat", {}, {})
    with mocked_os_stat({"/test_dd_agent/var/log/syslog": mocked_file_stats(1024917, 152171, 51714)}):
        check.check(_build_instance("single_source"))
    aggregator.assert_metric("filebeat.registry.unprocessed_bytes", count=0)