def test_normalize_metrics(aggregator):
    """With normalize_metrics on, reported names carry the "filebeat." prefix."""
    instance = _build_instance("empty", stats_endpoint="http://localhost:9999", normalize_metrics=True)
    filebeat = FilebeatCheck("filebeat", {}, {})
    expected_tags = ["stats_endpoint:http://localhost:9999"]

    # first run: gauges only (no deltas yet)
    with mock_request():
        filebeat.check(instance)

    aggregator.assert_metric("filebeat.harvester.running", metric_type=aggregator.GAUGE, tags=expected_tags)

    # second run: counters should now be emitted too
    second_payload = {
        "filebeat.libbeat.logstash.published_and_acked_events": 1138956,
        "filebeat.harvester.running": 9,
    }
    with mock_request(second_payload):
        filebeat.check(instance)

    aggregator.assert_metric(
        "filebeat.libbeat.logstash.published_and_acked_events", metric_type=aggregator.COUNTER, tags=expected_tags
    )
    aggregator.assert_metric(
        "filebeat.libbeat.kafka.published_and_acked_events", metric_type=aggregator.COUNTER, tags=expected_tags
    )
    aggregator.assert_metric("filebeat.harvester.running", metric_type=aggregator.GAUGE, tags=expected_tags)
# Example #2
def test_happy_path_with_an_only_metrics_list(aggregator):
    """Only metric names matching one of the only_metrics regexes get reported."""
    instance = _build_instance(
        "empty", stats_endpoint="http://localhost:9999", only_metrics=[r"^libbeat.kafka", r"truncated$"]
    )
    filebeat = FilebeatCheck("filebeat", {}, {})
    expected_tags = ["stats_endpoint:http://localhost:9999"]

    with mock_request():
        filebeat.check(instance)

    with mock_request(
        {"libbeat.logstash.published_and_acked_events": 1138956, "libbeat.kafka.published_and_acked_events": 12}
    ):
        filebeat.check(instance)

    # these metrics shouldn't have been reported, because they don't match
    # any regex in the "only_metrics" list
    for filtered_out in ("libbeat.logstash.published_and_acked_events", "filebeat.harvester.running"):
        aggregator.assert_metric(filtered_out, count=0)

    # but these 4 should have
    for name, expected_value in (
        ("libbeat.kafka.published_and_acked_events", 12),
        ("libbeat.kafka.published_but_not_acked_events", 0),
        ("libbeat.kafka.call_count.PublishEvents", 0),
        ("filebeat.harvester.files.truncated", 0),
    ):
        aggregator.assert_metric(name, metric_type=aggregator.COUNTER, value=expected_value, tags=expected_tags)
def test_regexes_only_get_compiled_and_run_once():
    """only_metrics patterns are compiled and searched on the first run only."""
    pattern = r"^libbeat.kafka"
    instance = _build_instance("empty", stats_endpoint="http://localhost:9999", only_metrics=[pattern])
    filebeat = FilebeatCheck("filebeat", {}, {})

    with mock_request():
        # the 1st run should compile regexes & run regexes
        with mock.patch.object(re, "compile") as re_compile, mock.patch.object(re, "search") as re_search:
            filebeat.check(instance)

            re_compile.assert_called_once_with(pattern)
            # once per metric name
            assert re_search.call_count == 50

    second_payload = {
        "libbeat.logstash.published_and_acked_events": 1138956,
        "libbeat.kafka.published_and_acked_events": 12,
    }
    with mock_request(second_payload):
        with mock.patch.object(re, "compile") as re_compile, mock.patch.object(re, "search") as re_search:
            filebeat.check(instance)

            # no further regex compiling nor searching should have happened
            assert re_compile.call_count == 0
            assert re_search.call_count == 0
def test_bad_config():
    """An empty instance dict must be rejected with an explicit error message.

    Fix: the assertion was inside the ``pytest.raises`` block; statements
    after the raising call in the ``with`` body never execute, so the
    message was never actually checked. Also, the ``in`` test must be
    against ``str(excinfo.value)`` — membership on the exception object
    itself does not check the message text.
    """
    check = FilebeatCheck("filebeat", {}, {})
    with pytest.raises(Exception) as excinfo:
        check.check({})
    assert "an absolute path to a filebeat registry path must be specified" in str(excinfo.value)
# Example #5
def test_normalize_metrics_with_an_only_metrics_list(aggregator):
    """only_metrics filtering and normalize_metrics prefixing combine correctly."""
    instance = _build_instance(
        "empty",
        stats_endpoint="http://localhost:9999",
        only_metrics=[r"^libbeat.kafka", r"truncated$"],
        normalize_metrics=True,
    )
    filebeat = FilebeatCheck("filebeat", {}, {})
    expected_tags = ["stats_endpoint:http://localhost:9999"]

    with mock_request():
        filebeat.check(instance)

    second_payload = {
        "filebeat.libbeat.logstash.published_and_acked_events": 1138956,
        "libbeat.kafka.published_and_acked_events": 12,
    }
    with mock_request(second_payload):
        filebeat.check(instance)

    # kafka counters survive the filter and get the "filebeat." prefix
    for name, expected_value in (
        ("filebeat.libbeat.kafka.published_and_acked_events", 12),
        ("filebeat.libbeat.kafka.published_but_not_acked_events", 0),
        ("filebeat.libbeat.kafka.call_count.PublishEvents", 0),
        ("filebeat.harvester.files.truncated", 0),
    ):
        aggregator.assert_metric(name, metric_type=aggregator.COUNTER, value=expected_value, tags=expected_tags)
# Example #6
def test_check(aggregator, dd_environment):
    """Two consecutive runs report the gauge twice and the counter once."""
    filebeat = FilebeatCheck("filebeat", {}, {})
    for _ in range(2):
        filebeat.check(dd_environment)
    expected_tags = ["stats_endpoint:{}".format(dd_environment["stats_endpoint"])]
    aggregator.assert_metric("filebeat.harvester.running", metric_type=aggregator.GAUGE, count=2, tags=expected_tags)
    aggregator.assert_metric("libbeat.config.module.starts", metric_type=aggregator.COUNTER, count=1, tags=expected_tags)
def test_source_file_device_has_changed(aggregator):
    """No unprocessed_bytes metric is reported when the file's device id changed."""
    filebeat = FilebeatCheck("filebeat", {}, {})
    fake_stats = {"/test_dd_agent/var/log/syslog": mocked_file_stats(1024917, 152171, 51714)}
    with mocked_os_stat(fake_stats):
        filebeat.check(_build_instance("single_source"))
    aggregator.assert_metric("filebeat.registry.unprocessed_bytes", count=0)
def test_check_fail(aggregator, instance):
    """A bad stats endpoint yields a CRITICAL service check and no metrics."""
    instance['stats_endpoint'] = BAD_ENDPOINT
    failing_check = FilebeatCheck("filebeat", {}, [instance])
    failing_check.check(instance)
    aggregator.assert_service_check("filebeat.can_connect", status=FilebeatCheck.CRITICAL, tags=[])
    assert not aggregator._metrics
def test_when_the_http_call_times_out(aggregator):
    """An HTTP error from the stats endpoint must not produce any metric."""
    instance = _build_instance("empty", stats_endpoint="http://localhost:9999")
    filebeat = FilebeatCheck("filebeat", {}, {})

    failing_response = mock.Mock()
    failing_response.raise_for_status.side_effect = Exception("Error")
    with mock.patch("requests.get", return_value=failing_response):
        filebeat.check(instance)

    aggregator.assert_metric("filebeat.harvester.running", count=0)
# Example #10
def test_with_an_invalid_regex_in_the_only_metrics_list():
    """An invalid only_metrics regex must raise with a descriptive message.

    Fix: the assertion was inside the ``pytest.raises`` block; statements
    after the raising call in the ``with`` body never execute, so the
    expected message was never actually checked. It now runs after the
    block, against ``str(excinfo.value)`` (membership on the exception
    object itself does not check the message text).
    """
    config = _build_instance("empty", stats_endpoint="http://localhost:9999", only_metrics=["invalid regex ["])
    check = FilebeatCheck("filebeat", {}, {})

    expected_message = (
        'Invalid only_metric regex for filebeat: "invalid regex [", ' "error: unexpected end of regular expression"
    )

    with pytest.raises(Exception) as excinfo:
        check.check(config)
    assert expected_message in str(excinfo.value)
# Example #11
def test_ignore_registry(aggregator, instance):
    """With ignore_registry set, a malformed registry file is skipped silently."""
    instance['registry_file_path'] = "malformed_json"
    instance["ignore_registry"] = True
    filebeat = FilebeatCheck("filebeat", {}, [instance])
    # test that it silently ignores the registry file
    # and does the http check
    filebeat.check(instance)
    expected_tags = ["stats_endpoint:{}".format(instance['stats_endpoint'])]
    aggregator.assert_service_check("filebeat.can_connect", status=FilebeatCheck.OK, tags=expected_tags)
# Example #12
def test_instance_tags(aggregator, instance):
    """Instance tags are merged with the generated stats_endpoint tag."""
    instance['registry_file_path'] = "happy_path"
    instance['tags'] = ["foo:bar"]
    filebeat = FilebeatCheck("filebeat", {}, [instance])
    # test that it uses both the instance tags and the
    # `stats_endpoint` tag generated
    filebeat.check(instance)
    expected_tags = instance['tags'] + ["stats_endpoint:{}".format(instance['stats_endpoint'])]
    aggregator.assert_service_check("filebeat.can_connect", status=FilebeatCheck.OK, tags=expected_tags)
# Example #13
def test_with_two_different_instances(aggregator):
    """Each instance keeps its own counter state and stats_endpoint tag."""
    first_instance = _build_instance("empty", stats_endpoint="http://localhost:9999")
    filebeat = FilebeatCheck("filebeat", {}, {})
    expected_tags = ["stats_endpoint:http://localhost:9999"]

    with mock_request():
        filebeat.check(first_instance)

    with mock_request({"libbeat.logstash.published_and_acked_events": 1138956, "filebeat.harvester.running": 9}):
        filebeat.check(first_instance)

    # metrics for the first instance
    aggregator.assert_metric(
        "libbeat.logstash.published_and_acked_events", metric_type=aggregator.COUNTER, value=28, tags=expected_tags
    )
    aggregator.assert_metric(
        "libbeat.kafka.published_and_acked_events", metric_type=aggregator.COUNTER, value=0, tags=expected_tags
    )
    aggregator.assert_metric("filebeat.harvester.running", metric_type=aggregator.GAUGE, value=9, tags=expected_tags)

    second_instance = _build_instance("empty", stats_endpoint="http://localhost:19999", only_metrics=[r"events$"])

    # and for the second
    expected_tags = ["stats_endpoint:http://localhost:19999"]
    with mock_request():
        filebeat.check(second_instance)
    with mock_request({"libbeat.logstash.published_and_acked_events": 1238956, "filebeat.harvester.running": 29}):
        filebeat.check(second_instance)
    aggregator.assert_metric(
        "libbeat.logstash.published_and_acked_events", metric_type=aggregator.COUNTER, value=100028, tags=expected_tags
    )
    aggregator.assert_metric(
        "libbeat.kafka.published_and_acked_events", metric_type=aggregator.COUNTER, value=0, tags=expected_tags
    )
    aggregator.assert_metric("filebeat.harvester.running", count=0, tags=expected_tags)
# Example #14
def test_registry_happy_path_with_legacy_format(aggregator):
    """unprocessed_bytes is computed per source from a legacy-format registry."""
    filebeat = FilebeatCheck("filebeat", {}, {})
    fake_stats = {
        "/test_dd_agent/var/log/nginx/access.log": mocked_file_stats(394154, 277025, 51713),
        "/test_dd_agent/var/log/syslog": mocked_file_stats(1024917, 152172, 51713),
    }
    with mocked_os_stat(fake_stats):
        filebeat.check(_build_instance("happy_path_legacy_format"))

    aggregator.assert_metric(
        "filebeat.registry.unprocessed_bytes", value=2407, tags=["source:/test_dd_agent/var/log/nginx/access.log"]
    )
    aggregator.assert_metric(
        "filebeat.registry.unprocessed_bytes", value=0, tags=["source:/test_dd_agent/var/log/syslog"]
    )
def test_check_with_instance(aggregator, instance):
    """Two consecutive runs report the gauge twice, the counter once, and OK.

    Fix: renamed from ``test_check`` — a function of that name is already
    defined earlier in this file, so this later duplicate definition
    shadowed it and pytest would only ever collect one of the two. The
    body is unchanged.
    """
    check = FilebeatCheck("filebeat", {}, [instance])
    check.check(instance)
    check.check(instance)
    tags = ["stats_endpoint:{}".format(instance['stats_endpoint'])]
    aggregator.assert_metric("filebeat.harvester.running",
                             metric_type=aggregator.GAUGE,
                             count=2,
                             tags=tags)
    aggregator.assert_metric("libbeat.config.module.starts",
                             metric_type=aggregator.COUNTER,
                             count=1,
                             tags=tags)
    aggregator.assert_service_check("filebeat.can_connect",
                                    status=FilebeatCheck.OK,
                                    tags=tags)
def test_happy_path(aggregator):
    """First run reports only gauges; the second also yields counter increments."""
    instance = _build_instance("empty", stats_endpoint="http://localhost:9999")
    filebeat = FilebeatCheck("filebeat", {}, {})
    expected_tags = ["stats_endpoint:http://localhost:9999"]

    # the first run shouldn't yield any increment metric, but it should
    # still report the gauge metrics
    with mock_request():
        filebeat.check(instance)

    aggregator.assert_metric("libbeat.logstash.published_and_acked_events", count=0)
    aggregator.assert_metric("filebeat.harvester.running", metric_type=aggregator.GAUGE, value=10, tags=expected_tags)

    # now the second run should have all the increment metrics as well
    second_payload = {
        "libbeat.logstash.published_and_acked_events": 1138956,
        "filebeat.harvester.running": 9,
    }
    with mock_request(second_payload):
        filebeat.check(instance)

    aggregator.assert_metric(
        "libbeat.logstash.published_and_acked_events", metric_type=aggregator.COUNTER, value=28, tags=expected_tags
    )
    aggregator.assert_metric(
        "libbeat.kafka.published_and_acked_events", metric_type=aggregator.COUNTER, value=0, tags=expected_tags
    )
    aggregator.assert_metric("filebeat.harvester.running", metric_type=aggregator.GAUGE, value=9, tags=expected_tags)
def test_when_filebeat_restarts(aggregator):
    """A counter reset (filebeat restart) suppresses increments for one run."""
    instance = _build_instance("empty", stats_endpoint="http://localhost:9999")
    filebeat = FilebeatCheck("filebeat", {}, {})

    with mock_request():
        filebeat.check(instance)

    restart_payload = {
        "libbeat.logstash.published_and_acked_events": 0,
        "libbeat.kafka.published_and_acked_events": 12,
    }
    with mock_request(restart_payload):
        filebeat.check(instance)

    # none of these metrics should have been reported, because of the restart
    aggregator.assert_metric("libbeat.logstash.published_and_acked_events", count=0)
    aggregator.assert_metric("libbeat.kafka.published_and_acked_events", count=0)

    # at the next run though, we should get normal increment from in between
    # the 2nd & 3rd runs
    post_restart_payload = {
        "libbeat.logstash.published_and_acked_events": 28,
        "libbeat.kafka.published_and_acked_events": 23,
    }
    with mock_request(post_restart_payload):
        filebeat.check(instance)

    aggregator.assert_metric("libbeat.logstash.published_and_acked_events", metric_type=aggregator.COUNTER, value=28)
    aggregator.assert_metric("libbeat.kafka.published_and_acked_events", metric_type=aggregator.COUNTER, value=11)
def _assert_config_raises(profiler_config, expected_substring):
    """Helper: assert that checking the given bad config raises with a message.

    Fix: the assertion was inside the ``pytest.raises`` block; statements
    after the raising call in the ``with`` body never execute, so the
    substring was never actually checked. It now runs after the block,
    against ``str(excinfo.value)``.
    """
    bad_config = _build_instance(profiler_config)
    check = FilebeatCheck("filebeat", {}, {})
    with pytest.raises(Exception) as excinfo:
        check.check(bad_config)
    assert expected_substring in str(excinfo.value)
def test_when_the_http_connection_is_refused(aggregator):
    """A refused connection to the stats endpoint must not produce any metric."""
    instance = _build_instance("empty", stats_endpoint="http://0.28.28.0:9999")
    filebeat = FilebeatCheck("filebeat", {}, {})
    filebeat.check(instance)
    aggregator.assert_metric("filebeat.harvester.running", count=0)
# Example #20
def test_missing_registry_file(aggregator):
    """A non-existent registry file is silently ignored."""
    instance = _build_instance("i_dont_exist")
    filebeat = FilebeatCheck("filebeat", {}, [instance])
    # tests that it simply silently ignores it
    filebeat.check(instance)
    aggregator.assert_metric("filebeat.registry.unprocessed_bytes", count=0)
# Example #21
def test_missing_source_file(aggregator):
    """A registry entry whose source file is missing yields no metric."""
    instance = _build_instance("missing_source_file")
    filebeat = FilebeatCheck("filebeat", {}, [instance])
    filebeat.check(instance)
    aggregator.assert_metric("filebeat.registry.unprocessed_bytes", count=0)