Example #1
def test_failed_connection(aggregator):
    """Connecting to an unreachable instance raises ConnectionError and reports the service check as CRITICAL."""
    bad_sc_tags = ['host:{}'.format(HOST), 'port:{}'.format(BAD_PORT)]

    check = LogstashCheck(CHECK_NAME, {}, [BAD_INSTANCE])
    with pytest.raises(requests.exceptions.ConnectionError):
        check.check(BAD_INSTANCE)

    aggregator.assert_service_check('logstash.can_connect',
                                    tags=bad_sc_tags,
                                    status=LogstashCheck.CRITICAL)
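
The snippets on this page reference names defined elsewhere in the test package (CHECK_NAME, HOST, PORT, BAD_PORT, URL, the instance dicts, and the metric catalogues). A minimal sketch of the assumed preamble follows; the concrete values and the import path for LogstashCheck are illustrative assumptions, not taken from the project itself.

# Assumed test preamble (values are placeholders; adjust to the real shared constants module).
import pytest
import requests
from distutils.version import LooseVersion

from datadog_checks.logstash import LogstashCheck  # assumed import path

CHECK_NAME = 'logstash'
HOST = 'localhost'
PORT = 9600                                      # default Logstash monitoring API port
BAD_PORT = 9405                                  # nothing listens here, so connections fail
URL = 'http://{}:{}'.format(HOST, PORT)
BAD_URL = 'http://{}:{}'.format(HOST, BAD_PORT)
TAGS = [u'foo:bar', u'baz']
GOOD_INSTANCE = {'url': URL, 'tags': TAGS}
BAD_INSTANCE = {'url': BAD_URL}
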
Example #2
def test_check(aggregator):
    """A reachable instance emits the expected gauge metrics and an OK service check; an unreachable one raises ConnectionError and reports CRITICAL."""
    port = 9600
    bad_port = 9405
    url = URL
    bad_url = 'http://{}:{}'.format(HOST, bad_port)

    tags = [u"foo:bar", u"baz"]

    input_tag = [u"input_name:stdin"]
    output_tag = [u"output_name:stdout"]
    filter_tag = [u"filter_name:json"]

    bad_instance = {'url': bad_url}
    good_instance = {'url': url, 'tags': tags}

    check = LogstashCheck(CHECK_NAME, {}, {})

    with pytest.raises(requests.exceptions.ConnectionError):
        check.check(bad_instance)

    check.check(good_instance)
    default_tags = ["url:{}".format(URL)]

    instance_config = check.get_instance_config(good_instance)

    logstash_version = check._get_logstash_version(instance_config)

    expected_metrics = dict(STATS_METRICS)

    if logstash_version and LooseVersion(logstash_version) < LooseVersion(
            "6.0.0"):
        expected_metrics.update(PIPELINE_METRICS)
        expected_metrics.update(PIPELINE_INPUTS_METRICS)
        expected_metrics.update(PIPELINE_OUTPUTS_METRICS)
        expected_metrics.update(PIPELINE_FILTERS_METRICS)

    good_sc_tags = ['host:{}'.format(HOST), 'port:{}'.format(port)]
    bad_sc_tags = ['host:{}'.format(HOST), 'port:{}'.format(bad_port)]

    for m_name, desc in expected_metrics.items():
        m_tags = tags + default_tags
        if m_name in PIPELINE_INPUTS_METRICS:
            m_tags = m_tags + input_tag
        if m_name in PIPELINE_OUTPUTS_METRICS:
            m_tags = m_tags + output_tag
        if m_name in PIPELINE_FILTERS_METRICS:
            m_tags = m_tags + filter_tag
        if desc[0] == "gauge":
            aggregator.assert_metric(m_name, tags=m_tags, count=1)

    aggregator.assert_service_check('logstash.can_connect',
                                    tags=good_sc_tags + tags,
                                    status=LogstashCheck.OK)
    aggregator.assert_service_check('logstash.can_connect',
                                    tags=bad_sc_tags,
                                    status=LogstashCheck.CRITICAL)
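
The assertion loop above relies on the metric catalogues mapping each metric name to a descriptor whose first element is the submission type, hence the desc[0] == "gauge" check. A plausible, purely illustrative shape for such a catalogue (the real STATS_METRICS / PIPELINE_* dicts live in the check's code):

# Illustrative only: metric name -> (submission type, path in the monitoring API payload).
STATS_METRICS_SKETCH = {
    'logstash.process.open_file_descriptors': ('gauge', 'process.open_file_descriptors'),
    'logstash.jvm.mem.heap_used_in_bytes': ('gauge', 'jvm.mem.heap_used_in_bytes'),
}

for m_name, desc in STATS_METRICS_SKETCH.items():
    # Only gauge-typed entries are asserted in the tests above.
    assert desc[0] == 'gauge'
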
Example #3
def test_check(aggregator):
    """A healthy instance emits stats and per-pipeline metrics (tagged per pipeline on Logstash >= 6.0.0) plus an OK service check."""
    check = LogstashCheck(CHECK_NAME, {}, [GOOD_INSTANCE])

    check.check(GOOD_INSTANCE)
    default_tags = ["url:{}".format(URL)]

    instance_config = check.get_instance_config(GOOD_INSTANCE)

    logstash_version = check._get_logstash_version(instance_config)
    is_multi_pipeline = logstash_version and LooseVersion(
        "6.0.0") <= LooseVersion(logstash_version)

    input_tag = [u"plugin_conf_id:dummy_input"]
    output_tag = [u"plugin_conf_id:dummy_output", u"output_name:stdout"]
    filter_tag = [u"plugin_conf_id:dummy_filter", u"filter_name:json"]
    if is_multi_pipeline:
        input_tag.append(u"input_name:beats")
    else:
        input_tag.append(u"input_name:stdin")

    expected_metrics = dict(STATS_METRICS)
    expected_metrics.update(PIPELINE_METRICS)
    expected_metrics.update(PIPELINE_INPUTS_METRICS)
    expected_metrics.update(PIPELINE_OUTPUTS_METRICS)
    expected_metrics.update(PIPELINE_FILTERS_METRICS)

    good_sc_tags = ['host:{}'.format(HOST), 'port:{}'.format(PORT)]

    pipeline_metrics = dict(PIPELINE_METRICS, **PIPELINE_INPUTS_METRICS)
    pipeline_metrics.update(PIPELINE_FILTERS_METRICS)
    pipeline_metrics.update(PIPELINE_OUTPUTS_METRICS)

    for m_name, desc in expected_metrics.items():
        m_tags = TAGS + default_tags
        if m_name in PIPELINE_INPUTS_METRICS:
            m_tags = m_tags + input_tag
        if m_name in PIPELINE_OUTPUTS_METRICS:
            m_tags = m_tags + output_tag
        if m_name in PIPELINE_FILTERS_METRICS:
            m_tags = m_tags + filter_tag

        is_pipeline_metric = m_name in pipeline_metrics
        if desc[0] == "gauge":
            if is_multi_pipeline and is_pipeline_metric:
                aggregator.assert_metric(m_name,
                                         count=1,
                                         tags=m_tags + [u'pipeline_name:main'])
                aggregator.assert_metric(m_name,
                                         count=1,
                                         tags=m_tags +
                                         [u'pipeline_name:second_pipeline'])
            else:
                aggregator.assert_metric(m_name, count=1, tags=m_tags)

    aggregator.assert_service_check('logstash.can_connect',
                                    tags=good_sc_tags + TAGS,
                                    status=LogstashCheck.OK)
Example #4
def test_check(aggregator):
    """A bad instance raises ConnectionError and goes CRITICAL; a good instance emits stats and per-pipeline gauges and reports OK."""
    port = 9600
    bad_port = 9405
    url = URL
    bad_url = 'http://{}:{}'.format(HOST, bad_port)

    tags = [u"foo:bar", u"baz"]

    bad_instance = {'url': bad_url}
    good_instance = {'url': url, 'tags': tags}

    check = LogstashCheck(CHECK_NAME, {}, {})

    with pytest.raises(requests.exceptions.ConnectionError):
        check.check(bad_instance)

    check.check(good_instance)
    default_tags = ["url:{}".format(URL)]

    instance_config = check.get_instance_config(good_instance)

    logstash_version = check._get_logstash_version(instance_config)
    is_multi_pipeline = logstash_version and LooseVersion("6.0.0") <= LooseVersion(logstash_version)

    input_tag = [u"plugin_conf_id:dummy_input"]
    output_tag = [u"plugin_conf_id:dummy_output", u"output_name:stdout"]
    filter_tag = [u"plugin_conf_id:dummy_filter", u"filter_name:json"]
    if is_multi_pipeline:
        input_tag.append(u"input_name:beats")
    else:
        input_tag.append(u"input_name:stdin")

    expected_metrics = dict(STATS_METRICS)
    expected_metrics.update(PIPELINE_METRICS)
    expected_metrics.update(PIPELINE_INPUTS_METRICS)
    expected_metrics.update(PIPELINE_OUTPUTS_METRICS)
    expected_metrics.update(PIPELINE_FILTERS_METRICS)

    good_sc_tags = ['host:{}'.format(HOST), 'port:{}'.format(port)]
    bad_sc_tags = ['host:{}'.format(HOST), 'port:{}'.format(bad_port)]

    pipeline_metrics = dict(PIPELINE_METRICS, **PIPELINE_INPUTS_METRICS)
    pipeline_metrics.update(PIPELINE_FILTERS_METRICS)
    pipeline_metrics.update(PIPELINE_OUTPUTS_METRICS)

    for m_name, desc in expected_metrics.items():
        m_tags = tags + default_tags
        if m_name in PIPELINE_INPUTS_METRICS:
            m_tags = m_tags + input_tag
        if m_name in PIPELINE_OUTPUTS_METRICS:
            m_tags = m_tags + output_tag
        if m_name in PIPELINE_FILTERS_METRICS:
            m_tags = m_tags + filter_tag

        is_pipeline_metric = m_name in pipeline_metrics
        if desc[0] == "gauge":
            if is_multi_pipeline and is_pipeline_metric:
                aggregator.assert_metric(m_name, count=1, tags=m_tags + [u'pipeline_name:main'])
                aggregator.assert_metric(m_name, count=1, tags=m_tags + [u'pipeline_name:second_pipeline'])
            else:
                aggregator.assert_metric(m_name, count=1, tags=m_tags)

    aggregator.assert_service_check('logstash.can_connect', tags=good_sc_tags + tags, status=LogstashCheck.OK)
    aggregator.assert_service_check('logstash.can_connect', tags=bad_sc_tags, status=LogstashCheck.CRITICAL)
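
All four tests take the standard aggregator fixture. In Datadog integration test suites this is normally supplied by the datadog_checks pytest plugin; a hand-rolled equivalent would look roughly like the sketch below. The stub's import path is an assumption and has moved between base-package versions.

import pytest

@pytest.fixture
def aggregator():
    # Assumed stub location; older packages expose it as datadog_checks.stubs.aggregator.
    from datadog_checks.base.stubs import aggregator as aggregator_stub
    aggregator_stub.reset()  # clear metrics and service checks from previous tests
    return aggregator_stub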