Example #1
def test_extra_metrics_passthrough():
    """
    The specified extraMetrics should be allowed through even though they are
    not included by default.
    """
    metadata = Metadata.from_package("expvar")

    with run_expvar() as expvar_container_ip:
        with Agent.run(f"""
               monitors:
                 - type: expvar
                   host: {expvar_container_ip}
                   port: 8080
                   intervalSeconds: 1
                   extraMetrics:
                    - memstats.by_size.mallocs
               """) as agent:
            assert "memstats.by_size.mallocs" in metadata.nonincluded_metrics
            verify(agent,
                   metadata.included_metrics | {"memstats.by_size.mallocs"})
Example #2
def test_built_in_filtering_disabled_no_whitelist_for_monitor():
    """
    Test a monitor that doesn't have any entries in whitelist.json
    """
    metadata = Metadata.from_package("expvar")

    with run_expvar() as expvar_container_ip:
        with Agent.run(f"""
               enableBuiltInFiltering: false
               monitors:
                 - type: expvar
                   host: {expvar_container_ip}
                   port: 8080
                   intervalSeconds: 1
                   enhancedMetrics: true
                   # This should be ignored
                   extraMetrics:
                    - memstats.by_size.mallocs
               metricsToExclude:
                - {{"#from": "{REPO_ROOT_DIR}/whitelist.json", flatten: true}}
               """) as agent:
            verify(agent, metadata.all_metrics)
Example #3
def test_built_in_filtering_disabled_whitelisted_monitor():
    """
    Test a monitor that is in whitelist.json.
    """
    metadata = Metadata.from_package("collectd/redis")

    with run_redis() as [ip_addr, redis_client]:
        redis_client.lpush("queue-1", *["a", "b", "c"])
        redis_client.lpush("queue-2", *["x", "y"])

        with Agent.run(f"""
               enableBuiltInFiltering: false
               monitors:
                 - type: collectd/redis
                   host: {ip_addr}
                   port: 6379
                   intervalSeconds: 1
                   sendListLengths:
                    - databaseIndex: 0
                      keyPattern: queue-*
               metricsToExclude:
                - {{"#from": "{REPO_ROOT_DIR}/whitelist.json", flatten: true}}
               """) as agent:
            key_llen_metric = "gauge.key_llen"
            assert key_llen_metric not in metadata.included_metrics
            verify(agent,
                   metadata.included_metrics - {"gauge.slave_repl_offset"})

            # Add a non-default metric to the whitelist via metricsToInclude
            # and make sure it comes through
            agent.config["metricsToInclude"] = [{
                "monitorType": "collectd/redis",
                "metricName": key_llen_metric
            }]
            agent.write_config()
            verify(
                agent, metadata.included_metrics - {"gauge.slave_repl_offset"}
                | {key_llen_metric})
Example #4
"""
Tests for the cadvisor monitor
"""
from functools import partial as p

import pytest

from tests.helpers.assertions import tcp_socket_open
from tests.helpers.metadata import Metadata
from tests.helpers.util import container_ip, run_container, wait_for
from tests.helpers.verify import run_agent_verify

pytestmark = [pytest.mark.cadvisor, pytest.mark.monitor_without_endpoints]

METADATA = Metadata.from_package("cadvisor", mon_type="cadvisor")


def run(config, metrics):
    cadvisor_opts = dict(
        volumes={
            "/": {
                "bind": "/rootfs",
                "mode": "ro"
            },
            "/var/run": {
                "bind": "/var/run",
                "mode": "ro"
            },
            "/sys": {
                "bind": "/sys",
                "mode": "ro"
Example #5
"""
Tests for the expvar monitor
"""

from contextlib import contextmanager

import pytest
from tests.helpers.assertions import tcp_socket_open
from tests.helpers.metadata import Metadata
from tests.helpers.util import container_ip, run_service, wait_for
from tests.helpers.verify import run_agent_verify_all_metrics, run_agent_verify, run_agent_verify_default_metrics

pytestmark = [pytest.mark.expvar, pytest.mark.monitor_with_endpoints]

METADATA = Metadata.from_package("expvar")


@contextmanager
def run_expvar():
    """expvar container fixture"""
    with run_service("expvar") as container:
        host = container_ip(container)
        assert wait_for(lambda: tcp_socket_open(host, 8080),
                        60), "service didn't start"
        yield host


def test_expvar_default():
    with run_expvar() as expvar_container_ip:
        run_agent_verify_default_metrics(
            f"""
Example #6

@contextmanager
def run_redis(image="redis:4-alpine"):
    with run_container(image) as redis_container:
        host = container_ip(redis_container)
        assert wait_for(p(tcp_socket_open, host, 6379),
                        60), "service not listening on port"

        redis_client = redis.StrictRedis(host=host, port=6379, db=0)
        assert wait_for(redis_client.ping, 60), "service didn't start"

        yield [host, redis_client]


METADATA = Metadata.from_package("collectd/redis")


@pytest.mark.parametrize("image", ["redis:3-alpine", "redis:4-alpine"])
def test_redis(image):
    with run_redis(image) as [hostname, _]:
        config = MONITOR_CONFIG.substitute(host=hostname)
        with Agent.run(config) as agent:
            verify(agent,
                   METADATA.default_metrics - {"gauge.slave_repl_offset"})


def test_redis_key_lengths():
    with run_redis() as [hostname, redis_client]:
        redis_client.lpush("queue-1", *["a", "b", "c"])
        redis_client.lpush("queue-2", *["x", "y"])
Example #7
from functools import partial as p

import pytest
from tests.helpers.agent import Agent
from tests.helpers.assertions import any_metric_found, datapoints_have_some_or_all_dims, has_log_message
from tests.helpers.metadata import Metadata
from tests.helpers.util import container_ip, ensure_always, run_service
from tests.helpers.verify import verify

pytestmark = [pytest.mark.haproxy, pytest.mark.monitor_with_endpoints]

METADATA = Metadata.from_package("haproxy")

EXPECTED_DEFAULTS = METADATA.default_metrics

EXPECTED_DEFAULTS_FROM_SOCKET = {
    "haproxy_connection_rate_all",
    "haproxy_idle_percent",
    "haproxy_requests",
    "haproxy_session_rate_all",
}


@pytest.mark.parametrize("version", ["1.8"])
def test_haproxy_default_metrics_from_stats_page(version):
    with run_service("haproxy", buildargs={"HAPROXY_VERSION":
                                           version}) as service_container:
        host = container_ip(service_container)
        with Agent.run(f"""
           monitors:
           - type: haproxy
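The config is truncated here. Given the EXPECTED_DEFAULTS_FROM_SOCKET constant above, the test presumably points the monitor at the HTTP stats page and verifies the defaults minus the socket-only metrics; the url option and port are assumptions:

             url: http://{host}:8080/stats?stats;csv   # assumed stats page endpoint
           """) as agent:
            verify(agent, EXPECTED_DEFAULTS - EXPECTED_DEFAULTS_FROM_SOCKET)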
Example #8
import pytest

from tests.helpers.agent import Agent
from tests.helpers.assertions import any_metric_has_any_dim_key, has_datapoint_with_dim, has_log_message, http_status
from tests.helpers.metadata import Metadata
from tests.helpers.util import container_ip, run_service, wait_for
from tests.helpers.verify import run_agent_verify

pytestmark = [
    pytest.mark.collectd,
    pytest.mark.elasticsearch,
    pytest.mark.monitor_with_endpoints,
    pytest.mark.flaky(reruns=2),
]

METADATA = Metadata.from_package("elasticsearch")
ENV = {"cluster.name": "testCluster"}
AGENT_CONFIG_TEMPLATE = """
    monitors:
    - type: elasticsearch
      host: {host}
      port: 9200
      username: elastic
      password: testing123
      {flag}
    """


def check_service_status(host):
    assert wait_for(p(http_status, url=f"http://{host}:9200/_nodes/_local", status=[200]), 180), "service didn't start"
Example #9
          - type: docker-container-stats

        """
        ) as agent:
            assert wait_for(
                p(has_datapoint_with_dim, agent.fake_services, "container_id", nginx_container.id)
            ), "Didn't get nginx datapoints"
            nginx_container.remove(force=True)
            time.sleep(5)
            agent.fake_services.reset_datapoints()
            assert ensure_always(
                lambda: not has_datapoint_with_dim(agent.fake_services, "container_id", nginx_container.id)
            )


METADATA = Metadata.from_package("docker")


def test_docker_default():
    with run_service(
        "elasticsearch/6.6.1"
    ):  # just run a container that does some block I/O so we have some stats
        metrics = METADATA.default_metrics
        run_agent_verify(
            f"""
            monitors:
            - type: docker-container-stats
            """,
            metrics,
        )
Example #10
import pytest
from tests.helpers.agent import Agent
from tests.helpers.assertions import has_datapoint_with_dim, has_datapoint_with_metric_name
from tests.helpers.metadata import Metadata
from tests.helpers.util import ensure_always, run_service, wait_for
from tests.helpers.verify import (
    run_agent_verify_included_metrics,
    verify_expected_is_subset,
    run_agent_verify_all_metrics,
)

pytestmark = [pytest.mark.docker_container_stats, pytest.mark.monitor_without_endpoints]


METADATA = Metadata.from_package("collectd/docker")


def test_docker_included():
    with run_service(
        "elasticsearch/6.6.1"
    ):  # just run a container that does some block I/O so we have some stats
        run_agent_verify_included_metrics(
            f"""
            monitors:
            - type: collectd/docker
              dockerURL: unix:///var/run/docker.sock
            """,
            METADATA,
        )
Example #11
import sys

import pytest

from tests.helpers.agent import Agent
from tests.helpers.metadata import Metadata
from tests.helpers.verify import verify

pytestmark = [
    pytest.mark.windows, pytest.mark.memory,
    pytest.mark.monitor_without_endpoints
]

METADATA = Metadata.from_package("memory")


def test_memory():
    expected_metrics = {"memory.used", "memory.utilization"}
    if sys.platform == "linux":
        expected_metrics.update({
            "memory.buffered", "memory.cached", "memory.free",
            "memory.slab_recl", "memory.slab_unrecl"
        })
    with Agent.run("""
        monitors:
          - type: memory
        """) as agent:
        for met in expected_metrics:
            assert met in METADATA.default_metrics

        verify(agent, expected_metrics)
Example #12
"""
from contextlib import contextmanager
from functools import partial as p

import pytest

from tests.helpers.agent import Agent
from tests.helpers.assertions import has_datapoint_with_dim, tcp_socket_open
from tests.helpers.metadata import Metadata
from tests.helpers.util import container_ip, run_service, wait_for
from tests.helpers.verify import verify

pytestmark = [pytest.mark.collectd, pytest.mark.php, pytest.mark.monitor_with_endpoints]


METADATA = Metadata.from_package("collectd/php")
INSTANCE = "test"


@contextmanager
def run_php_fpm():
    with run_service("php") as php_container:
        host = container_ip(php_container)
        assert wait_for(p(tcp_socket_open, host, 80), 60), "service didn't start"
        yield host


def test_php_default():
    with run_php_fpm() as host, Agent.run(
        f"""
        monitors:
Example #13
import sys

import pytest

from tests.helpers.assertions import has_log_message
from tests.helpers.metadata import Metadata
from tests.helpers.verify import run_agent_verify_default_metrics, run_agent_verify_all_metrics

pytestmark = [
    pytest.mark.skipif(sys.platform != "win32", reason="only runs on windows"),
    pytest.mark.windows_only,
    pytest.mark.windowslegacy,
]

METADATA = Metadata.from_package("windowslegacy")


def test_windowslegacy_default():
    agent = run_agent_verify_default_metrics(
        """
        monitors:
        - type: windows-legacy
        """,
        METADATA,
    )
    assert not has_log_message(agent.output.lower(),
                               "error"), "error found in agent output!"


def test_windowslegacy_all():
    agent = run_agent_verify_all_metrics(
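The call is cut off; by analogy with test_windowslegacy_default above it most likely finishes as follows (the extraMetrics line is an assumption):

        """
        monitors:
        - type: windows-legacy
          extraMetrics: ["*"]   # assumed, to enable every metric
        """,
        METADATA,
    )
    assert not has_log_message(agent.output.lower(),
                               "error"), "error found in agent output!"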
Example #14
from contextlib import contextmanager
from functools import partial as p

import pytest

from tests.helpers.agent import Agent
from tests.helpers.assertions import has_datapoint_with_dim, tcp_socket_open
from tests.helpers.metadata import Metadata
from tests.helpers.util import container_ip, run_service, wait_for
from tests.helpers.verify import verify

pytestmark = [
    pytest.mark.collectd, pytest.mark.nginx, pytest.mark.monitor_with_endpoints
]

METADATA = Metadata.from_package("collectd/nginx")


@contextmanager
def run_nginx():
    with run_service("nginx") as nginx_container:
        host = container_ip(nginx_container)
        assert wait_for(p(tcp_socket_open, host, 80),
                        60), "service didn't start"
        yield host


def test_nginx_included():
    with run_nginx() as host, Agent.run(f"""
        monitors:
        - type: collectd/nginx
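The config is cut off; the collectd/nginx monitor is pointed at the container's status endpoint, so the test plausibly continues like this (the port and the verified metric set are assumptions):

          host: {host}
          port: 80   # assumed: the port the fixture waits on
        """) as agent:
        verify(agent, METADATA.included_metrics)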
Example #15
import pytest
from tests.helpers.kubernetes import LATEST
from tests.helpers.metadata import Metadata
from tests.helpers.verify import verify

pytestmark = [pytest.mark.kubernetes]
METADATA = Metadata.from_package("kubernetes/scheduler")


@pytest.mark.kubernetes
@LATEST
def test_kubernetes_scheduler(k8s_cluster):
    config = """
        observers:
        - type: k8s-api

        monitors:
        - type: kubernetes-scheduler
          discoveryRule: kubernetes_pod_name =~ "kube-scheduler"
          port: 10251
          extraMetrics: ["*"]
     """
    with k8s_cluster.run_agent(config) as agent:
        verify(agent, METADATA.all_metrics)
Example #16
from functools import partial as p
from textwrap import dedent

import pytest

from tests.helpers.agent import Agent
from tests.helpers.assertions import (
    has_datapoint_with_dim,
    has_datapoint_with_metric_name,
    has_log_message,
    http_status,
)
from tests.helpers.metadata import Metadata
from tests.helpers.util import run_service, container_ip, wait_for

pytestmark = [
    pytest.mark.collectd, pytest.mark.elasticsearch,
    pytest.mark.monitor_with_endpoints
]

METADATA = Metadata.from_package("collectd/elasticsearch")


@pytest.mark.flaky(reruns=2)
def test_elasticsearch_without_cluster_option():
    with run_service("elasticsearch/6.4.2",
                     environment={"cluster.name":
                                  "testCluster"}) as es_container:
        host = container_ip(es_container)
        assert wait_for(
            p(http_status,
              url=f"http://{host}:9200/_nodes/_local",
              status=[200]), 180), "service didn't start"
        config = dedent(f"""
            monitors:
            - type: collectd/elasticsearch
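The config and the remainder of the test are cut off. A hedged sketch of a plausible continuation: the monitor is configured without the cluster option, and the datapoints are expected to carry the cluster name that the plugin discovers from Elasticsearch itself (the dimension name checked here is an assumption):

              host: {host}
              port: 9200
            """)
        with Agent.run(config) as agent:
            # assumed assertion: the cluster name shows up as a dimension
            assert wait_for(
                p(has_datapoint_with_dim, agent.fake_services,
                  "plugin_instance", "testCluster")
            ), "Didn't get datapoints with the cluster name dimension"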
Example #17
"""

ETCD_TLS_CONFIG = """
monitors:
  - type: etcd
    host: {host}
    port: {port}
    useHTTPS: true
    clientCertPath: {testServices}/etcd/certs/client.crt
    clientKeyPath: {testServices}/etcd/certs/client.key
    caCertPath: {testServices}/etcd/certs/server.crt
    skipVerify: {skipValidation}
    sendAllMetrics: true
"""

METADATA = Metadata.from_package("etcd")


@contextmanager
def run_etcd(tls=False, **kwargs):
    if tls:
        cmd = """
            --listen-client-urls https://0.0.0.0:2379
            --advertise-client-urls https://0.0.0.0:2379
            --trusted-ca-file /opt/testing/certs/server.crt
            --cert-file /opt/testing/certs/server.crt
            --key-file /opt/testing/certs/server.key
            --client-cert-auth
        """
        with run_service("etcd", command=cmd, **kwargs) as container:
            host = container_ip(container)
Example #18
from functools import partial as p

import pytest
import requests
from tests.helpers.agent import Agent
from tests.helpers.assertions import has_datapoint, tcp_socket_open
from tests.helpers.metadata import Metadata
from tests.helpers.util import container_ip, run_service, wait_for, wait_for_assertion
from tests.helpers.verify import verify

pytestmark = [pytest.mark.collectd, pytest.mark.haproxy, pytest.mark.monitor_with_endpoints]

METADATA = Metadata.from_package("collectd/haproxy")

EXPECTED_DEFAULTS = METADATA.included_metrics


@pytest.mark.parametrize("version", ["1.9", "latest"])
def test_haproxy_basic(version):
    with run_service("haproxy", buildargs={"HAPROXY_VERSION": version}) as service_container:
        host = container_ip(service_container)
        assert wait_for(p(tcp_socket_open, host, 9000)), "haproxy not listening on port"

        with Agent.run(
            f"""
           monitors:
           - type: collectd/haproxy
             host: {host}
             port: 9000
             enhancedMetrics: false
           """
Example #19
from functools import partial as p

import pytest

from tests.helpers.agent import Agent
from tests.helpers.assertions import has_datapoint, tcp_socket_open
from tests.helpers.metadata import Metadata
from tests.helpers.util import container_ip, run_container, wait_for
from tests.helpers.verify import verify

pytestmark = [
    pytest.mark.collectd, pytest.mark.postgresql,
    pytest.mark.monitor_with_endpoints
]

ENV = [
    "POSTGRES_USER=test_user", "POSTGRES_PASSWORD=test_pwd", "POSTGRES_DB=test"
]

METADATA = Metadata.from_package("collectd/postgresql")


def test_postgresql_defaults():
    with run_container("postgres:10", environment=ENV) as cont:
        host = container_ip(cont)
        assert wait_for(p(tcp_socket_open, host, 5432),
                        60), "service didn't start"

        with Agent.run(f"""
                monitors:
                  - type: collectd/postgresql
                    host: {host}
                    port: 5432
                    username: "******"
                    password: "******"
Example #20
import pytest

from tests.helpers.metadata import Metadata
from tests.helpers.util import run_container, container_ip
from tests.helpers.verify import run_agent_verify_included_metrics, run_agent_verify_all_metrics

VERSIONS = ["memcached:1.5-alpine", "memcached:latest"]
METADATA = Metadata.from_package("collectd/memcached")


@pytest.mark.parametrize("version", VERSIONS)
def test_memcached_included(version):
    with run_container(version) as container:
        host = container_ip(container)
        run_agent_verify_included_metrics(
            f"""
            monitors:
            - type: collectd/memcached
              host: {host}
              port: 11211
            """,
            METADATA,
        )


@pytest.mark.parametrize("version", VERSIONS)
def test_memcached_all(version):
    with run_container(version) as container:
        host = container_ip(container)
        run_agent_verify_all_metrics(
            f"""
Example #21
from functools import partial as p
from textwrap import dedent
import pytest
from signalfx.generated_protocol_buffers import signal_fx_protocol_buffers_pb2 as sf_pbuf
from tests.helpers.metadata import Metadata
from tests.helpers.agent import Agent
from tests.helpers.assertions import has_datapoint, has_datapoint_with_dim
from tests.helpers.verify import run_agent_verify_default_metrics
from tests.helpers.util import wait_for

pytestmark = [pytest.mark.telegraf]

METADATA = Metadata.from_package("telegraf/monitors/dns")
SERVER = "1.1.1.1"
DOMAIN = "signalfx.com"


def test_telegraf_dns_metrics():
    # Config to get every possible metric
    agent_config = dedent(f"""
        monitors:
        - type: telegraf/dns
          servers:
            - {SERVER}
        """)
    run_agent_verify_default_metrics(agent_config, METADATA)


def test_telegraf_resolve():
    with Agent.run(f"""
        monitors:
Example #22
from contextlib import contextmanager
from functools import partial as p

from tests.helpers.agent import Agent
from tests.helpers.assertions import tcp_socket_open
from tests.helpers.metadata import Metadata
from tests.helpers.util import container_ip, run_container, wait_for
from tests.helpers.verify import run_agent_verify_default_metrics, verify_expected_is_subset

METADATA = Metadata.from_package("collectd/zookeeper")

ENV = ["ZOO_4LW_COMMANDS_WHITELIST=mntr,ruok", "ZOO_STANDALONE_ENABLED=false"]


@contextmanager
def run_zookeeper(version="zookeeper:3.4", env=None):
    with run_container(version, environment=env) as zk_cont:
        host = container_ip(zk_cont)
        assert wait_for(p(tcp_socket_open, host, 2181), 30)
        yield host


def test_zookeeeper():
    with run_zookeeper() as host:
        run_agent_verify_default_metrics(
            f"""
            monitors:
            - type: collectd/zookeeper
              host: {host}
              port: 2181
            """,
Example #23
import sys

import pytest

from tests.helpers.assertions import http_status
from tests.helpers.metadata import Metadata
from tests.helpers.verify import run_agent_verify_default_metrics, run_agent_verify_all_metrics

pytestmark = [
    pytest.mark.skipif(sys.platform != "win32", reason="only runs on windows"),
    pytest.mark.windows_only,
    pytest.mark.windowsiis,
]

METADATA = Metadata.from_package("windowsiis")


def test_windowsiis_default():
    run_agent_verify_default_metrics(
        """
        monitors:
        - type: windows-iis
        """,
        METADATA,
    )


def test_windowsiis_all():
    # Required to make sure a worker (w3wp process) has started for process metrics.
    assert http_status("http://localhost",
                       [200]), "IIS should be running on localhost"
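The rest of the test is cut off; by analogy with the other *_all tests in this listing it likely runs the agent with every metric enabled (the extraMetrics line is an assumption):

    run_agent_verify_all_metrics(
        """
        monitors:
        - type: windows-iis
          extraMetrics: ["*"]   # assumed, to enable every metric
        """,
        METADATA,
    )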
Example #24
from contextlib import contextmanager
from functools import partial as p

import pytest

from tests.helpers.agent import Agent
from tests.helpers.assertions import tcp_socket_open, has_datapoint_with_dim
from tests.helpers.metadata import Metadata
from tests.helpers.util import container_ip, run_container, wait_for
from tests.helpers.verify import verify
from tests.monitors.collectd_hadoop.hadoop_test import start_hadoop

pytestmark = [pytest.mark.collectd, pytest.mark.hadoopjmx, pytest.mark.monitor_with_endpoints]

METADATA = Metadata.from_package("collectd/hadoopjmx")
LATEST_VERSION = "3.0.3"
VERSIONS = ["2.9.1", LATEST_VERSION]
NODETYPE_PORT = {"nameNode": 5677, "dataNode": 5677, "resourceManager": 5680, "nodeManager": 8002}
NODETYPE_GROUP = {
    "nameNode": "name-node",
    "dataNode": "data-node",
    "resourceManager": "resource-manager",
    "nodeManager": "node-manager",
}
YARN_VAR = {"resourceManager": "YARN_RESOURCEMANAGER_OPTS", "nodeManager": "YARN_NODEMANAGER_OPTS"}
YARN_OPTS = (
    '%s="-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false '
    '-Dcom.sun.management.jmxremote.port=%d $%s"'
)
YARN_ENV_PATH = "/usr/local/hadoop/etc/hadoop/yarn-env.sh"
HADOOPJMX_CONFIG = """
Example #25
"""
Tests for the cpu monitor
"""

import pytest

from tests.helpers.metadata import Metadata
from tests.helpers.verify import run_agent_verify_default_metrics, run_agent_verify_all_metrics

pytestmark = [
    pytest.mark.windows, pytest.mark.cpu, pytest.mark.monitor_without_endpoints
]

METADATA = Metadata.from_package("cpu")


def test_cpu_default():
    run_agent_verify_default_metrics(
        """
        monitors:
        - type: cpu
        """,
        METADATA,
    )


def test_cpu_all():
    run_agent_verify_all_metrics(
        """
        monitors:
        - type: cpu
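The config is cut off; mirroring test_cpu_default above and the other *_all tests, the remainder is likely (extraMetrics is an assumption):

          extraMetrics: ["*"]   # assumed, to enable every metric
        """,
        METADATA,
    )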
Example #26
"""
Tests for the collectd/protocols monitor
"""

import pytest

from tests.helpers.assertions import has_log_message
from tests.helpers.metadata import Metadata
from tests.helpers.verify import run_agent_verify_default_metrics, run_agent_verify_all_metrics

pytestmark = [pytest.mark.collectd, pytest.mark.protocols, pytest.mark.monitor_without_endpoints]

METADATA = Metadata.from_package("collectd/protocols")


def test_protocols_default():
    """
    Test that we get all default metrics
    """
    agent = run_agent_verify_default_metrics(
        """
        monitors:
        - type: collectd/protocols
        """,
        METADATA,
    )
    assert not has_log_message(agent.output.lower(), "error"), "error found in agent output!"


def test_protocols_all():
    """
Example #27
import pytest

from tests.helpers.metadata import Metadata

pytestmark = [
    pytest.mark.collectd, pytest.mark.jenkins,
    pytest.mark.monitor_with_endpoints
]

METRICS_KEY = "33DD8B2F1FD645B814993275703F_EE1FD4D4E204446D5F3200E0F6-C55AC14E"

JENKINS_VERSIONS = [
    # technically we support 1.580.3, but the scripts needed to programmatically
    # set up jenkins do not work prior to 1.651.3
    ("jenkins", "1.651.3-alpine"),
    # TODO: jenkins doesn't have a latest tag so we'll need to update this
    # periodically
    ("jenkins/jenkins", "2.196-alpine"),
]

METADATA = Metadata.from_package("collectd/jenkins")
ENHANCED_METRICS = {
    ("jenkins", "1.651.3-alpine"): METADATA.all_metrics - {
        "gauge.jenkins.job.duration",
        "gauge.jenkins.node.executor.count.value",
        "gauge.jenkins.node.executor.in-use.value",
        "gauge.jenkins.node.health-check.score",
        "gauge.jenkins.node.queue.size.value",
        "gauge.jenkins.node.slave.online.status",
        "gauge.jenkins.node.vm.memory.heap.usage",
        "gauge.jenkins.node.vm.memory.non-heap.used",
        "gauge.jenkins.node.vm.memory.total.used",
    },
    ("jenkins/jenkins", "2.196-alpine"): METADATA.all_metrics -
    {"gauge.jenkins.job.duration", "gauge.jenkins.node.slave.online.status"},
}
Example #28
import pytest

from tests.helpers.agent import Agent
from tests.helpers.metadata import Metadata
from tests.helpers.verify import run_agent_verify_default_metrics, verify_expected_is_subset

pytestmark = [pytest.mark.collectd, pytest.mark.disk, pytest.mark.monitor_without_endpoints]

METADATA = Metadata.from_package("collectd/disk")


def test_disk_default():
    run_agent_verify_default_metrics(
        """
        monitors:
        - type: collectd/disk
        """,
        METADATA,
    )


def test_disk_all():
    with Agent.run(
        """
        monitors:
        - type: collectd/disk
          extraMetrics: ["*"]
        """
    ) as agent:
        # pending_operations only shows up sometimes on CI. Maybe only reported when non-zero?
        verify_expected_is_subset(agent, METADATA.all_metrics - {"pending_operations"})
Example #29
from textwrap import dedent

import pytest
from tests.helpers.agent import Agent
from tests.helpers.assertions import has_datapoint_with_dim
from tests.helpers.metadata import Metadata
from tests.helpers.util import wait_for
from tests.helpers.verify import run_agent_verify_default_metrics

pytestmark = [
    pytest.mark.windows, pytest.mark.filesystems,
    pytest.mark.monitor_without_endpoints
]

MONITOR = "ntp"
METADATA = Metadata.from_package(MONITOR)
HOST = "pool.ntp.org"


@pytest.mark.flaky(reruns=2, reruns_delay=30)
def test_default_metrics():
    # Config to get every possible metric
    agent_config = dedent(f"""
        monitors:
        - type: ntp
          host: {HOST}
        """)
    # every default metric should be reported
    run_agent_verify_default_metrics(agent_config, METADATA)

Example #30
import pytest
from tests.helpers.agent import Agent
from tests.helpers.assertions import has_datapoint
from tests.helpers.metadata import Metadata
from tests.helpers.util import run_service
from tests.helpers.verify import verify

pytestmark = [pytest.mark.monitor_without_endpoints]


METADATA = Metadata.from_package("cgroups")


def test_cgroup_monitor():
    with run_service(
        "nginx", cpu_period=100_000, cpu_quota=10000, cpu_shares=50, mem_limit=20 * 1024 * 1024
    ) as nginx_container:
        with Agent.run(
            """
    monitors:
      - type: cgroups
        extraMetrics: ['*']
    """
        ) as agent:
            verify(agent, METADATA.all_metrics)

            expected_cgroup = "/docker/" + nginx_container.id

            assert has_datapoint(
                agent.fake_services, metric_name="cgroup.cpu_shares", value=50, dimensions={"cgroup": expected_cgroup}
            )