Example #1
        ),
    ]),
])
def test_discover_smart_stats(section, discovered):
    assert list(smart.discover_smart_stats(section)) == discovered


@pytest.mark.parametrize("item, params, section, result", [
    ("/dev/sda", {
        'Current_Pending_Sector': 0,
        'End-to-End_Error': 0,
        'Reallocated_Sector_Ct': 0,
        'Reported_Uncorrect': 0,
        'UDMA_CRC_Error_Count': 0,
    }, SECTION_SD, [
        Result(state=State.OK, summary='Powered on: 67 days 1 hour'),
        Metric('Power_On_Hours', 1609),
        Result(state=State.OK, summary='Power cycles: 9'),
        Metric('Power_Cycle_Count', 9),
        Result(state=State.OK, summary='Uncorrectable errors: 0'),
        Metric('Reported_Uncorrect', 0),
        Result(state=State.OK, summary='Reallocated sectors: 0'),
        Metric('Reallocated_Sector_Ct', 0),
        Result(state=State.OK, summary='Pending sectors: 0'),
        Metric('Current_Pending_Sector', 0),
        Result(state=State.OK, summary='End-to-End errors: 0'),
        Metric('End-to-End_Error', 0),
        Result(state=State.OK, summary='UDMA CRC errors: 0'),
        Metric('UDMA_CRC_Error_Count', 0),
    ]),
    ("/dev/nvme0n1", {'Critical_Warning': 0}, SECTION_NVME, [
Example #2
def test_subset_patterns():

    section_ps = ps_section.parse_ps(
        splitter("""(user,0,0,0.5) main
(user,0,0,0.4) main_dev
(user,0,0,0.1) main_dev
(user,0,0,0.5) main_test"""))

    # The word boundary (\b) in the match pattern is necessary; otherwise the 'main'
    # instance would accumulate every matching process (see the sketch after this example).
    inv_params: List[Dict] = [{
        'default_params': {
            'cpu_rescale_max': True,
            'levels': (1, 1, 99999, 99999)
        },
        'match': '~(main.*)\\b',
        'descr': '%s',
    }, {}]

    discovered = [
        Service(
            item='main',
            parameters={
                'cpu_rescale_max': True,
                'levels': (1, 1, 99999, 99999),
                'process': '~(main.*)\\b',
                'match_groups': ('main',),
                'user': None,
                'cgroup': (None, False),
            },
        ),
        Service(
            item='main_dev',
            parameters={
                'cpu_rescale_max': True,
                'levels': (1, 1, 99999, 99999),
                'process': '~(main.*)\\b',
                'match_groups': ('main_dev',),
                'user': None,
                'cgroup': (None, False),
            },
        ),
        Service(
            item='main_test',
            parameters={
                'cpu_rescale_max': True,
                'levels': (1, 1, 99999, 99999),
                'process': '~(main.*)\\b',
                'match_groups': ('main_test',),
                'user': None,
                'cgroup': (None, False),
            },
        ),
    ]

    test_discovered = ps_utils.discover_ps(inv_params, section_ps, None, None)  # type: ignore[arg-type]
    assert {s.item: s for s in test_discovered} == {s.item: s for s in discovered}  # type: ignore[attr-defined]

    for service, count in zip(discovered, [1, 2, 1]):
        assert isinstance(service.item, str)
        with value_store.context(CheckPluginName("ps"), "unit-test"):
            output = list(ps_utils.check_ps_common(
                label="Processes",
                item=service.item,
                params=service.parameters,  # type: ignore[arg-type]
                process_lines=[
                    (None, psi, cmd_line) for (psi, cmd_line) in section_ps[1]],
                cpu_cores=1,
                total_ram=None,
            ))
        assert output[0] == Result(state=state.OK, summary="Processes: %s" % count)
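
A minimal sketch of why the trailing \b in the match pattern matters, using plain re rather
than the Checkmk ps helpers (that the discovered match group is substituted back into the
pattern when a single item is checked is an assumption, not something this test spells out):

import re

# With the boundary, a command line 'main_dev' is not counted for the item 'main' ...
assert re.match(r"main\b", "main_dev") is None
# ... without it, item 'main' would accumulate main_dev and main_test as well.
assert re.match(r"main", "main_dev") is not None
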
Example #3
    monkeypatch.setattr(sap_hana_diskusage, "get_value_store", lambda: value_store_patched)
    yield value_store_patched


@pytest.mark.usefixtures("load_all_agent_based_plugins")
@pytest.mark.parametrize("item, info, expected_result", [
    (
        "HXE 90 HXE - Log",
        [
            ["[[HXE 90 HXE]]"],
            ["Data", "OK", "Size 64.3 GB, Used 10.1 GB, Free 85 %"],
            ["Log", "OK", "Size 64.3 GB, Used 10.1 GB, Free 85 %"],
            ["Trace", "OK", "Size 64.3 GB, Used 10.1 GB, Free 85 %"],
        ],
        [
            Result(state=State.OK, summary="Status: OK"),
            Metric("fs_used",
                   10342.400000000001,
                   levels=(52674.56, 59258.88),
                   boundaries=(0.0, 65843.2)),
            Metric("fs_size", 65843.2, boundaries=(0.0, None)),
            Metric(
                "fs_used_percent", 15.707620528771386, levels=(80.0, 90.0),
                boundaries=(0.0, 100.0)),
            Result(state=State.OK, summary="15.71% used (10.1 of 64.3 GiB)"),
            Metric("growth", -4469.024458823538),
            Result(state=State.OK, summary="trend per 1 day 0 hours: +621 TiB"),
            Result(state=State.OK, summary="trend per 1 day 0 hours: +988323.73%"),
            Metric("trend", 650743967.1166623, boundaries=(0.0, 2743.4666666666662)),
            Result(state=State.OK, summary="Time left until disk full: 7 seconds"),
        ],
Example #4
}


@pytest.mark.parametrize("section,parsed_sections", [
    ([[['1', '0']]], (1, 0)),
])
def test_parse_juniper_trpz_aps(section, parsed_sections):  # type: ignore
    section = parse_juniper_trpz_aps(section)
    assert section == parsed_sections


@pytest.mark.parametrize("section,expected_results", [
    ((1, 0), [
        Metric('ap_devices_total', 1.0),
        Metric('total_sessions', 0.0),
        Result(state=state.OK, summary='Online access points: 1, Sessions: 0'),
        ]),
])
def test_check_juniper_trpz_aps(section, expected_results):  # type: ignore
    results = list(check_juniper_trpz_aps(section))
    for r in results:
        print(r)
    assert results == expected_results


@pytest.mark.parametrize("node_sections,expected_results", [
    ({
        "node1": (1, 2),
        "node2": (3, 4)
    }, [
        Result(state=state.OK, summary='Total: 4 access points, Sessions: 6'),
Example #5
            "phase_1_input": (0, 0),
            "phase_1_output": (0, 0),
            "phase_2_input": (0, 0),
            "phase_2_output": (0, 0),
        },
    )


@pytest.mark.parametrize(
    "item, params, expected_result",
    [
        pytest.param(
            "110.173.49.157",
            {},
            [
                Result(state=State.OK,
                       summary="Phase 1: in: 463 kBit/s, out: 485 kBit/s"),
                Result(state=State.OK,
                       summary="Phase 2: in: 34.2 GBit/s, out: 4.42 GBit/s"),
                Metric("if_in_octets", 4275729150.0),
                Metric("if_out_octets", 552130799.0),
            ],
            id="standard case",
        ),
        pytest.param(
            "211.167.210.107",
            {},
            [
                Result(state=State.OK,
                       summary="Phase 1: in: 12.5 kBit/s, out: 12.8 kBit/s"),
                Result(state=State.OK, summary="Phase 2 missing"),
                Metric("if_in_octets", 1564.0),
Example #6
            "item": "MEMPOOL_DMA",
            "params": {
                'trend_perfdata': True,
                'trend_range': 24,
                'trend_showtimeleft': True,
                'trend_timeleft': (12, 6)
            },
            "section": {
                'System memory': ['3848263744', '8765044672'],
                'MEMPOOL_MSGLYR': ['123040', '8265568'],
                'MEMPOOL_DMA': ['429262192', '378092176'],
                'MEMPOOL_GLOBAL_SHARED': ['1092814800', '95541296'],
            }
        },
        (
            Result(state=state.OK,
                   summary='Usage: 53.17% - 409 MiB of 770 MiB'),
            Metric(
                'mem_used_percent', 53.16899356888102, boundaries=(0.0, None)),
        ),
    ),
])
def test_check_cisco_mem(check_args, expected_result):
    assert list(_idem_check_cisco_mem(value_store={},
                                      **check_args)) == list(expected_result)


if __name__ == "__main__":
    # Please keep these lines - they make TDD easy and have no effect on normal test runs.
    # Just run this file from your IDE and dive into the code.
    import os
    from testlib.utils import cmk_path
Example #7
        [
            Service(item=None, parameters={}, labels=[]),
        ],
    ),
])
def test_discovery_docker_container_status_uptime(section_uptime, expected_services):
    _test_discovery(
        lambda parsed: docker.discover_docker_container_status_uptime(parsed, section_uptime),
        PARSED,
        expected_services,
    )


@pytest.mark.parametrize("params, expected_results", [
    ({}, [
        Result(state=state.OK, summary='Up since Jun 05 2019 08:58:07'),
        Result(state=state.OK, summary='Uptime: 1 hour 1 minute'),
        Metric('uptime', 3713.0),
    ]),
    ({
        "min": (1000, 2000)
    }, [
        Result(state=state.OK, summary='Up since Jun 05 2019 08:58:07'),
        Result(state=state.OK, summary='Uptime: 1 hour 1 minute'),
        Metric('uptime', 3713.0),
    ]),
    ({
        "max": (1000, 2000)
    }, [
        Result(state=state.OK, summary='Up since Jun 05 2019 08:58:07'),
        Result(
Example #8

@pytest.mark.parametrize(
    "file_stat, reftime, params, expected_result",
    [
        (
            FileinfoItem(
                name="z:\\working\\client\\todo\\BP-15f86cb7-89d7-41a9-8aec-04b9e179f0b4.xml",
                missing=False,
                failed=False,
                size=539,
                time=None,
            ),
            123456,
            {"timeofday": [((8, 0), (9, 0))]},
            [Result(state=State.WARN, summary="File stat time failed")],
        ),
        (
            FileinfoItem(
                name="z:\\working\\client\\todo\\BP-15f86cb7-89d7-41a9-8aec-04b9e179f0b4.xml",
                missing=True,
                failed=False,
                size=539,
                time=1189173868,
            ),
            123456,
            {"timeofday": [((8, 0), (9, 0))]},
            [Result(state=State.OK, summary="File not found - Out of relevant time of day")],
        ),
        (
            FileinfoItem(
Example #9
        ),
        (
            # I am not sure how that happened, but we have seen empty files
            [[
                '==>',
                'empty_file.123running',
                '<==',
            ]],
            {}),
    ])
def test_parse(string_table, expected_parsed_data):
    assert job.parse_job(string_table) == expected_parsed_data


RESULTS_SHREK: List[Union[Metric, Result]] = [
    Result(state=state.OK, summary='Latest exit code: 0'),
    Result(state=state.OK, summary='Real time: 2 minutes 0 seconds'),
    Metric('real_time', 120.0, boundaries=(0.0, None)),
    Result(state=state.OK,
           notice='Latest job started at Jan 12 2019 14:53:21'),
    Metric('start_time', 1547301201.0),
    Result(state=state.OK, summary='Job age: 1 year 178 days'),
    Result(state=state.OK, notice='Avg. memory: 1000 B'),
    Metric('avg_mem_bytes', 1000.0, boundaries=(0.0, None)),
    Result(state=state.OK, notice='Invol. context switches: 12'),
    Metric('invol_context_switches', 12.0, boundaries=(0.0, None)),
    Result(state=state.OK, notice='Max. memory: 1.18 MiB'),
    Metric('max_res_bytes', 1234000.0, boundaries=(0.0, None)),
    Result(state=state.OK, notice='Filesystem reads: 0'),
    Metric('reads', 0.0, boundaries=(0.0, None)),
    Result(state=state.OK, notice='System time: 0 seconds'),
Example #10
@pytest.mark.parametrize(
    "item, params, section, result",
    [
        (
            "/dev/sda",
            {
                "Current_Pending_Sector": 0,
                "End-to-End_Error": 0,
                "Reallocated_Sector_Ct": 0,
                "Reported_Uncorrect": 0,
                "UDMA_CRC_Error_Count": 0,
            },
            SECTION_SD,
            [
                Result(state=State.OK, summary="Powered on: 67 days 1 hour"),
                Metric("Power_On_Hours", 1609),
                Result(state=State.OK, summary="Power cycles: 9"),
                Metric("Power_Cycle_Count", 9),
                Result(state=State.OK, summary="Uncorrectable errors: 0"),
                Metric("Reported_Uncorrect", 0),
                Result(state=State.OK, summary="Reallocated sectors: 0"),
                Metric("Reallocated_Sector_Ct", 0),
                Result(state=State.OK, summary="Pending sectors: 0"),
                Metric("Current_Pending_Sector", 0),
                Result(state=State.OK, summary="Command timeout counter: 0"),
                Metric("Command_Timeout", 0.0),
                Result(state=State.OK, summary="End-to-End errors: 0"),
                Metric("End-to-End_Error", 0),
                Result(state=State.OK, summary="UDMA CRC errors: 0"),
                Metric("UDMA_CRC_Error_Count", 0),
Example #11
            # I am not sure how that happened, but we have seen empty files
            [[
                "==>",
                "empty_file.123running",
                "<==",
            ]],
            {},
        ),
    ],
)
def test_parse(string_table, expected_parsed_data):
    assert job.parse_job(string_table) == expected_parsed_data


RESULTS_SHREK: List[Union[Metric, Result]] = [
    Result(state=State.OK, summary="Latest exit code: 0"),
    Result(state=State.OK, summary="Real time: 2 minutes 0 seconds"),
    Metric("real_time", 120.0, boundaries=(0.0, None)),
    Result(state=State.OK,
           notice="Latest job started at Jan 12 2019 14:53:21"),
    Metric("start_time", 1547301201.0),
    Result(state=State.OK, summary="Job age: 1 year 178 days"),
    Result(state=State.OK, notice="Avg. memory: 1000 B"),
    Metric("avg_mem_bytes", 1000.0, boundaries=(0.0, None)),
    Result(state=State.OK, notice="Invol. context switches: 12"),
    Metric("invol_context_switches", 12.0, boundaries=(0.0, None)),
    Result(state=State.OK, notice="Max. memory: 1.18 MiB"),
    Metric("max_res_bytes", 1234000.0, boundaries=(0.0, None)),
    Result(state=State.OK, notice="Filesystem reads: 0"),
    Metric("reads", 0.0, boundaries=(0.0, None)),
    Result(state=State.OK, notice="System time: 0 seconds"),
Example #12
                "state": "running",
            },
            services.WINDOWS_SERVICES_DISCOVERY_DEFAULT_PARAMETERS,
        ],
        [],
    ),
])
def test_discovery_windows_services(params, discovered_services):
    assert discovered_services == list(
        services.discovery_windows_services(params, PARSED))


@pytest.mark.parametrize("item, params, yielded_results", [
    ("WSearch", services.WINDOWS_SERVICES_CHECK_DEFAULT_PARAMETERS, [
        Result(state=state.CRIT,
               summary='Windows Search: stopped (start type is demand)',
               details='Windows Search: stopped (start type is demand)')
    ]),
    ("WSearch", {
        "else": 1
    }, [
        Result(state=state.WARN,
               summary='Windows Search: stopped (start type is demand)',
               details='Windows Search: stopped (start type is demand)')
    ]),
    ("WSearch", {
        "states": [("stopped", None, 0)]
    }, [
        Result(state=state.OK,
               summary='Windows Search: stopped (start type is demand)',
               details='Windows Search: stopped (start type is demand)')
Example #13
    """
    section = {"MemTotal": 0}
    assert list(discover_mem_used(section)) == [Service()]
    result, = check_mem_used({}, section)
    assert isinstance(result, Result)
    assert result.state == State.UNKNOWN
    assert result.summary.startswith("Reported total memory is 0 B")


@pytest.mark.parametrize(
    "label,used,total,levels,kwargs,expected",
    [
        # all variants of "no levels"
        ("Longterm", 23 * MEGA, 42 * MEGA, None, {}, [
            Result(
                state=state.OK,
                summary="Longterm: 54.76% - 23.0 MiB of 42.0 MiB",
            ),
        ]),
        ("Longterm", 23 * MEGA, 42 * MEGA, "ignore", {}, [
            Result(
                state=state.OK,
                summary="Longterm: 54.76% - 23.0 MiB of 42.0 MiB",
            ),
        ]),
        ("Longterm", 23 * MEGA, 42 * MEGA, ("ignore", None), {}, [
            Result(
                state=state.OK,
                summary="Longterm: 54.76% - 23.0 MiB of 42.0 MiB",
            ),
        ]),
        ("Longterm", 23 * MEGA, 42 * MEGA, ("ignore", (None, None)), {}, [
Example #14
    mocked_time.time = mocker.Mock(side_effect=itertools.count(0.1, 60.1))
    mocker.patch.object(kube_pod_resources, "time", mocked_time)
    return mocked_time


@pytest.mark.parametrize(
    "pending_pods_in_each_check_call, expected_result_in_each_check_call",
    [
        pytest.param(
            (
                ["pod"],
                ["pod"],
                ["pod"],
            ),
            (
                Result(state=State.OK, summary="Pending: 1"),
                Result(
                    state=State.WARN,
                    summary=
                    "Pending: 1, thereof 1 (pod) for longer than 1 minute 0 seconds",
                ),
                Result(
                    state=State.CRIT,
                    summary=
                    "Pending: 1, thereof 1 (pod) for longer than 2 minutes 0 seconds",
                ),
            ),
            id="crit",
        ),
        pytest.param(
            (
Example #15
        'AP19': '1',
        'AP02': '1'
    }, [
        Service(item='AP19'),
        Service(item='AP02'),
    ]),
])
def test_discovery_cisco_wlc(section, services):
    assert list(discovery_cisco_wlc(section)) == services


@pytest.mark.parametrize("item,params,section,results", [
    ("AP19", {}, {
        'AP19': '1',
        'AP02': '1'
    }, [Result(state=state.OK, summary='Accesspoint: online')]),
    ("AP18", {}, {
        'AP19': '1',
        'AP02': '1'
    }, [Result(state=state.CRIT, summary='Accesspoint not found')]),
])
def test_check_cisco_wlc(item, params, section, results):
    assert list(check_cisco_wlc(item, params, section)) == results


@pytest.mark.parametrize("item,params,section,result", [
    ("AP19", {}, {
        "node1": {
            'AP19': '1',
            'AP02': '1'
        }
Example #16
        Service(item="SSL VPTX voltage"),
        Service(item="Power supply 1 status"),
        Service(item="Power supply 2 status"),
    ]


@pytest.mark.parametrize(
    [
        "item",
        "expected_result",
    ],
    [
        pytest.param(
            "+3.3V main bus voltage",
            [
                Result(state=State.OK, summary="3.2 V"),
                Metric("voltage", 3.244),
            ],
            id="voltage ok",
        ),
        pytest.param(
            "SSL VPTX voltage",
            [
                Result(state=State.CRIT, summary="1.8 V"),
                Metric("voltage", 1.8),
            ],
            id="voltage crit",
        ),
        pytest.param(
            "System fan 1 front speed",
            [
Example #17
def test_discover_liebert_humidity_air(section, extra_section, result):
    discovered = list(discover_liebert_humidity_air(section, extra_section))
    assert discovered == result


@pytest.mark.parametrize(
    'item, params, section, extra_section, result',
    [
        (
            'Return',
            PARAMETERS,
            PARSED_SECTION,
            PARSED_EXTRA_SECTION,
            [
                Result(state=state.OK,
                       summary='36.50 % RH',
                       details='36.50 % RH'),
                Metric(name='humidity', value=36.5, levels=(50.0, 55.0)),
            ],
        ),
        (
            # Item 'Cigar' is not discovered in the discovery function. However, it is tested in this check function
            # in order to test whether the check handles the item correctly when it changes its status from 'on' to
            # 'standby'.
            'Cigar',
            PARAMETERS,
            PARSED_SECTION,
            PARSED_EXTRA_SECTION,
            [
                Result(state=state.OK,
                       summary='Unit is in standby (unavailable)',
Example #18
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2019 tribe29 GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.

import pytest

from cmk.base.plugins.agent_based.agent_based_api.v1 import Metric, Result, State
from cmk.base.plugins.agent_based.kaspersky_av_quarantine import check_kaspersky_av_quarantine


@pytest.mark.parametrize(
    "section,expected_results",
    [(
        {
            "Objects": " 1",
            "Last added": " unkown"
        },
        [
            Result(state=State.CRIT,
                   summary="1 Objects in Quarantine, Last added: unkown"),
            Metric(name="Objects", value=1.0),
        ],
    )],
)
def test_check_kaskpersky_av_client(section, expected_results):
    assert list(check_kaspersky_av_quarantine(section)) == expected_results
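
# Note on the fixture above: the values keep a leading space (e.g. " 1", " unkown"),
# presumably left over from splitting the agent line at ":" (an assumption about the
# parse step, not shown here); the expected output shows the check strips it and
# reports CRIT for the single quarantined object.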
Example #19
             'discovered_speed': 1000000000,
         },
     ),
 ],
 [
     (
         '1',
         {
             'errors': {
                 'both': ('abs', (10, 20))
             },
             'discovered_speed': 1000000000,
             'discovered_oper_status': ['1']
         },
         [
             Result(state=state.OK, summary='[GTB1020-2-CL_mgmt]'),
             Result(state=state.OK,
                    summary='(up)',
                    details='Operational state: up'),
             Result(state=state.OK, summary='Speed: 1 GBit/s'),
             Result(state=state.OK,
                    summary='Current Port: e0f-112 (is home port)'),
         ],
     ),
     (
         '2',
         {
             'errors': {
                 'both': ('abs', (10, 20))
             },
             'discovered_speed': 1000000000,
Example #20
            Service(item=SAMPLE_SECTION[1].name),
        ]
    ),
])
def test_discovery_dell_storage_temp(section, result):
    assert list(dell_storage_temp.discovery_dell_storage_temp(section)) == result


@pytest.mark.parametrize('item, section, result', [
    ('', [], []),
    ('01-08', SAMPLE_SECTION, []),
    (
        SAMPLE_SECTION[0].name,
        SAMPLE_SECTION,
        [
            Result(state=State.OK, summary='Up'),
            Metric('temp', 22.0, levels=(42.0, 47.0)),
            Result(state=State.OK, summary='Temperature: 22°C'),
            Result(state=State.OK, notice='Configuration: prefer user levels over device levels (used device levels)')
        ]
    ),
    (
        SAMPLE_SECTION[1].name,
        SAMPLE_SECTION,
        [
            Result(state=State.OK, summary='Up'),
            Result(state=State.OK, summary='Midplane'),
            Metric('temp', 27.0, levels=(54.0, 57.0)),
            Result(state=State.OK, summary='Temperature: 27°C'),
            Result(state=State.OK, notice='Configuration: prefer user levels over device levels (used device levels)')
        ]
Example #21
def test_check_docker_container_status():
    expected_results = [Result(state=state.OK, summary='Container running')]
    assert list(docker.check_docker_container_status(PARSED)) == expected_results
Example #22
def test_check_diskstat_summary(value_store):
    with pytest.raises(IgnoreResultsError):
        list(
            diskstat.check_diskstat(
                'SUMMARY',
                type_defs.Parameters({}),
                {
                    'disk1': DISK_HALF,
                    'disk2': DISK_HALF,
                },
                {},
            ))
    results_summary = list(
        diskstat.check_diskstat(
            'SUMMARY',
            type_defs.Parameters({}),
            {
                'disk1': DISK,
                'disk2': DISK,
            },
            None,
        ))
    assert results_summary == [
        Result(state=state.OK,
               summary='Utilization: 0.00%',
               details='Utilization: 0.00%'),
        Metric('disk_utilization',
               3.933167173747347e-06,
               levels=(None, None),
               boundaries=(None, None)),
        Result(state=state.OK,
               summary='Read throughput: 35.3 B/s',
               details='Read throughput: 35.3 B/s'),
        Metric('disk_read_throughput',
               35.30109578585019,
               levels=(None, None),
               boundaries=(None, None)),
        Result(state=state.OK,
               summary='Write throughput: 111 B/s',
               details='Write throughput: 111 B/s'),
        Metric('disk_write_throughput',
               110.81089251058174,
               levels=(None, None),
               boundaries=(None, None)),
        Result(state=state.OK,
               summary='Average wait: 540 microseconds',
               details='Average wait: 540 microseconds'),
        Metric('disk_average_wait',
               0.0005402843870952481,
               levels=(None, None),
               boundaries=(None, None)),
        Result(state=state.OK,
               summary='Average read wait: 39 microseconds',
               details='Average read wait: 39 microseconds'),
        Metric('disk_average_read_wait',
               3.987349554326878e-05,
               levels=(None, None),
               boundaries=(None, None)),
        Result(state=state.OK,
               summary='Average write wait: 691 microseconds',
               details='Average write wait: 691 microseconds'),
        Metric('disk_average_write_wait',
               0.0006915664158721743,
               levels=(None, None),
               boundaries=(None, None)),
        Result(state=state.OK,
               summary='Latency: 223 microseconds',
               details='Latency: 223 microseconds'),
        Metric('disk_latency',
               0.00022327168360432604,
               levels=(None, None),
               boundaries=(None, None)),
        Result(state=state.OK,
               summary='Average queue length: 10.00',
               details='Average queue length: 10.00'),
        Metric('disk_queue_length',
               10.0,
               levels=(None, None),
               boundaries=(None, None)),
        Result(state=state.OK,
               summary='Read operations: 0.01/s',
               details='Read operations: 0.01/s'),
        Metric('disk_read_ios',
               0.008178677645811379,
               levels=(None, None),
               boundaries=(None, None)),
        Result(state=state.OK,
               summary='Write operations: 0.03/s',
               details='Write operations: 0.03/s'),
        Metric('disk_write_ios',
               0.027053440554341245,
               levels=(None, None),
               boundaries=(None, None)),
        Metric('disk_average_read_request_size',
               4316.235131718299,
               levels=(None, None),
               boundaries=(None, None)),
        Metric('disk_average_request_size',
               4147.124719166019,
               levels=(None, None),
               boundaries=(None, None)),
        Metric('disk_average_write_request_size',
               4096.0,
               levels=(None, None),
               boundaries=(None, None)),
    ]

    # compare against single-item output
    with pytest.raises(IgnoreResultsError):
        list(
            diskstat.check_diskstat(
                'disk1',
                type_defs.Parameters({}),
                {
                    'disk1': DISK_HALF,
                    'disk2': DISK_HALF,
                },
                None,
            ))
    results_single_disk = list(
        diskstat.check_diskstat(
            'disk1',
            type_defs.Parameters({}),
            {
                'disk1': DISK,
                'disk2': DISK,
            },
            None,
        ))
    assert len(results_summary) == len(results_single_disk)
    for res_sum, res_single in zip(results_summary, results_single_disk):
        assert isinstance(res_single, type(res_sum))
        if isinstance(res_sum, Metric):
            assert isinstance(res_single, Metric)
            assert res_sum.value >= res_single.value
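    # Note on the comparison above: with two identical disks, the SUMMARY item's
    # throughput and IO-rate metrics come out at roughly twice the single-disk values
    # (e.g. read throughput 35.3 vs 17.7 B/s), while averaged quantities such as the
    # waits, latency and queue length are identical, which is why ">=" is asserted
    # rather than a strict ">".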
Example #23
         [Service(item='Liebert CRV')],
     )])
def test_discover_liebert_system(section, result):
    discovered = list(discover_liebert_system(section))
    assert discovered == result


@pytest.mark.parametrize('section, result', [
    ({
        'System Model Number': 'Liebert CRV',
        'System Status': 'Normal Operation',
        'Unit Operating State': 'standby',
        'Unit Operating State Reason': 'Reason Unknown',
    }, [
        Result(state=state.OK,
               summary='System Model Number: Liebert CRV',
               details='System Model Number: Liebert CRV'),
        Result(state=state.OK,
               summary='System Status: Normal Operation',
               details='System Status: Normal Operation'),
        Result(state=state.OK,
               summary='Unit Operating State: standby',
               details='Unit Operating State: standby'),
        Result(state=state.OK,
               summary='Unit Operating State Reason: Reason Unknown',
               details='Unit Operating State Reason: Reason Unknown'),
    ]),
    (
        {
            'System Model Number': 'Liebert CRV',
            'System Status': 'Normal Operation',
Example #24
def test_check_diskstat_single_item(value_store):
    with pytest.raises(IgnoreResultsError):
        list(
            diskstat.check_diskstat(
                'item',
                type_defs.Parameters({}),
                {'item': DISK_HALF},
                None,
            ))
    assert list(
        diskstat.check_diskstat(
            'item',
            type_defs.Parameters({}),
            {'item': DISK},
            None,
        )) == [
            Result(state=state.OK,
                   summary='Utilization: 0.00%',
                   details='Utilization: 0.00%'),
            Metric('disk_utilization',
                   3.933167173747347e-06,
                   levels=(None, None),
                   boundaries=(None, None)),
            Result(state=state.OK,
                   summary='Read throughput: 17.7 B/s',
                   details='Read throughput: 17.7 B/s'),
            Metric('disk_read_throughput',
                   17.650547892925093,
                   levels=(None, None),
                   boundaries=(None, None)),
            Result(state=state.OK,
                   summary='Write throughput: 55.4 B/s',
                   details='Write throughput: 55.4 B/s'),
            Metric('disk_write_throughput',
                   55.40544625529087,
                   levels=(None, None),
                   boundaries=(None, None)),
            Result(state=state.OK,
                   summary='Average wait: 540 microseconds',
                   details='Average wait: 540 microseconds'),
            Metric('disk_average_wait',
                   0.0005402843870952481,
                   levels=(None, None),
                   boundaries=(None, None)),
            Result(state=state.OK,
                   summary='Average read wait: 39 microseconds',
                   details='Average read wait: 39 microseconds'),
            Metric('disk_average_read_wait',
                   3.987349554326878e-05,
                   levels=(None, None),
                   boundaries=(None, None)),
            Result(state=state.OK,
                   summary='Average write wait: 691 microseconds',
                   details='Average write wait: 691 microseconds'),
            Metric('disk_average_write_wait',
                   0.0006915664158721743,
                   levels=(None, None),
                   boundaries=(None, None)),
            Result(state=state.OK,
                   summary='Latency: 223 microseconds',
                   details='Latency: 223 microseconds'),
            Metric('disk_latency',
                   0.00022327168360432604,
                   levels=(None, None),
                   boundaries=(None, None)),
            Result(state=state.OK,
                   summary='Average queue length: 10.00',
                   details='Average queue length: 10.00'),
            Metric('disk_queue_length',
                   10.0,
                   levels=(None, None),
                   boundaries=(None, None)),
            Result(state=state.OK,
                   summary='Read operations: 0.00/s',
                   details='Read operations: 0.00/s'),
            Metric('disk_read_ios',
                   0.004089338822905689,
                   levels=(None, None),
                   boundaries=(None, None)),
            Result(state=state.OK,
                   summary='Write operations: 0.01/s',
                   details='Write operations: 0.01/s'),
            Metric('disk_write_ios',
                   0.013526720277170622,
                   levels=(None, None),
                   boundaries=(None, None)),
            Metric('disk_average_read_request_size',
                   4316.235131718299,
                   levels=(None, None),
                   boundaries=(None, None)),
            Metric('disk_average_request_size',
                   4147.124719166019,
                   levels=(None, None),
                   boundaries=(None, None)),
            Metric('disk_average_write_request_size',
                   4096.0,
                   levels=(None, None),
                   boundaries=(None, None)),
        ]
Example #25
    info = list(itertools.chain.from_iterable(generate_inputs()))
    assert sorted({s.item: s for s in ps_utils.discover_ps(  # type: ignore[attr-defined]
        PS_DISCOVERY_WATO_RULES,  # type: ignore[arg-type]
        ps_section.parse_ps(info),
        None,
        None,
    )}.values(), key=lambda s: s.item or "") == sorted(PS_DISCOVERED_ITEMS, key=lambda s: s.item or
            "")  # type: ignore[attr-defined]


CheckResult = tuple

check_results = [
    [
        Result(
            state=state.OK,
            summary="Processes: 1",
        ),
        Metric("count", 1, levels=(100000, 100000), boundaries=(0, None)),
        Result(
            state=state.WARN,
            notice="virtual: 1.00 GiB (warn/crit at 1.00 GiB/2.00 GiB)",
        ),
        Metric("vsz", 1050360, levels=(1073741824, 2147483648)),
        Result(
            state=state.OK,
            notice="physical: 296 MiB",
        ),
        Metric("rss", 303252, levels=(1073741824, 2147483648)),
        Result(
            state=state.WARN,
            notice="Percentage of total RAM: 28.92% (warn/crit at 25.00%/50.00%)",
Example #26
def test_network_check():
    section = gcp.parse_piggyback(NETWORK_SECTION)
    params = CHECK_DEFAULT_PARAMETERS
    item = "nic0"
    results = list(check_network(item, params, section))
    assert results == [
        Result(state=State.OK, summary="[0]"),
        Result(state=State.OK, summary="(up)", details="Operational state: up"),
        Result(state=State.OK, summary="Speed: unknown"),
        Metric("outqlen", 0.0),
        Result(state=State.OK, summary="In: 385 B/s"),
        Metric("in", 385.4, boundaries=(0.0, None)),
        Result(state=State.OK, summary="Out: 245 B/s"),
        Metric("out", 245.26666666666668, boundaries=(0.0, None)),
        Result(state=State.OK, notice="Errors in: 0%"),
        Metric("inerr", 0.0),
        Result(state=State.OK, notice="Multicast in: 0 packets/s"),
        Metric("inmcast", 0.0),
        Result(state=State.OK, notice="Broadcast in: 0 packets/s"),
        Metric("inbcast", 0.0),
        Result(state=State.OK, notice="Unicast in: 0 packets/s"),
        Metric("inucast", 0.0),
        Result(state=State.OK, notice="Non-unicast in: 0 packets/s"),
        Metric("innucast", 0.0),
        Result(state=State.OK, notice="Discards in: 0 packets/s"),
        Metric("indisc", 0.0),
        Result(state=State.OK, notice="Errors out: 0%"),
        Metric("outerr", 0.0),
        Result(state=State.OK, notice="Multicast out: 0 packets/s"),
        Metric("outmcast", 0.0),
        Result(state=State.OK, notice="Broadcast out: 0 packets/s"),
        Metric("outbcast", 0.0),
        Result(state=State.OK, notice="Unicast out: 0 packets/s"),
        Metric("outucast", 0.0),
        Result(state=State.OK, notice="Non-unicast out: 0 packets/s"),
        Metric("outnucast", 0.0),
        Result(state=State.OK, notice="Discards out: 0 packets/s"),
        Metric("outdisc", 0.0),
    ]
Example #27
def test_cpu_util_single_process_levels(cpu_cores):
    """Test CPU utilization per single process.
- Check that Number of cores weight is active
- Check that single process CPU utilization is present only on warn/crit states"""

    params: Dict[str, Any] = {
        'process': '~.*firefox',
        'process_info': "text",
        'cpu_rescale_max': True,
        'levels': (1, 1, 99999, 99999),
        'single_cpulevels': (45.0, 80.0),
    }

    def run_check_ps_common_with_elapsed_time(check_time, cputime):
        with on_time(check_time, "CET"):
            agent_info = """(on,2275004,434008,00:00:49/26:58,25576) firefox
(on,1869920,359836,00:01:23/6:57,25664) firefox
(on,7962644,229660,00:00:10/26:56,25758) firefox
(on,1523536,83064,00:{:02}:00/26:55,25898) firefox"""
            _cpu_info, parsed_lines = ps_section.parse_ps(splitter(agent_info.format(cputime)))
            lines_with_node_name: List[Tuple[Optional[str], ps_utils.ps_info, List[str]]] = [
                (None, ps_info, cmd_line) for (ps_info, cmd_line) in parsed_lines]

            return list(ps_utils.check_ps_common(
                label="Processes",
                item='firefox',
                params=params,  # type: ignore[arg-type]
                process_lines=lines_with_node_name,
                cpu_cores=cpu_cores,
                total_ram=None,
            ))

    with value_store.context(CheckPluginName("ps"), "unit-test"):
        # CPU utilization is a counter, initialize it
        run_check_ps_common_with_elapsed_time(0, 0)
        # CPU utilization is a counter; after 60 s of wall-clock time, one process has
        # consumed 2 more minutes of CPU (see the worked numbers after this example)
        output = run_check_ps_common_with_elapsed_time(60, 2)

    cpu_util = 200.0 / cpu_cores
    cpu_util_s = ps_utils.render.percent(cpu_util)
    single_msg = 'firefox with PID 25898 CPU: %s (warn/crit at 45.00%%/80.00%%)' % cpu_util_s
    reference = [
        Result(state=state.OK, summary="Processes: 4"),
        Metric("count", 4, levels=(100000, 100000), boundaries=(0, None)),
        Result(state=state.OK, notice="virtual: 13.0 GiB"),
        Metric("vsz", 13631104),
        Result(state=state.OK, notice="physical: 1.06 GiB"),
        Metric("rss", 1106568),
        Metric('pcpu', cpu_util),
        Result(state=state.OK, summary="CPU: %s" % cpu_util_s),
        Result(state=state.OK, notice='firefox with PID 25576 CPU: 0%'),
        Result(state=state.OK, notice='firefox with PID 25664 CPU: 0%'),
        Result(state=state.OK, notice='firefox with PID 25758 CPU: 0%'),
        Result(state=state.OK, notice='firefox with PID 25898 CPU: 40.00%'),
        Result(state=state.OK, notice='Youngest running for: 6 minutes 57 seconds'),
        Metric("age_youngest", 417.0),
        Result(state=state.OK, notice='Oldest running for: 26 minutes 58 seconds'),
        Metric("age_oldest", 1618.0),
        Result(state=state.OK, notice="\r\n".join([
            'name firefox, user on, virtual size 2275004kB, resident size 434008kB,'
            ' creation time Jan 01 1970 00:34:02, pid 25576, cpu usage 0.0%',
            'name firefox, user on, virtual size 1869920kB, resident size 359836kB,'
            ' creation time Jan 01 1970 00:54:03, pid 25664, cpu usage 0.0%',
            'name firefox, user on, virtual size 7962644kB, resident size 229660kB,'
            ' creation time Jan 01 1970 00:34:04, pid 25758, cpu usage 0.0%',
            'name firefox, user on, virtual size 1523536kB, resident size 83064kB,'
            ' creation time Jan 01 1970 00:34:05, pid 25898, cpu usage %.1f%%\r\n'
            % cpu_util,
        ])),
    ]

    if cpu_util > params['single_cpulevels'][1]:
        reference[11] = Result(state=state.CRIT, notice=single_msg)
    elif cpu_util > params['single_cpulevels'][0]:
        reference[11] = Result(state=state.WARN, notice=single_msg)

    assert output == reference
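
Worked numbers for the reference above, restating the comments in the test (the rescaling
by the core count is an assumption tied to cpu_rescale_max, not something this test spells out):

raw_util = 100.0 * (2 * 60) / 60   # 200.0 %: one firefox gains 2 CPU-minutes during 60 s of wall clock
per_core = raw_util / 4            # e.g. 50.0 % on a hypothetical 4-core box; the test uses 200.0 / cpu_cores
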
Example #28
        (_SECTION, [Service()]),
    ],
)
def test_discover_k8s_pod_container(fix_register, section, expected_result):
    check_plugin = fix_register.check_plugins[CheckPluginName(
        "k8s_pod_container")]
    assert sorted(check_plugin.discovery_function(section)) == expected_result


@pytest.mark.parametrize(
    "section, expected_result",
    [
        (
            _SECTION,
            [
                Result(state=State.OK, summary="Ready: 0/1"),
                Metric("docker_all_containers", 1.0, boundaries=(0.0, 1.0)),
                Metric("ready_containers", 0.0, boundaries=(0.0, 1.0)),
                Result(state=State.OK, summary="Running: 0"),
                Result(state=State.OK, summary="Waiting: 0"),
                Result(state=State.OK, summary="Terminated: 1"),
            ],
        ),
    ],
)
def test_check_k8s_pod_container(fix_register, section, expected_result):
    check_plugin = fix_register.check_plugins[CheckPluginName(
        "k8s_pod_container")]
    assert list(check_plugin.check_function(
        params={}, section=section)) == expected_result
Example #29
    # _scale_levels_predictive to handle the changed values
    from cmk.gui.plugins.wato import PredictiveLevels

    pl = PredictiveLevels()
    pl.validate_value(LEVELS, "")
    pl.validate_datatype(LEVELS, "")


@pytest.mark.parametrize(
    "params,disk,exp_res",
    [
        (
            {},
            DISK,
            [
                Result(state=state.OK, notice="Utilization: 53.24%"),
                Metric("disk_utilization", 0.53242),
                Result(state=state.OK, summary="Read: 12.3 kB/s"),
                Metric("disk_read_throughput", 12312.4324),
                Result(state=state.OK, summary="Write: 3.45 kB/s"),
                Metric("disk_write_throughput", 3453.345),
                Result(state=state.OK, notice="Average wait: 30 seconds"),
                Metric("disk_average_wait", 30.0),
                Result(state=state.OK,
                       notice="Average read wait: 2 minutes 3 seconds"),
                Metric("disk_average_read_wait", 123.0),
                Result(state=state.OK,
                       notice="Average write wait: 1 minute 30 seconds"),
                Metric("disk_average_write_wait", 90.0),
                Result(state=state.OK, notice="Average queue length: 123.00"),
                Metric("disk_queue_length", 123.0),
Example #30
def test_discovery():
    assert list(
        winperf_processor.discover_winperf_processor_util(SECTION)) == [
            Service(),
        ]


@pytest.mark.parametrize(
    "value_store, params, result",
    [
        ({}, {}, [
            IgnoreResults("Initialized: 'util'"),
            IgnoreResults("Initialized: 'user'"),
            IgnoreResults("Initialized: 'privileged'"),
            Result(state=State.OK, notice='Number of processors: 4'),
            Metric('cpus', 4),
        ]),
        (
            VALUE_STORE,
            {},
            [
                Result(state=State.OK, summary='Total CPU: 99.99983%'),
                Metric('util', 99.99983122362869,
                       boundaries=(0, 4)),  # boundaries: 4 as in 4 CPUs
                Result(state=State.OK, notice='User: 0.00051%'),
                Metric('user', 0.0005063291139240507),
                Result(state=State.OK, notice='Privileged: 0.000017%'),
                Metric('privileged', 1.687763713080169e-05),
                Result(state=State.OK, notice='Number of processors: 4'),
                Metric('cpus', 4),